diff --git a/.changes/unreleased/Breaking Changes-20231127-114757.yaml b/.changes/unreleased/Breaking Changes-20231127-114757.yaml new file mode 100644 index 00000000000..b7b8d030d6a --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20231127-114757.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Remove adapter.get_compiler interface +time: 2023-11-27T11:47:57.443202-05:00 +custom: + Author: michelleark + Issue: "9148" diff --git a/.changes/unreleased/Breaking Changes-20231128-134356.yaml b/.changes/unreleased/Breaking Changes-20231128-134356.yaml new file mode 100644 index 00000000000..b3204c1418b --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20231128-134356.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Move AdapterLogger to adapters folder +time: 2023-11-28T13:43:56.853925-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9151" diff --git a/.changes/unreleased/Breaking Changes-20231130-135348.yaml b/.changes/unreleased/Breaking Changes-20231130-135348.yaml new file mode 100644 index 00000000000..df673d69367 --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20231130-135348.yaml @@ -0,0 +1,7 @@ +kind: Breaking Changes +body: move event manager setup back to core, remove ref to global EVENT_MANAGER and + clean up event manager functions +time: 2023-11-30T13:53:48.645192-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9150" diff --git a/.changes/unreleased/Features-20230915-123733.yaml b/.changes/unreleased/Features-20230915-123733.yaml new file mode 100644 index 00000000000..146ad8ef89a --- /dev/null +++ b/.changes/unreleased/Features-20230915-123733.yaml @@ -0,0 +1,6 @@ +kind: Features +body: 'Allow adapters to include package logs in dbt standard logging ' +time: 2023-09-15T12:37:33.862862-07:00 +custom: + Author: colin-rogers-dbt + Issue: "7859" diff --git a/.changes/unreleased/Features-20231026-110821.yaml b/.changes/unreleased/Features-20231026-110821.yaml new file mode 100644 index 00000000000..7fffb6fe569 --- /dev/null 
+++ b/.changes/unreleased/Features-20231026-110821.yaml @@ -0,0 +1,6 @@ +kind: Features +body: migrate utils to common and adapters folders +time: 2023-10-26T11:08:21.458709-07:00 +custom: + Author: colin-rogers-dbt + Issue: "8924" diff --git a/.changes/unreleased/Features-20231026-123556.yaml b/.changes/unreleased/Features-20231026-123556.yaml new file mode 100644 index 00000000000..cf37554c08d --- /dev/null +++ b/.changes/unreleased/Features-20231026-123556.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Move Agate helper client into common +time: 2023-10-26T12:35:56.538587-07:00 +custom: + Author: MichelleArk + Issue: "8926" diff --git a/.changes/unreleased/Features-20231026-123913.yaml b/.changes/unreleased/Features-20231026-123913.yaml new file mode 100644 index 00000000000..d3ada7cb691 --- /dev/null +++ b/.changes/unreleased/Features-20231026-123913.yaml @@ -0,0 +1,6 @@ +kind: Features +body: remove usage of dbt.config.PartialProject from dbt/adapters +time: 2023-10-26T12:39:13.904116-07:00 +custom: + Author: MichelleArk + Issue: "8928" diff --git a/.changes/unreleased/Features-20231107-135635.yaml b/.changes/unreleased/Features-20231107-135635.yaml new file mode 100644 index 00000000000..711ba4ce102 --- /dev/null +++ b/.changes/unreleased/Features-20231107-135635.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Remove legacy logger +time: 2023-11-07T13:56:35.186648-08:00 +custom: + Author: colin-rogers-dbt + Issue: "8027" diff --git a/.changes/unreleased/Under the Hood-20230831-164435.yaml b/.changes/unreleased/Under the Hood-20230831-164435.yaml new file mode 100644 index 00000000000..efa8a42cece --- /dev/null +++ b/.changes/unreleased/Under the Hood-20230831-164435.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Added more type annotations. 
+time: 2023-08-31T16:44:35.737954-04:00 +custom: + Author: peterallenwebb + Issue: "8537" diff --git a/.changes/unreleased/Under the Hood-20231026-184953.yaml b/.changes/unreleased/Under the Hood-20231026-184953.yaml new file mode 100644 index 00000000000..cdfbf25be5f --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231026-184953.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove usage of dbt.include.global_project in dbt/adapters +time: 2023-10-26T18:49:53.36449-04:00 +custom: + Author: michelleark + Issue: "8925" diff --git a/.changes/unreleased/Under the Hood-20231101-102758.yaml b/.changes/unreleased/Under the Hood-20231101-102758.yaml new file mode 100644 index 00000000000..790cd9ec2f0 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231101-102758.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: remove dbt.flags.MP_CONTEXT usage in dbt/adapters +time: 2023-11-01T10:27:58.790153-04:00 +custom: + Author: michelleark + Issue: "8967" diff --git a/.changes/unreleased/Under the Hood-20231101-173124.yaml b/.changes/unreleased/Under the Hood-20231101-173124.yaml new file mode 100644 index 00000000000..5a4656645f4 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231101-173124.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: 'Remove usage of dbt.flags.LOG_CACHE_EVENTS in dbt/adapters' +time: 2023-11-01T17:31:24.974093-04:00 +custom: + Author: michelleark + Issue: "8969" diff --git a/.changes/unreleased/Under the Hood-20231107-135728.yaml b/.changes/unreleased/Under the Hood-20231107-135728.yaml new file mode 100644 index 00000000000..025c871519a --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231107-135728.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove use of dbt/core exceptions in dbt/adapter +time: 2023-11-07T13:57:28.683727-08:00 +custom: + Author: colin-rogers-dbt MichelleArk + Issue: "8920" diff --git a/.changes/unreleased/Under the Hood-20231116-174251.yaml b/.changes/unreleased/Under the Hood-20231116-174251.yaml 
new file mode 100644 index 00000000000..11f02a2661e --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231116-174251.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Remove usage of dbt.deprecations in dbt/adapters, enable core & adapter-specific + event types and protos +time: 2023-11-16T17:42:51.005023-05:00 +custom: + Author: michelleark + Issue: 8927 8918 diff --git a/.changes/unreleased/Under the Hood-20231120-183214.yaml b/.changes/unreleased/Under the Hood-20231120-183214.yaml new file mode 100644 index 00000000000..570dd360d8c --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231120-183214.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Move column constraints into common/contracts, removing another dependency of + adapters on core. +time: 2023-11-20T18:32:14.859503-05:00 +custom: + Author: peterallenwebb + Issue: "9024" diff --git a/.changes/unreleased/Under the Hood-20231128-170732.yaml b/.changes/unreleased/Under the Hood-20231128-170732.yaml new file mode 100644 index 00000000000..d1afc0086cd --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231128-170732.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Move dbt.semver to dbt.common.semver and update references. 
+time: 2023-11-28T17:07:32.172421-08:00 +custom: + Author: versusfacit + Issue: "9039" diff --git a/.changes/unreleased/Under the Hood-20231130-135432.yaml b/.changes/unreleased/Under the Hood-20231130-135432.yaml new file mode 100644 index 00000000000..bc683ea3307 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231130-135432.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Move lowercase utils method to common +time: 2023-11-30T13:54:32.561673-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9180" diff --git a/.changes/unreleased/Under the Hood-20231205-093544.yaml b/.changes/unreleased/Under the Hood-20231205-093544.yaml new file mode 100644 index 00000000000..fdb150c300b --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231205-093544.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove usages of dbt.clients.jinja in dbt/adapters +time: 2023-12-05T09:35:44.845352+09:00 +custom: + Author: michelleark + Issue: "9205" diff --git a/.changes/unreleased/Under the Hood-20231205-120559.yaml b/.changes/unreleased/Under the Hood-20231205-120559.yaml new file mode 100644 index 00000000000..a209bda9f6c --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231205-120559.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove usage of dbt.contracts in dbt/adapters +time: 2023-12-05T12:05:59.936775+09:00 +custom: + Author: michelleark + Issue: "9208" diff --git a/.changes/unreleased/Under the Hood-20231205-165812.yaml b/.changes/unreleased/Under the Hood-20231205-165812.yaml new file mode 100644 index 00000000000..8dcf402535c --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231205-165812.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove usage of dbt.contracts.graph.nodes.ResultNode in dbt/adapters +time: 2023-12-05T16:58:12.932172+09:00 +custom: + Author: michelleark + Issue: "9214" diff --git a/.changes/unreleased/Under the Hood-20231205-170725.yaml b/.changes/unreleased/Under the Hood-20231205-170725.yaml new file mode 100644 index 
00000000000..2018825bcff --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231205-170725.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Introduce RelationConfig Protocol, consolidate Relation.create_from +time: 2023-12-05T17:07:25.33861+09:00 +custom: + Author: michelleark + Issue: "9215" diff --git a/.changes/unreleased/Under the Hood-20231205-185022.yaml b/.changes/unreleased/Under the Hood-20231205-185022.yaml new file mode 100644 index 00000000000..7d7b8cae1c1 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231205-185022.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Move BaseConfig, Metadata and various other contract classes from model_config + to common/contracts/config +time: 2023-12-05T18:50:22.321229-08:00 +custom: + Author: colin-rorgers-dbt + Issue: "8919" diff --git a/.changes/unreleased/Under the Hood-20231205-235830.yaml b/.changes/unreleased/Under the Hood-20231205-235830.yaml new file mode 100644 index 00000000000..a533ffb4a4b --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231205-235830.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: remove manifest from adapter.set_relations_cache signature +time: 2023-12-05T23:58:30.920144+09:00 +custom: + Author: michelleark + Issue: "9217" diff --git a/.changes/unreleased/Under the Hood-20231206-000343.yaml b/.changes/unreleased/Under the Hood-20231206-000343.yaml new file mode 100644 index 00000000000..564b61e89aa --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231206-000343.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: ' remove manifest from adapter catalog method signatures' +time: 2023-12-06T00:03:43.824252+09:00 +custom: + Author: michelleark + Issue: "9218" diff --git a/.changes/unreleased/Under the Hood-20231207-111554.yaml b/.changes/unreleased/Under the Hood-20231207-111554.yaml new file mode 100644 index 00000000000..8dec8ed18e4 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231207-111554.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add 
MacroResolverProtocol, remove lazy loading of manifest in adapter.execute_macro +time: 2023-12-07T11:15:54.427818+09:00 +custom: + Author: michelleark + Issue: "9244" diff --git a/.changes/unreleased/Under the Hood-20231207-224139.yaml b/.changes/unreleased/Under the Hood-20231207-224139.yaml new file mode 100644 index 00000000000..8c4f4fd3c1f --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231207-224139.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: pass query header context to MacroQueryStringSetter +time: 2023-12-07T22:41:39.498024+09:00 +custom: + Author: michelleark + Issue: 9249 9250 diff --git a/.changes/unreleased/Under the Hood-20231208-004854.yaml b/.changes/unreleased/Under the Hood-20231208-004854.yaml new file mode 100644 index 00000000000..e9d4a6fd9c5 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231208-004854.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: add macro_context_generator on adapter +time: 2023-12-08T00:48:54.506911+09:00 +custom: + Author: michelleark + Issue: "9247" diff --git a/.changes/unreleased/Under the Hood-20231212-154842.yaml b/.changes/unreleased/Under the Hood-20231212-154842.yaml new file mode 100644 index 00000000000..8ae42fa6482 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231212-154842.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: pass mp_context to adapter factory as argument instead of import +time: 2023-12-12T15:48:42.866175-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9025" diff --git a/.changes/unreleased/Under the Hood-20231214-122134.yaml b/.changes/unreleased/Under the Hood-20231214-122134.yaml new file mode 100644 index 00000000000..97f98fb0592 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231214-122134.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: have dbt-postgres use RelationConfig protocol for materialized views' +time: 2023-12-14T12:21:34.756973-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9292" diff --git a/.changes/unreleased/Under the 
Hood-20231214-164107.yaml b/.changes/unreleased/Under the Hood-20231214-164107.yaml new file mode 100644 index 00000000000..ded8a3b3c7c --- /dev/null +++ b/.changes/unreleased/Under the Hood-20231214-164107.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: move system.py to common as dbt-bigquery relies on it to call gcloud +time: 2023-12-14T16:41:07.539814-08:00 +custom: + Author: colin-rogers-dbt + Issue: "9293" diff --git a/.flake8 b/.flake8 index e39b2fa4646..26e20a5d209 100644 --- a/.flake8 +++ b/.flake8 @@ -10,3 +10,5 @@ ignore = E741 E501 # long line checking is done in black exclude = test/ +per-file-ignores = + */__init__.py: F401 diff --git a/.gitattributes b/.gitattributes index 2cc23be887c..b963dbdc1c5 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,4 +1,4 @@ -core/dbt/include/index.html binary +core/dbt/task/docs/index.html binary tests/functional/artifacts/data/state/*/manifest.json binary core/dbt/docs/build/html/searchindex.js binary core/dbt/docs/build/html/index.html binary diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b41cac4ae5b..160c9cfd0f7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ # Configuration for pre-commit hooks (see https://pre-commit.com/). # Eventually the hooks described here will be run as tests before merging each PR. -exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py) +exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py) # Force all unspecified python hooks to run python 3.8 default_language_version: diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index e689d32f2ee..5373f780aa1 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -26,7 +26,7 @@ Legacy tests are found in the 'test' directory: The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. 
Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask. -core/dbt/include/index.html +core/dbt/task/docs/index.html This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged. ## Adapters diff --git a/Makefile b/Makefile index 9c276902194..595026452ab 100644 --- a/Makefile +++ b/Makefile @@ -40,7 +40,16 @@ dev: dev_req ## Installs dbt-* packages in develop mode along with development d .PHONY: proto_types proto_types: ## generates google protobuf python file from types.proto - protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto + protoc -I=./core/dbt/common/events --python_out=./core/dbt/common/events ./core/dbt/common/events/types.proto + +.PHONY: core_proto_types +core_proto_types: ## generates google protobuf python file from core_types.proto + protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto + +.PHONY: adapter_proto_types +adapter_proto_types: ## generates google protobuf python file from core_types.proto + protoc -I=./core/dbt/adapters/events --python_out=./core/dbt/adapters/events ./core/dbt/adapters/events/adapter_types.proto + .PHONY: mypy mypy: .env ## Runs mypy against staged changes for static type checking. 
diff --git a/core/MANIFEST.in b/core/MANIFEST.in index ba02c346851..595aea2a8a8 100644 --- a/core/MANIFEST.in +++ b/core/MANIFEST.in @@ -1,2 +1,3 @@ -recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore +recursive-include dbt/adapters/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore include dbt/py.typed +recursive-include dbt/task/docs *.html diff --git a/core/dbt/adapters/base/__init__.py b/core/dbt/adapters/base/__init__.py index 07f5303992e..41c253efd87 100644 --- a/core/dbt/adapters/base/__init__.py +++ b/core/dbt/adapters/base/__init__.py @@ -1,7 +1,7 @@ # these are all just exports, #noqa them so flake8 will be happy # TODO: Should we still include this in the `adapters` namespace? -from dbt.contracts.connection import Credentials # noqa: F401 +from dbt.adapters.contracts.connection import Credentials # noqa: F401 from dbt.adapters.base.meta import available # noqa: F401 from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401 from dbt.adapters.base.relation import ( # noqa: F401 diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py index 50a687c2f5d..7d08780c4f4 100644 --- a/core/dbt/adapters/base/column.py +++ b/core/dbt/adapters/base/column.py @@ -2,7 +2,7 @@ import re from typing import Dict, ClassVar, Any, Optional -from dbt.exceptions import DbtRuntimeError +from dbt.common.exceptions import DbtRuntimeError @dataclass diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py index 1731924dffb..f347876f62e 100644 --- a/core/dbt/adapters/base/connections.py +++ b/core/dbt/adapters/base/connections.py @@ -6,6 +6,7 @@ # multiprocessing.RLock is a function returning this type from multiprocessing.synchronize import RLock +from multiprocessing.context import SpawnContext from threading import get_ident from typing import ( Any, @@ -23,8 +24,9 @@ import agate -import dbt.exceptions -from dbt.contracts.connection import ( +import 
dbt.adapters.exceptions +import dbt.common.exceptions.base +from dbt.adapters.contracts.connection import ( Connection, Identifier, ConnectionState, @@ -32,13 +34,12 @@ LazyHandle, AdapterResponse, ) -from dbt.contracts.graph.manifest import Manifest from dbt.adapters.base.query_headers import ( MacroQueryStringSetter, ) -from dbt.events import AdapterLogger -from dbt.events.functions import fire_event -from dbt.events.types import ( +from dbt.adapters.events.logging import AdapterLogger +from dbt.common.events.functions import fire_event +from dbt.adapters.events.types import ( NewConnection, ConnectionReused, ConnectionLeftOpenInCleanup, @@ -48,9 +49,8 @@ Rollback, RollbackFailed, ) -from dbt.events.contextvars import get_node_info -from dbt import flags -from dbt.utils import cast_to_str +from dbt.common.events.contextvars import get_node_info +from dbt.common.utils import cast_to_str SleepTime = Union[int, float] # As taken by time.sleep. AdapterHandle = Any # Adapter connection handle objects can be any class. 
@@ -72,14 +72,14 @@ class BaseConnectionManager(metaclass=abc.ABCMeta): TYPE: str = NotImplemented - def __init__(self, profile: AdapterRequiredConfig) -> None: + def __init__(self, profile: AdapterRequiredConfig, mp_context: SpawnContext) -> None: self.profile = profile self.thread_connections: Dict[Hashable, Connection] = {} - self.lock: RLock = flags.MP_CONTEXT.RLock() + self.lock: RLock = mp_context.RLock() self.query_header: Optional[MacroQueryStringSetter] = None - def set_query_header(self, manifest: Manifest) -> None: - self.query_header = MacroQueryStringSetter(self.profile, manifest) + def set_query_header(self, query_header_context: Dict[str, Any]) -> None: + self.query_header = MacroQueryStringSetter(self.profile, query_header_context) @staticmethod def get_thread_identifier() -> Hashable: @@ -91,13 +91,15 @@ def get_thread_connection(self) -> Connection: key = self.get_thread_identifier() with self.lock: if key not in self.thread_connections: - raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections)) + raise dbt.adapters.exceptions.InvalidConnectionError( + key, list(self.thread_connections) + ) return self.thread_connections[key] def set_thread_connection(self, conn: Connection) -> None: key = self.get_thread_identifier() if key in self.thread_connections: - raise dbt.exceptions.DbtInternalError( + raise dbt.common.exceptions.DbtInternalError( "In set_thread_connection, existing connection exists for {}" ) self.thread_connections[key] = conn @@ -137,13 +139,13 @@ def exception_handler(self, sql: str) -> ContextManager: :return: A context manager that handles exceptions raised by the underlying database. """ - raise dbt.exceptions.NotImplementedError( + raise dbt.common.exceptions.base.NotImplementedError( "`exception_handler` is not implemented for this adapter!" 
) def set_connection_name(self, name: Optional[str] = None) -> Connection: """Called by 'acquire_connection' in BaseAdapter, which is called by - 'connection_named', called by 'connection_for(node)'. + 'connection_named'. Creates a connection for this thread if one doesn't already exist, and will rename an existing connection.""" @@ -220,14 +222,14 @@ def retry_connection( :param int _attempts: Parameter used to keep track of the number of attempts in calling the connect function across recursive calls. Passed as an argument to retry_timeout if it is a Callable. This parameter should not be set by the initial caller. - :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without + :raises dbt.adapters.exceptions.FailedToConnectError: Upon exhausting all retry attempts without successfully acquiring a handle. :return: The given connection with its appropriate state and handle attributes set depending on whether we successfully acquired a handle or not. """ timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout if timeout < 0: - raise dbt.exceptions.FailedToConnectError( + raise dbt.adapters.exceptions.FailedToConnectError( "retry_timeout cannot be negative or return a negative time." ) @@ -235,7 +237,7 @@ def retry_connection( # This guard is not perfect others may add to the recursion limit (e.g. built-ins). 
connection.handle = None connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative") + raise dbt.adapters.exceptions.FailedToConnectError("retry_limit cannot be negative") try: connection.handle = connect() @@ -246,7 +248,7 @@ def retry_connection( if retry_limit <= 0: connection.handle = None connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectError(str(e)) + raise dbt.adapters.exceptions.FailedToConnectError(str(e)) logger.debug( f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n" @@ -268,12 +270,12 @@ def retry_connection( except Exception as e: connection.handle = None connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectError(str(e)) + raise dbt.adapters.exceptions.FailedToConnectError(str(e)) @abc.abstractmethod def cancel_open(self) -> Optional[List[str]]: """Cancel all open connections on the adapter. (passable)""" - raise dbt.exceptions.NotImplementedError( + raise dbt.common.exceptions.base.NotImplementedError( "`cancel_open` is not implemented for this adapter!" ) @@ -288,7 +290,9 @@ def open(cls, connection: Connection) -> Connection: This should be thread-safe, or hold the lock if necessary. The given connection should not be in either in_use or available. """ - raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!") + raise dbt.common.exceptions.base.NotImplementedError( + "`open` is not implemented for this adapter!" + ) def release(self) -> None: with self.lock: @@ -320,12 +324,16 @@ def cleanup_all(self) -> None: @abc.abstractmethod def begin(self) -> None: """Begin a transaction. (passable)""" - raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!") + raise dbt.common.exceptions.base.NotImplementedError( + "`begin` is not implemented for this adapter!" + ) @abc.abstractmethod def commit(self) -> None: """Commit a transaction. 
(passable)""" - raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!") + raise dbt.common.exceptions.base.NotImplementedError( + "`commit` is not implemented for this adapter!" + ) @classmethod def _rollback_handle(cls, connection: Connection) -> None: @@ -361,7 +369,7 @@ def _close_handle(cls, connection: Connection) -> None: def _rollback(cls, connection: Connection) -> None: """Roll back the given connection.""" if connection.transaction_open is False: - raise dbt.exceptions.DbtInternalError( + raise dbt.common.exceptions.DbtInternalError( f"Tried to rollback transaction on connection " f'"{connection.name}", but it does not have one open!' ) @@ -412,7 +420,9 @@ def execute( :return: A tuple of the query status and results (empty if fetch=False). :rtype: Tuple[AdapterResponse, agate.Table] """ - raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!") + raise dbt.common.exceptions.base.NotImplementedError( + "`execute` is not implemented for this adapter!" + ) def add_select_query(self, sql: str) -> Tuple[Connection, Any]: """ @@ -422,7 +432,7 @@ def add_select_query(self, sql: str) -> Tuple[Connection, Any]: See https://github.com/dbt-labs/dbt-core/issues/8396 for more information. """ - raise dbt.exceptions.NotImplementedError( + raise dbt.common.exceptions.base.NotImplementedError( "`add_select_query` is not implemented for this adapter!" ) @@ -430,6 +440,6 @@ def add_select_query(self, sql: str) -> Tuple[Connection, Any]: def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: """Get the string representation of the data type from the type_code.""" # https://peps.python.org/pep-0249/#type-objects - raise dbt.exceptions.NotImplementedError( + raise dbt.common.exceptions.base.NotImplementedError( "`data_type_code_to_name` is not implemented for this adapter!" 
) diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py index 60c2dd397b2..6aa6c587316 100644 --- a/core/dbt/adapters/base/impl.py +++ b/core/dbt/adapters/base/impl.py @@ -4,12 +4,10 @@ from datetime import datetime from enum import Enum import time -from itertools import chain from typing import ( Any, Callable, Dict, - Iterable, Iterator, List, Mapping, @@ -19,45 +17,57 @@ Type, TypedDict, Union, + FrozenSet, + Iterable, ) +from multiprocessing.context import SpawnContext from dbt.adapters.capability import Capability, CapabilityDict -from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint +from dbt.common.contracts.constraints import ( + ColumnLevelConstraint, + ConstraintType, + ModelLevelConstraint, +) +from dbt.adapters.contracts.macros import MacroResolverProtocol import agate import pytz -from dbt.exceptions import ( +from dbt.adapters.exceptions import ( + SnapshotTargetIncompleteError, + SnapshotTargetNotSnapshotTableError, + NullRelationDropAttemptedError, + NullRelationCacheAttemptedError, + RelationReturnedMultipleResultsError, + UnexpectedNonTimestampError, + RenameToNoneAttemptedError, + QuoteConfigTypeError, +) + +from dbt.common.exceptions import ( + NotImplementedError, DbtInternalError, DbtRuntimeError, DbtValidationError, + UnexpectedNullError, MacroArgTypeError, MacroResultError, - NotImplementedError, - NullRelationCacheAttemptedError, - NullRelationDropAttemptedError, - QuoteConfigTypeError, - RelationReturnedMultipleResultsError, - RenameToNoneAttemptedError, - SnapshotTargetIncompleteError, - SnapshotTargetNotSnapshotTableError, - UnexpectedNonTimestampError, - UnexpectedNullError, ) -from dbt.adapters.protocol import AdapterConfig -from dbt.clients.agate_helper import ( +from dbt.adapters.protocol import ( + AdapterConfig, + MacroContextGeneratorCallable, +) +from dbt.common.clients.agate_helper import ( empty_table, get_column_value_uncased, merge_tables, table_from_rows, 
Integer, ) -from dbt.clients.jinja import MacroGenerator -from dbt.contracts.graph.manifest import Manifest, MacroManifest -from dbt.contracts.graph.nodes import ResultNode -from dbt.events.functions import fire_event, warn_or_error -from dbt.events.types import ( +from dbt.common.clients.jinja import CallableMacroGenerator +from dbt.common.events.functions import fire_event, warn_or_error +from dbt.adapters.events.types import ( CacheMiss, ListRelations, CodeExecution, @@ -66,9 +76,14 @@ ConstraintNotSupported, ConstraintNotEnforced, ) -from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict +from dbt.common.utils import filter_null_values, executor, cast_to_str, AttrDict -from dbt.adapters.base.connections import Connection, AdapterResponse, BaseConnectionManager +from dbt.adapters.contracts.relation import RelationConfig +from dbt.adapters.base.connections import ( + Connection, + AdapterResponse, + BaseConnectionManager, +) from dbt.adapters.base.meta import AdapterMeta, available from dbt.adapters.base.relation import ( ComponentName, @@ -79,7 +94,8 @@ from dbt.adapters.base import Column as BaseColumn from dbt.adapters.base import Credentials from dbt.adapters.cache import RelationsCache, _make_ref_key_dict -from dbt import deprecations +from dbt.adapters.events.types import CollectFreshnessReturnSignature + GET_CATALOG_MACRO_NAME = "get_catalog" GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations" @@ -101,11 +117,13 @@ def _expect_row_value(key: str, row: agate.Row): return row[key] -def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]: +def _catalog_filter_schemas( + used_schemas: FrozenSet[Tuple[str, str]] +) -> Callable[[agate.Row], bool]: """Return a function that takes a row and decides if the row should be included in the catalog output. 
""" - schemas = frozenset((d.lower(), s.lower()) for d, s in manifest.get_used_schemas()) + schemas = frozenset((d.lower(), s.lower()) for d, s in used_schemas) def test(row: agate.Row) -> bool: table_database = _expect_row_value("table_database", row) @@ -242,11 +260,31 @@ class BaseAdapter(metaclass=AdapterMeta): # implementations to indicate adapter support for optional capabilities. _capabilities = CapabilityDict({}) - def __init__(self, config) -> None: + def __init__(self, config, mp_context: SpawnContext) -> None: self.config = config - self.cache = RelationsCache() - self.connections = self.ConnectionManager(config) - self._macro_manifest_lazy: Optional[MacroManifest] = None + self.cache = RelationsCache(log_cache_events=config.log_cache_events) + self.connections = self.ConnectionManager(config, mp_context) + self._macro_resolver: Optional[MacroResolverProtocol] = None + self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None + + ### + # Methods to set / access a macro resolver + ### + def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: + self._macro_resolver = macro_resolver + + def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: + return self._macro_resolver + + def clear_macro_resolver(self) -> None: + if self._macro_resolver is not None: + self._macro_resolver = None + + def set_macro_context_generator( + self, + macro_context_generator: MacroContextGeneratorCallable, + ) -> None: + self._macro_context_generator = macro_context_generator ### # Methods that pass through to the connection manager @@ -276,10 +314,10 @@ def nice_connection_name(self) -> str: return conn.name @contextmanager - def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]: + def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]: try: if self.connections.query_header is not None: - self.connections.query_header.set(name, node) + 
self.connections.query_header.set(name, query_header_context) self.acquire_connection(name) yield finally: @@ -287,11 +325,6 @@ def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iter if self.connections.query_header is not None: self.connections.query_header.reset() - @contextmanager - def connection_for(self, node: ResultNode) -> Iterator[None]: - with self.connection_named(node.unique_id, node): - yield - @available.parse(lambda *a, **k: ("", empty_table())) def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None @@ -364,39 +397,6 @@ def type(cls) -> str: """ return cls.ConnectionManager.TYPE - @property - def _macro_manifest(self) -> MacroManifest: - if self._macro_manifest_lazy is None: - return self.load_macro_manifest() - return self._macro_manifest_lazy - - def check_macro_manifest(self) -> Optional[MacroManifest]: - """Return the internal manifest (used for executing macros) if it's - been initialized, otherwise return None. 
- """ - return self._macro_manifest_lazy - - def load_macro_manifest(self, base_macros_only=False) -> MacroManifest: - # base_macros_only is for the test framework - if self._macro_manifest_lazy is None: - # avoid a circular import - from dbt.parser.manifest import ManifestLoader - - manifest = ManifestLoader.load_macros( - self.config, - self.connections.set_query_header, - base_macros_only=base_macros_only, - ) - # TODO CT-211 - self._macro_manifest_lazy = manifest # type: ignore[assignment] - # TODO CT-211 - return self._macro_manifest_lazy # type: ignore[return-value] - - def clear_macro_manifest(self): - if self._macro_manifest_lazy is not None: - self._macro_manifest_lazy = None - - ### # Caching methods ### def _schema_is_cached(self, database: Optional[str], schema: str) -> bool: @@ -414,18 +414,16 @@ def _schema_is_cached(self, database: Optional[str], schema: str) -> bool: else: return True - def _get_cache_schemas(self, manifest: Manifest) -> Set[BaseRelation]: + def _get_cache_schemas(self, relation_configs: Iterable[RelationConfig]) -> Set[BaseRelation]: """Get the set of schema relations that the cache logic needs to - populate. This means only executable nodes are included. + populate. """ - # the cache only cares about executable nodes return { - self.Relation.create_from(self.config, node).without_identifier() - for node in manifest.nodes.values() - if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node) + self.Relation.create_from(quoting=self.config, relation_config=relation_config) + for relation_config in relation_configs } - def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap: + def _get_catalog_schemas(self, relation_configs: Iterable[RelationConfig]) -> SchemaSearchMap: """Get a mapping of each node's "information_schema" relations to a set of all schemas expected in that information_schema. 
@@ -435,7 +433,7 @@ def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap: lowercase strings. """ info_schema_name_map = SchemaSearchMap() - relations = self._get_catalog_relations(manifest) + relations = self._get_catalog_relations(relation_configs) for relation in relations: info_schema_name_map.add(relation) # result is a map whose keys are information_schema Relations without @@ -456,28 +454,25 @@ def _get_catalog_relations_by_info_schema( return relations_by_info_schema - def _get_catalog_relations(self, manifest: Manifest) -> List[BaseRelation]: - - nodes = chain( - [ - node - for node in manifest.nodes.values() - if (node.is_relational and not node.is_ephemeral_model) - ], - manifest.sources.values(), - ) - - relations = [self.Relation.create_from(self.config, n) for n in nodes] + def _get_catalog_relations( + self, relation_configs: Iterable[RelationConfig] + ) -> List[BaseRelation]: + relations = [ + self.Relation.create_from(quoting=self.config, relation_config=relation_config) + for relation_config in relation_configs + ] return relations def _relations_cache_for_schemas( - self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None + self, + relation_configs: Iterable[RelationConfig], + cache_schemas: Optional[Set[BaseRelation]] = None, ) -> None: """Populate the relations cache for the given schemas. Returns an iterable of the schemas populated, as strings. 
""" if not cache_schemas: - cache_schemas = self._get_cache_schemas(manifest) + cache_schemas = self._get_cache_schemas(relation_configs) with executor(self.config) as tpe: futures: List[Future[List[BaseRelation]]] = [] for cache_schema in cache_schemas: @@ -506,7 +501,7 @@ def _relations_cache_for_schemas( def set_relations_cache( self, - manifest: Manifest, + relation_configs: Iterable[RelationConfig], clear: bool = False, required_schemas: Optional[Set[BaseRelation]] = None, ) -> None: @@ -516,7 +511,7 @@ def set_relations_cache( with self.cache.lock: if clear: self.cache.clear() - self._relations_cache_for_schemas(manifest, required_schemas) + self._relations_cache_for_schemas(relation_configs, required_schemas) @available def cache_added(self, relation: Optional[BaseRelation]) -> str: @@ -1051,11 +1046,10 @@ def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[ def execute_macro( self, macro_name: str, - manifest: Optional[Manifest] = None, + macro_resolver: Optional[MacroResolverProtocol] = None, project: Optional[str] = None, context_override: Optional[Dict[str, Any]] = None, kwargs: Optional[Dict[str, Any]] = None, - text_only_columns: Optional[Iterable[str]] = None, ) -> AttrDict: """Look macro_name up in the manifest and execute its results. 
@@ -1075,13 +1069,14 @@ def execute_macro( if context_override is None: context_override = {} - if manifest is None: - # TODO CT-211 - manifest = self._macro_manifest # type: ignore[assignment] - # TODO CT-211 - macro = manifest.find_macro_by_name( # type: ignore[union-attr] - macro_name, self.config.project_name, project - ) + resolver = macro_resolver or self._macro_resolver + if resolver is None: + raise DbtInternalError("Macro resolver was None when calling execute_macro!") + + if self._macro_context_generator is None: + raise DbtInternalError("Macro context generator was None when calling execute_macro!") + + macro = resolver.find_macro_by_name(macro_name, self.config.project_name, project) if macro is None: if project is None: package_name = "any package" @@ -1093,27 +1088,20 @@ def execute_macro( macro_name, package_name ) ) - # This causes a reference cycle, as generate_runtime_macro_context() - # ends up calling get_adapter, so the import has to be here. - from dbt.context.providers import generate_runtime_macro_context - - macro_context = generate_runtime_macro_context( - # TODO CT-211 - macro=macro, - config=self.config, - manifest=manifest, # type: ignore[arg-type] - package_name=project, - ) + + macro_context = self._macro_context_generator(macro, self.config, resolver, project) macro_context.update(context_override) - macro_function = MacroGenerator(macro, macro_context) + macro_function = CallableMacroGenerator(macro, macro_context) with self.connections.exception_handler(f"macro {macro_name}"): result = macro_function(**kwargs) return result @classmethod - def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate.Table: + def _catalog_filter_table( + cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]] + ) -> agate.Table: """Filter the table as appropriate for catalog entries. Subclasses can override this to change filtering rules on a per-adapter basis. 
""" @@ -1123,50 +1111,41 @@ def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate. table.column_names, text_only_columns=["table_database", "table_schema", "table_name"], ) - return table.where(_catalog_filter_schemas(manifest)) + return table.where(_catalog_filter_schemas(used_schemas)) def _get_one_catalog( self, information_schema: InformationSchema, schemas: Set[str], - manifest: Manifest, + used_schemas: FrozenSet[Tuple[str, str]], ) -> agate.Table: kwargs = {"information_schema": information_schema, "schemas": schemas} - table = self.execute_macro( - GET_CATALOG_MACRO_NAME, - kwargs=kwargs, - # pass in the full manifest so we get any local project - # overrides - manifest=manifest, - ) + table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs) - results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type] + results = self._catalog_filter_table(table, used_schemas) # type: ignore[arg-type] return results def _get_one_catalog_by_relations( self, information_schema: InformationSchema, relations: List[BaseRelation], - manifest: Manifest, + used_schemas: FrozenSet[Tuple[str, str]], ) -> agate.Table: kwargs = { "information_schema": information_schema, "relations": relations, } - table = self.execute_macro( - GET_CATALOG_RELATIONS_MACRO_NAME, - kwargs=kwargs, - # pass in the full manifest, so we get any local project - # overrides - manifest=manifest, - ) + table = self.execute_macro(GET_CATALOG_RELATIONS_MACRO_NAME, kwargs=kwargs) - results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type] + results = self._catalog_filter_table(table, used_schemas) # type: ignore[arg-type] return results def get_filtered_catalog( - self, manifest: Manifest, relations: Optional[Set[BaseRelation]] = None + self, + relation_configs: Iterable[RelationConfig], + used_schemas: FrozenSet[Tuple[str, str]], + relations: Optional[Set[BaseRelation]] = None, ): catalogs: agate.Table if ( @@ -1175,11 +1154,11 @@ def 
get_filtered_catalog( or not self.supports(Capability.SchemaMetadataByRelations) ): # Do it the traditional way. We get the full catalog. - catalogs, exceptions = self.get_catalog(manifest) + catalogs, exceptions = self.get_catalog(relation_configs, used_schemas) else: # Do it the new way. We try to save time by selecting information # only for the exact set of relations we are interested in. - catalogs, exceptions = self.get_catalog_by_relations(manifest, relations) + catalogs, exceptions = self.get_catalog_by_relations(used_schemas, relations) if relations and catalogs: relation_map = { @@ -1207,16 +1186,20 @@ def in_map(row: agate.Row): def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]): pass - def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]: + def get_catalog( + self, + relation_configs: Iterable[RelationConfig], + used_schemas: FrozenSet[Tuple[str, str]], + ) -> Tuple[agate.Table, List[Exception]]: with executor(self.config) as tpe: futures: List[Future[agate.Table]] = [] - schema_map: SchemaSearchMap = self._get_catalog_schemas(manifest) + schema_map: SchemaSearchMap = self._get_catalog_schemas(relation_configs) for info, schemas in schema_map.items(): if len(schemas) == 0: continue name = ".".join([str(info.database), "information_schema"]) fut = tpe.submit_connected( - self, name, self._get_one_catalog, info, schemas, manifest + self, name, self._get_one_catalog, info, schemas, used_schemas ) futures.append(fut) @@ -1224,7 +1207,7 @@ def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]] return catalogs, exceptions def get_catalog_by_relations( - self, manifest: Manifest, relations: Set[BaseRelation] + self, used_schemas: FrozenSet[Tuple[str, str]], relations: Set[BaseRelation] ) -> Tuple[agate.Table, List[Exception]]: with executor(self.config) as tpe: futures: List[Future[agate.Table]] = [] @@ -1238,7 +1221,7 @@ def get_catalog_by_relations( 
self._get_one_catalog_by_relations, info_schema, relations, - manifest, + used_schemas, ) futures.append(fut) @@ -1254,7 +1237,7 @@ def calculate_freshness( source: BaseRelation, loaded_at_field: str, filter: Optional[str], - manifest: Optional[Manifest] = None, + macro_resolver: Optional[MacroResolverProtocol] = None, ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]: """Calculate the freshness of sources in dbt, and return it""" kwargs: Dict[str, Any] = { @@ -1270,9 +1253,11 @@ def calculate_freshness( AttrDict, # current: contains AdapterResponse + agate.Table agate.Table, # previous: just table ] - result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest) + result = self.execute_macro( + FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver + ) if isinstance(result, agate.Table): - deprecations.warn("collect-freshness-return-signature") + warn_or_error(CollectFreshnessReturnSignature()) adapter_response = None table = result else: @@ -1300,14 +1285,14 @@ def calculate_freshness( def calculate_freshness_from_metadata( self, source: BaseRelation, - manifest: Optional[Manifest] = None, + macro_resolver: Optional[MacroResolverProtocol] = None, ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]: kwargs: Dict[str, Any] = { "information_schema": source.information_schema_only(), "relations": [source], } result = self.execute_macro( - GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, manifest=manifest + GET_RELATION_LAST_MODIFIED_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver ) adapter_response, table = result.response, result.table # type: ignore[attr-defined] @@ -1361,11 +1346,6 @@ def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None: """ pass - def get_compiler(self): - from dbt.compilation import Compiler - - return Compiler(self.config) - # Methods used in adapter tests def update_column_sql( self, @@ -1485,7 +1465,7 @@ def get_incremental_strategy_macro(self, model_context, 
strategy: str): strategy = strategy.replace("+", "_") macro_name = f"get_incremental_{strategy}_sql" - # The model_context should have MacroGenerator callable objects for all macros + # The model_context should have callable objects for all macros if macro_name not in model_context: raise DbtRuntimeError( 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format( diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py index bc5b7f0a6b2..12f318d0c18 100644 --- a/core/dbt/adapters/base/meta.py +++ b/core/dbt/adapters/base/meta.py @@ -1,9 +1,8 @@ import abc from functools import wraps from typing import Callable, Optional, Any, FrozenSet, Dict, Set - -from dbt.deprecations import warn, renamed_method - +from dbt.common.events.functions import warn_or_error +from dbt.adapters.events.types import AdapterDeprecationWarning Decorator = Callable[[Any], Callable] @@ -62,11 +61,12 @@ def my_old_slow_method(self, arg): def wrapper(func): func_name = func.__name__ - renamed_method(func_name, supported_name) @wraps(func) def inner(*args, **kwargs): - warn("adapter:{}".format(func_name)) + warn_or_error( + AdapterDeprecationWarning(old_name=func_name, new_name=supported_name) + ) return func(*args, **kwargs) if parse_replacement: diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py index 5faa2163a4a..dc41fb68110 100644 --- a/core/dbt/adapters/base/plugin.py +++ b/core/dbt/adapters/base/plugin.py @@ -1,20 +1,10 @@ from typing import List, Optional, Type +from pathlib import Path from dbt.adapters.base import Credentials -from dbt.exceptions import CompilationError from dbt.adapters.protocol import AdapterProtocol -def project_name_from_path(include_path: str) -> str: - # avoid an import cycle - from dbt.config.project import PartialProject - - partial = PartialProject.from_project_root(include_path) - if partial.project_name is None: - raise CompilationError(f"Invalid project at {include_path}: name not 
set!") - return partial.project_name - - class AdapterPlugin: """Defines the basic requirements for a dbt adapter plugin. @@ -29,12 +19,13 @@ def __init__( credentials: Type[Credentials], include_path: str, dependencies: Optional[List[str]] = None, + project_name: Optional[str] = None, ) -> None: self.adapter: Type[AdapterProtocol] = adapter self.credentials: Type[Credentials] = credentials self.include_path: str = include_path - self.project_name: str = project_name_from_path(include_path) + self.project_name: str = project_name or f"dbt_{Path(include_path).name}" self.dependencies: List[str] if dependencies is None: self.dependencies = [] diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py index f1fde8dc027..b5f64d6214c 100644 --- a/core/dbt/adapters/base/query_headers.py +++ b/core/dbt/adapters/base/query_headers.py @@ -1,21 +1,17 @@ from threading import local from typing import Optional, Callable, Dict, Any -from dbt.clients.jinja import QueryStringGenerator +from dbt.adapters.clients.jinja import QueryStringGenerator +from dbt.adapters.contracts.connection import AdapterRequiredConfig, QueryComment +from dbt.common.exceptions import DbtRuntimeError -from dbt.context.manifest import generate_query_header_context -from dbt.contracts.connection import AdapterRequiredConfig, QueryComment -from dbt.contracts.graph.nodes import ResultNode -from dbt.contracts.graph.manifest import Manifest -from dbt.exceptions import DbtRuntimeError - -class NodeWrapper: - def __init__(self, node) -> None: - self._inner_node = node +class QueryHeaderContextWrapper: + def __init__(self, context) -> None: + self._inner_context = context def __getattr__(self, name): - return getattr(self._inner_node, name, "") + return getattr(self._inner_context, name, "") class _QueryComment(local): @@ -53,13 +49,15 @@ def set(self, comment: Optional[str], append: bool): self.append = append -QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str] 
+QueryStringFunc = Callable[[str, Optional[QueryHeaderContextWrapper]], str] class MacroQueryStringSetter: - def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None: - self.manifest = manifest + def __init__( + self, config: AdapterRequiredConfig, query_header_context: Dict[str, Any] + ) -> None: self.config = config + self._query_header_context = query_header_context comment_macro = self._get_comment_macro() self.generator: QueryStringFunc = lambda name, model: "" @@ -82,7 +80,7 @@ def _get_comment_macro(self) -> Optional[str]: return self.config.query_comment.comment def _get_context(self) -> Dict[str, Any]: - return generate_query_header_context(self.config, self.manifest) + return self._query_header_context def add(self, sql: str) -> str: return self.comment.add(sql) @@ -90,10 +88,10 @@ def add(self, sql: str) -> str: def reset(self): self.set("master", None) - def set(self, name: str, node: Optional[ResultNode]): - wrapped: Optional[NodeWrapper] = None - if node is not None: - wrapped = NodeWrapper(node) + def set(self, name: str, query_header_context: Any): + wrapped: Optional[QueryHeaderContextWrapper] = None + if query_header_context is not None: + wrapped = QueryHeaderContextWrapper(query_header_context) comment_str = self.generator(name, wrapped) append = False diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py index e1aabfe85ed..70c1151f570 100644 --- a/core/dbt/adapters/base/relation.py +++ b/core/dbt/adapters/base/relation.py @@ -2,14 +2,8 @@ from dataclasses import dataclass, field from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set, Union, FrozenSet -from dbt.contracts.graph.nodes import ( - SourceDefinition, - ManifestNode, - ResultNode, - ParsedNode, - UnitTestSourceDefinition, -) -from dbt.contracts.relation import ( +from dbt.adapters.contracts.relation import ( + RelationConfig, RelationType, ComponentName, HasQuoting, @@ -17,15 +11,11 @@ Policy, Path, ) -from 
dbt.exceptions import ( - ApproximateMatchError, - DbtInternalError, - MultipleDatabasesNotAllowedError, -) -from dbt.node_types import NodeType -from dbt.utils import filter_null_values, deep_merge, classproperty +from dbt.adapters.exceptions import MultipleDatabasesNotAllowedError, ApproximateMatchError +from dbt.common.utils import filter_null_values, deep_merge +from dbt.adapters.utils import classproperty -import dbt.exceptions +import dbt.common.exceptions Self = TypeVar("Self", bound="BaseRelation") @@ -107,7 +97,7 @@ def matches( if not search: # nothing was passed in - raise dbt.exceptions.DbtRuntimeError( + raise dbt.common.exceptions.DbtRuntimeError( "Tried to match relation, but no search path was passed!" ) @@ -216,85 +206,51 @@ def quoted(self, identifier): identifier=identifier, ) - @classmethod - def create_from_source( - cls: Type[Self], source: Union[SourceDefinition, UnitTestSourceDefinition], **kwargs: Any - ) -> Self: - source_quoting = source.quoting.to_dict(omit_none=True) - source_quoting.pop("column", None) - quote_policy = deep_merge( - cls.get_default_quote_policy().to_dict(omit_none=True), - source_quoting, - kwargs.get("quote_policy", {}), - ) - - return cls.create( - database=source.database, - schema=source.schema, - identifier=source.identifier, - quote_policy=quote_policy, - **kwargs, - ) - @staticmethod def add_ephemeral_prefix(name: str): return f"__dbt__cte__{name}" @classmethod - def create_ephemeral_from_node( + def create_ephemeral_from( cls: Type[Self], - config: HasQuoting, - node: ManifestNode, + relation_config: RelationConfig, limit: Optional[int] = None, ) -> Self: # Note that ephemeral models are based on the name. 
- identifier = cls.add_ephemeral_prefix(node.name) - return cls.create(type=cls.CTE, identifier=identifier, limit=limit).quote(identifier=False) + identifier = cls.add_ephemeral_prefix(relation_config.name) + return cls.create( + type=cls.CTE, + identifier=identifier, + limit=limit, + ).quote(identifier=False) @classmethod - def create_from_node( + def create_from( cls: Type[Self], - config: HasQuoting, - node, - quote_policy: Optional[Dict[str, bool]] = None, + quoting: HasQuoting, + relation_config: RelationConfig, **kwargs: Any, ) -> Self: - if quote_policy is None: - quote_policy = {} + quote_policy = kwargs.pop("quote_policy", {}) - quote_policy = dbt.utils.merge(config.quoting, quote_policy) + config_quoting = relation_config.quoting_dict + config_quoting.pop("column", None) + # precedence: kwargs quoting > relation config quoting > base quoting > default quoting + quote_policy = deep_merge( + cls.get_default_quote_policy().to_dict(omit_none=True), + quoting.quoting, + config_quoting, + quote_policy, + ) return cls.create( - database=node.database, - schema=node.schema, - identifier=node.alias, + database=relation_config.database, + schema=relation_config.schema, + identifier=relation_config.identifier, quote_policy=quote_policy, **kwargs, ) - @classmethod - def create_from( - cls: Type[Self], - config: HasQuoting, - node: ResultNode, - **kwargs: Any, - ) -> Self: - if node.resource_type == NodeType.Source or isinstance(node, UnitTestSourceDefinition): - if not ( - isinstance(node, SourceDefinition) or isinstance(node, UnitTestSourceDefinition) - ): - raise DbtInternalError( - "type mismatch, expected SourceDefinition but got {}".format(type(node)) - ) - return cls.create_from_source(node, **kwargs) - else: - # Can't use ManifestNode here because of parameterized generics - if not isinstance(node, (ParsedNode)): - raise DbtInternalError( - f"type mismatch, expected ManifestNode but got {type(node)}" - ) - return cls.create_from_node(config, node, **kwargs) - 
@classmethod def create( cls: Type[Self], @@ -404,7 +360,7 @@ class InformationSchema(BaseRelation): def __post_init__(self): if not isinstance(self.information_schema_view, (type(None), str)): - raise dbt.exceptions.CompilationError( + raise dbt.common.exceptions.CompilationError( "Got an invalid name: {}".format(self.information_schema_view) ) diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py index c5559729678..9cf21c6a791 100644 --- a/core/dbt/adapters/cache.py +++ b/core/dbt/adapters/cache.py @@ -7,17 +7,16 @@ _make_ref_key_dict, _ReferenceKey, ) -from dbt.exceptions import ( - DependentLinkNotCachedError, +from dbt.common.exceptions.cache import ( NewNameAlreadyInCacheError, - NoneRelationFoundError, ReferencedLinkNotCachedError, + DependentLinkNotCachedError, TruncatedModelNameCausedCollisionError, + NoneRelationFoundError, ) -from dbt.events.functions import fire_event, fire_event_if -from dbt.events.types import CacheAction, CacheDumpGraph -from dbt.flags import get_flags -from dbt.utils import lowercase +from dbt.common.events.functions import fire_event, fire_event_if +from dbt.adapters.events.types import CacheAction, CacheDumpGraph +from dbt.common.utils.formatting import lowercase def dot_separated(key: _ReferenceKey) -> str: @@ -165,10 +164,11 @@ class RelationsCache: :attr Set[str] schemas: The set of known/cached schemas, all lowercased. """ - def __init__(self) -> None: + def __init__(self, log_cache_events: bool = False) -> None: self.relations: Dict[_ReferenceKey, _CachedRelation] = {} self.lock = threading.RLock() self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set() + self.log_cache_events = log_cache_events def add_schema( self, @@ -318,10 +318,9 @@ def add(self, relation): :param BaseRelation relation: The underlying relation. 
""" - flags = get_flags() cached = _CachedRelation(relation) fire_event_if( - flags.LOG_CACHE_EVENTS, + self.log_cache_events, lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()), ) fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached))) @@ -329,7 +328,7 @@ def add(self, relation): with self.lock: self._setdefault(cached) fire_event_if( - flags.LOG_CACHE_EVENTS, + self.log_cache_events, lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()), ) @@ -454,9 +453,8 @@ def rename(self, old, new): ref_key_2=new_key._asdict(), ) ) - flags = get_flags() fire_event_if( - flags.LOG_CACHE_EVENTS, + self.log_cache_events, lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()), ) @@ -467,7 +465,7 @@ def rename(self, old, new): self._setdefault(_CachedRelation(new)) fire_event_if( - flags.LOG_CACHE_EVENTS, + self.log_cache_events, lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()), ) diff --git a/core/dbt/adapters/clients/__init__.py b/core/dbt/adapters/clients/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/dbt/adapters/clients/jinja.py b/core/dbt/adapters/clients/jinja.py new file mode 100644 index 00000000000..ace89c0d1d4 --- /dev/null +++ b/core/dbt/adapters/clients/jinja.py @@ -0,0 +1,23 @@ +from typing import Dict, Any +from dbt.common.clients.jinja import BaseMacroGenerator, get_environment + + +class QueryStringGenerator(BaseMacroGenerator): + def __init__(self, template_str: str, context: Dict[str, Any]) -> None: + super().__init__(context) + self.template_str: str = template_str + env = get_environment() + self.template = env.from_string( + self.template_str, + globals=self.context, + ) + + def get_name(self) -> str: + return "query_comment_macro" + + def get_template(self): + """Don't use the template cache, we don't have a node""" + return self.template + + def __call__(self, 
connection_name: str, node) -> str: + return str(self.call_macro(connection_name, node)) diff --git a/core/dbt/adapters/contracts/__init__.py b/core/dbt/adapters/contracts/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/dbt/contracts/connection.py b/core/dbt/adapters/contracts/connection.py similarity index 93% rename from core/dbt/contracts/connection.py rename to core/dbt/adapters/contracts/connection.py index 692f40f71b7..e5985682ac5 100644 --- a/core/dbt/contracts/connection.py +++ b/core/dbt/adapters/contracts/connection.py @@ -11,20 +11,26 @@ List, Callable, ) -from dbt.exceptions import DbtInternalError -from dbt.utils import translate_aliases, md5 -from dbt.events.functions import fire_event -from dbt.events.types import NewConnectionOpening -from dbt.events.contextvars import get_node_info from typing_extensions import Protocol, Annotated -from dbt.dataclass_schema import ( + +from mashumaro.jsonschema.annotations import Pattern + +from dbt.adapters.utils import translate_aliases +from dbt.common.exceptions import DbtInternalError +from dbt.common.dataclass_schema import ( dbtClassMixin, StrEnum, ExtensibleDbtClassMixin, ValidatedStringMixin, ) -from dbt.contracts.util import Replaceable -from mashumaro.jsonschema.annotations import Pattern +from dbt.common.contracts.util import Replaceable +from dbt.common.utils import md5 + +from dbt.common.events.functions import fire_event +from dbt.adapters.events.types import NewConnectionOpening + +# TODO: this is a very bad dependency - shared global state +from dbt.common.events.contextvars import get_node_info class Identifier(ValidatedStringMixin): @@ -228,3 +234,4 @@ class AdapterRequiredConfig(HasCredentials, Protocol): query_comment: QueryComment cli_vars: Dict[str, Any] target_path: str + log_cache_events: bool diff --git a/core/dbt/adapters/contracts/macros.py b/core/dbt/adapters/contracts/macros.py new file mode 100644 index 00000000000..151c9c44dde --- /dev/null +++ 
b/core/dbt/adapters/contracts/macros.py @@ -0,0 +1,11 @@ +from typing import Optional +from typing_extensions import Protocol + +from dbt.common.clients.jinja import MacroProtocol + + +class MacroResolverProtocol(Protocol): + def find_macro_by_name( + self, name: str, root_project_name: str, package: Optional[str] + ) -> Optional[MacroProtocol]: + raise NotImplementedError("find_macro_by_name not implemented") diff --git a/core/dbt/contracts/relation.py b/core/dbt/adapters/contracts/relation.py similarity index 89% rename from core/dbt/contracts/relation.py rename to core/dbt/adapters/contracts/relation.py index 52f7a07976f..aea294e922c 100644 --- a/core/dbt/contracts/relation.py +++ b/core/dbt/adapters/contracts/relation.py @@ -6,11 +6,11 @@ ) from typing_extensions import Protocol -from dbt.dataclass_schema import dbtClassMixin, StrEnum +from dbt.common.dataclass_schema import dbtClassMixin, StrEnum -from dbt.contracts.util import Replaceable -from dbt.exceptions import CompilationError, DataclassNotDictError -from dbt.utils import deep_merge +from dbt.common.contracts.util import Replaceable +from dbt.common.exceptions import CompilationError, DataclassNotDictError +from dbt.common.utils import deep_merge class RelationType(StrEnum): @@ -22,6 +22,15 @@ class RelationType(StrEnum): Ephemeral = "ephemeral" +class RelationConfig(Protocol): + name: str + database: str + schema: str + identifier: str + quoting_dict: Dict[str, bool] + config: Dict[str, str] + + class ComponentName(StrEnum): Database = "database" Schema = "schema" diff --git a/core/dbt/adapters/events/README.md b/core/dbt/adapters/events/README.md new file mode 100644 index 00000000000..cdb7852aed6 --- /dev/null +++ b/core/dbt/adapters/events/README.md @@ -0,0 +1,57 @@ +# Events Module +The Events module is responsible for communicating internal dbt structures into a consumable interface. 
Because the "event" classes are based entirely on protobuf definitions, the interface is really clearly defined, whether or not protobufs are used to consume it. We use Betterproto for compiling the protobuf message definitions into Python classes. + +# Using the Events Module +The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `common.events.functions::fire_event` is the entry point to the module from everywhere in dbt. + +# Logging +When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events and are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, because they cause a noticeable performance degradation. These events use a "fire_event_if" function. + +# Adding a New Event +* Add a new message in types.proto, and a second message with the same name + "Msg". The "Msg" message should have two fields, an "info" field of EventInfo, and a "data" field referring to the message name without "Msg" +* run the protoc compiler to update adapter_types_pb2.py: make adapter_proto_types +* Add a wrapping class in core/dbt/adapters/events/types.py with a Level superclass plus code and message methods + +We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto. + +The google protobuf interface is janky and very much non-Pythonic. The "generated" classes in types_pb2.py do not resemble regular Python classes.
They do not have normal constructors; they can only be constructed empty. They can be "filled" by setting fields individually or using a json_format method like ParseDict. We have wrapped the logging events with a class (in types.py) which allows using a constructor -- keywords only, no positional parameters. + +## Required for Every Event + +- a method `code`, that's unique across events +- assign a log level by using the Level mixin: `DebugLevel`, `InfoLevel`, `WarnLevel`, or `ErrorLevel` +- a message() + +Example +``` +class PartialParsingDeletedExposure(DebugLevel): + def code(self): + return "I049" + + def message(self) -> str: + return f"Partial parsing: deleted exposure {self.unique_id}" + +``` + + +# Adapter Maintainers +To integrate existing log messages from adapters, you likely have a line of code like this in your adapter already: +```python +from dbt.logger import GLOBAL_LOGGER as logger +``` + +Simply change it to these two lines with your adapter's database name, and all your existing call sites will now use the new system for v1.0: + +```python + +from dbt.adapters.events.logging import AdapterLogger + +logger = AdapterLogger("") +# e.g. AdapterLogger("Snowflake") +``` + +## Compiling types.proto + +After adding a new message in `adapter_types.proto`, either: +- In the repository root directory: `make adapter_proto_types` +- In the `core/dbt/adapters/events` directory: `protoc -I=. --python_out=. 
types.proto` diff --git a/core/dbt/adapters/events/__init__.py b/core/dbt/adapters/events/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/dbt/adapters/events/adapter_types.proto b/core/dbt/adapters/events/adapter_types.proto new file mode 100644 index 00000000000..aa0b507c41e --- /dev/null +++ b/core/dbt/adapters/events/adapter_types.proto @@ -0,0 +1,517 @@ +syntax = "proto3"; + +package proto_types; + +import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; + +// Common event info +message AdapterCommonEventInfo { + string name = 1; + string code = 2; + string msg = 3; + string level = 4; + string invocation_id = 5; + int32 pid = 6; + string thread = 7; + google.protobuf.Timestamp ts = 8; + map extra = 9; + string category = 10; +} + +// AdapterNodeRelation +message AdapterNodeRelation { + string database = 10; + string schema = 11; + string alias = 12; + string relation_name = 13; +} + +// NodeInfo +message AdapterNodeInfo { + string node_path = 1; + string node_name = 2; + string unique_id = 3; + string resource_type = 4; + string materialized = 5; + string node_status = 6; + string node_started_at = 7; + string node_finished_at = 8; + google.protobuf.Struct meta = 9; + AdapterNodeRelation node_relation = 10; +} + +// ReferenceKey +message ReferenceKeyMsg { + string database = 1; + string schema = 2; + string identifier = 3; +} + +// D - Deprecations + +// D005 +message AdapterDeprecationWarning { + string old_name = 1; + string new_name = 2; +} + +message AdapterDeprecationWarningMsg { + AdapterCommonEventInfo info = 1; + AdapterDeprecationWarning data = 2; +} + +// D012 +message CollectFreshnessReturnSignature { +} + +message CollectFreshnessReturnSignatureMsg { + AdapterCommonEventInfo info = 1; + CollectFreshnessReturnSignature data = 2; +} + +// E - DB Adapter + +// E001 +message AdapterEventDebug { + AdapterNodeInfo node_info = 1; + string name = 2; + string base_msg = 3; + 
google.protobuf.ListValue args = 4; +} + +message AdapterEventDebugMsg { + AdapterCommonEventInfo info = 1; + AdapterEventDebug data = 2; +} + +// E002 +message AdapterEventInfo { + AdapterNodeInfo node_info = 1; + string name = 2; + string base_msg = 3; + google.protobuf.ListValue args = 4; +} + +message AdapterEventInfoMsg { + AdapterCommonEventInfo info = 1; + AdapterEventInfo data = 2; +} + +// E003 +message AdapterEventWarning { + AdapterNodeInfo node_info = 1; + string name = 2; + string base_msg = 3; + google.protobuf.ListValue args = 4; +} + +message AdapterEventWarningMsg { + AdapterCommonEventInfo info = 1; + AdapterEventWarning data = 2; +} + +// E004 +message AdapterEventError { + AdapterNodeInfo node_info = 1; + string name = 2; + string base_msg = 3; + google.protobuf.ListValue args = 4; + string exc_info = 5; +} + +message AdapterEventErrorMsg { + AdapterCommonEventInfo info = 1; + AdapterEventError data = 2; +} + +// E005 +message NewConnection { + AdapterNodeInfo node_info = 1; + string conn_type = 2; + string conn_name = 3; +} + +message NewConnectionMsg { + AdapterCommonEventInfo info = 1; + NewConnection data = 2; +} + +// E006 +message ConnectionReused { + string conn_name = 1; + string orig_conn_name = 2; +} + +message ConnectionReusedMsg { + AdapterCommonEventInfo info = 1; + ConnectionReused data = 2; +} + +// E007 +message ConnectionLeftOpenInCleanup { + string conn_name = 1; +} + +message ConnectionLeftOpenInCleanupMsg { + AdapterCommonEventInfo info = 1; + ConnectionLeftOpenInCleanup data = 2; +} + +// E008 +message ConnectionClosedInCleanup { + string conn_name = 1; +} + +message ConnectionClosedInCleanupMsg { + AdapterCommonEventInfo info = 1; + ConnectionClosedInCleanup data = 2; +} + +// E009 +message RollbackFailed { + AdapterNodeInfo node_info = 1; + string conn_name = 2; + string exc_info = 3; +} + +message RollbackFailedMsg { + AdapterCommonEventInfo info = 1; + RollbackFailed data = 2; +} + +// E010 +message ConnectionClosed { + 
AdapterNodeInfo node_info = 1; + string conn_name = 2; +} + +message ConnectionClosedMsg { + AdapterCommonEventInfo info = 1; + ConnectionClosed data = 2; +} + +// E011 +message ConnectionLeftOpen { + AdapterNodeInfo node_info = 1; + string conn_name = 2; +} + +message ConnectionLeftOpenMsg { + AdapterCommonEventInfo info = 1; + ConnectionLeftOpen data = 2; +} + +// E012 +message Rollback { + AdapterNodeInfo node_info = 1; + string conn_name = 2; +} + +message RollbackMsg { + AdapterCommonEventInfo info = 1; + Rollback data = 2; +} + +// E013 +message CacheMiss { + string conn_name = 1; + string database = 2; + string schema = 3; +} + +message CacheMissMsg { + AdapterCommonEventInfo info = 1; + CacheMiss data = 2; +} + +// E014 +message ListRelations { + string database = 1; + string schema = 2; + repeated ReferenceKeyMsg relations = 3; +} + +message ListRelationsMsg { + AdapterCommonEventInfo info = 1; + ListRelations data = 2; +} + +// E015 +message ConnectionUsed { + AdapterNodeInfo node_info = 1; + string conn_type = 2; + string conn_name = 3; +} + +message ConnectionUsedMsg { + AdapterCommonEventInfo info = 1; + ConnectionUsed data = 2; +} + +// E016 +message SQLQuery { + AdapterNodeInfo node_info = 1; + string conn_name = 2; + string sql = 3; +} + +message SQLQueryMsg { + AdapterCommonEventInfo info = 1; + SQLQuery data = 2; +} + +// E017 +message SQLQueryStatus { + AdapterNodeInfo node_info = 1; + string status = 2; + float elapsed = 3; +} + +message SQLQueryStatusMsg { + AdapterCommonEventInfo info = 1; + SQLQueryStatus data = 2; +} + +// E018 +message SQLCommit { + AdapterNodeInfo node_info = 1; + string conn_name = 2; +} + +message SQLCommitMsg { + AdapterCommonEventInfo info = 1; + SQLCommit data = 2; +} + +// E019 +message ColTypeChange { + string orig_type = 1; + string new_type = 2; + ReferenceKeyMsg table = 3; +} + +message ColTypeChangeMsg { + AdapterCommonEventInfo info = 1; + ColTypeChange data = 2; +} + +// E020 +message SchemaCreation { + 
ReferenceKeyMsg relation = 1; +} + +message SchemaCreationMsg { + AdapterCommonEventInfo info = 1; + SchemaCreation data = 2; +} + +// E021 +message SchemaDrop { + ReferenceKeyMsg relation = 1; +} + +message SchemaDropMsg { + AdapterCommonEventInfo info = 1; + SchemaDrop data = 2; +} + +// E022 +message CacheAction { + string action = 1; + ReferenceKeyMsg ref_key = 2; + ReferenceKeyMsg ref_key_2 = 3; + ReferenceKeyMsg ref_key_3 = 4; + repeated ReferenceKeyMsg ref_list = 5; +} + +message CacheActionMsg { + AdapterCommonEventInfo info = 1; + CacheAction data = 2; +} + +// Skipping E023, E024, E025, E026, E027, E028, E029, E0230 + +// E031 +message CacheDumpGraph { + map dump = 1; + string before_after = 2; + string action = 3; +} + +message CacheDumpGraphMsg { + AdapterCommonEventInfo info = 1; + CacheDumpGraph data = 2; +} + + +// Skipping E032, E033, E034 + + + +// E034 +message AdapterRegistered { + string adapter_name = 1; + string adapter_version = 2; +} + +message AdapterRegisteredMsg { + AdapterCommonEventInfo info = 1; + AdapterRegistered data = 2; +} + +// E035 +message AdapterImportError { + string exc = 1; +} + +message AdapterImportErrorMsg { + AdapterCommonEventInfo info = 1; + AdapterImportError data = 2; +} + +// E036 +message PluginLoadError { + string exc_info = 1; +} + +message PluginLoadErrorMsg { + AdapterCommonEventInfo info = 1; + PluginLoadError data = 2; +} + +// E037 +message NewConnectionOpening { + AdapterNodeInfo node_info = 1; + string connection_state = 2; +} + +message NewConnectionOpeningMsg { + AdapterCommonEventInfo info = 1; + NewConnectionOpening data = 2; +} + +// E038 +message CodeExecution { + string conn_name = 1; + string code_content = 2; +} + +message CodeExecutionMsg { + AdapterCommonEventInfo info = 1; + CodeExecution data = 2; +} + +// E039 +message CodeExecutionStatus { + string status = 1; + float elapsed = 2; +} + +message CodeExecutionStatusMsg { + AdapterCommonEventInfo info = 1; + CodeExecutionStatus data = 2; +} + 
+// E040 +message CatalogGenerationError { + string exc = 1; +} + +message CatalogGenerationErrorMsg { + AdapterCommonEventInfo info = 1; + CatalogGenerationError data = 2; +} + +// E041 +message WriteCatalogFailure { + int32 num_exceptions = 1; +} + +message WriteCatalogFailureMsg { + AdapterCommonEventInfo info = 1; + WriteCatalogFailure data = 2; +} + +// E042 +message CatalogWritten { + string path = 1; +} + +message CatalogWrittenMsg { + AdapterCommonEventInfo info = 1; + CatalogWritten data = 2; +} + +// E043 +message CannotGenerateDocs { +} + +message CannotGenerateDocsMsg { + AdapterCommonEventInfo info = 1; + CannotGenerateDocs data = 2; +} + +// E044 +message BuildingCatalog { +} + +message BuildingCatalogMsg { + AdapterCommonEventInfo info = 1; + BuildingCatalog data = 2; +} + +// E045 +message DatabaseErrorRunningHook { + string hook_type = 1; +} + +message DatabaseErrorRunningHookMsg { + AdapterCommonEventInfo info = 1; + DatabaseErrorRunningHook data = 2; +} + +// E046 +message HooksRunning { + int32 num_hooks = 1; + string hook_type = 2; +} + +message HooksRunningMsg { + AdapterCommonEventInfo info = 1; + HooksRunning data = 2; +} + +// E047 +message FinishedRunningStats { + string stat_line = 1; + string execution = 2; + float execution_time = 3; +} + +message FinishedRunningStatsMsg { + AdapterCommonEventInfo info = 1; + FinishedRunningStats data = 2; +} + +// E048 +message ConstraintNotEnforced { + string constraint = 1; + string adapter = 2; +} + +message ConstraintNotEnforcedMsg { + AdapterCommonEventInfo info = 1; + ConstraintNotEnforced data = 2; +} + +// E049 +message ConstraintNotSupported { + string constraint = 1; + string adapter = 2; +} + +message ConstraintNotSupportedMsg { + AdapterCommonEventInfo info = 1; + ConstraintNotSupported data = 2; +} diff --git a/core/dbt/adapters/events/adapter_types_pb2.py b/core/dbt/adapters/events/adapter_types_pb2.py new file mode 100644 index 00000000000..59d665dbc1d --- /dev/null +++ 
b/core/dbt/adapters/events/adapter_types_pb2.py @@ -0,0 +1,205 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: adapter_types.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pter_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xab\x02\n\x16\x41\x64\x61pterCommonEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\x05\x65xtra\x18\t \x03(\x0b\x32..proto_types.AdapterCommonEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"]\n\x13\x41\x64\x61pterNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x9f\x02\n\x0f\x41\x64\x61pterNodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 
\x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x37\n\rnode_relation\x18\n \x01(\x0b\x32 .proto_types.AdapterNodeRelation\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x87\x01\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x93\x01\n\"CollectFreshnessReturnSignatureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x8e\x01\n\x11\x41\x64\x61pterEventDebug\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"w\n\x14\x41\x64\x61pterEventDebugMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x8d\x01\n\x10\x41\x64\x61pterEventInfo\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"u\n\x13\x41\x64\x61pterEventInfoMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x90\x01\n\x13\x41\x64\x61pterEventWarning\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 
\x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"{\n\x16\x41\x64\x61pterEventWarningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\xa0\x01\n\x11\x41\x64\x61pterEventError\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"w\n\x14\x41\x64\x61pterEventErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"f\n\rNewConnection\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"o\n\x10NewConnectionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionReusedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x8b\x01\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"\x87\x01\n\x1c\x43onnectionClosedInCleanupMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"f\n\x0eRollbackFailed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"q\n\x11RollbackFailedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"V\n\x10\x43onnectionClosed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"u\n\x13\x43onnectionClosedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"X\n\x12\x43onnectionLeftOpen\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"y\n\x15\x43onnectionLeftOpenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"N\n\x08Rollback\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"e\n\x0bRollbackMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"g\n\x0c\x43\x61\x63heMissMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10ListRelationsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1a.proto_types.ListRelations\"g\n\x0e\x43onnectionUsed\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"q\n\x11\x43onnectionUsedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"[\n\x08SQLQuery\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"e\n\x0bSQLQueryMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"b\n\x0eSQLQueryStatus\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"q\n\x11SQLQueryStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"O\n\tSQLCommit\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"g\n\x0cSQLCommitMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"o\n\x10\x43olTypeChangeMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"q\n\x11SchemaCreationMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"i\n\rSchemaDropMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"k\n\x0e\x43\x61\x63heActionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"q\n\x11\x43\x61\x63heDumpGraphMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"w\n\x14\x41\x64\x61pterRegisteredMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"y\n\x15\x41\x64\x61pterImportErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 
\x01(\t\"s\n\x12PluginLoadErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"a\n\x14NewConnectionOpening\x12/\n\tnode_info\x18\x01 \x01(\x0b\x32\x1c.proto_types.AdapterNodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"}\n\x17NewConnectionOpeningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"o\n\x10\x43odeExecutionMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"{\n\x16\x43odeExecutionStatusMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x81\x01\n\x19\x43\x61talogGenerationErrorMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"{\n\x16WriteCatalogFailureMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"q\n\x11\x43\x61talogWrittenMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"y\n\x15\x43\x61nnotGenerateDocsMsg\x12\x31\n\x04info\x18\x01 
\x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"s\n\x12\x42uildingCatalogMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"\x85\x01\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"m\n\x0fHooksRunningMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"}\n\x17\x46inishedRunningStatsMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x7f\n\x18\x43onstraintNotEnforcedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"\x81\x01\n\x19\x43onstraintNotSupportedMsg\x12\x31\n\x04info\x18\x01 \x01(\x0b\x32#.proto_types.AdapterCommonEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupportedb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._options = None + _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_options = b'8\001' + _CACHEDUMPGRAPH_DUMPENTRY._options = None + _CACHEDUMPGRAPH_DUMPENTRY._serialized_options = b'8\001' + _ADAPTERCOMMONEVENTINFO._serialized_start=100 + _ADAPTERCOMMONEVENTINFO._serialized_end=399 + _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_start=355 + _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_end=399 + _ADAPTERNODERELATION._serialized_start=401 + _ADAPTERNODERELATION._serialized_end=494 + _ADAPTERNODEINFO._serialized_start=497 + _ADAPTERNODEINFO._serialized_end=784 + _REFERENCEKEYMSG._serialized_start=786 + _REFERENCEKEYMSG._serialized_end=857 + _ADAPTERDEPRECATIONWARNING._serialized_start=859 + _ADAPTERDEPRECATIONWARNING._serialized_end=922 + _ADAPTERDEPRECATIONWARNINGMSG._serialized_start=925 + _ADAPTERDEPRECATIONWARNINGMSG._serialized_end=1060 + _COLLECTFRESHNESSRETURNSIGNATURE._serialized_start=1062 + _COLLECTFRESHNESSRETURNSIGNATURE._serialized_end=1095 + _COLLECTFRESHNESSRETURNSIGNATUREMSG._serialized_start=1098 + _COLLECTFRESHNESSRETURNSIGNATUREMSG._serialized_end=1245 + _ADAPTEREVENTDEBUG._serialized_start=1248 + _ADAPTEREVENTDEBUG._serialized_end=1390 + _ADAPTEREVENTDEBUGMSG._serialized_start=1392 + _ADAPTEREVENTDEBUGMSG._serialized_end=1511 + _ADAPTEREVENTINFO._serialized_start=1514 + _ADAPTEREVENTINFO._serialized_end=1655 + _ADAPTEREVENTINFOMSG._serialized_start=1657 + _ADAPTEREVENTINFOMSG._serialized_end=1774 + _ADAPTEREVENTWARNING._serialized_start=1777 + _ADAPTEREVENTWARNING._serialized_end=1921 + _ADAPTEREVENTWARNINGMSG._serialized_start=1923 + _ADAPTEREVENTWARNINGMSG._serialized_end=2046 + _ADAPTEREVENTERROR._serialized_start=2049 + _ADAPTEREVENTERROR._serialized_end=2209 + _ADAPTEREVENTERRORMSG._serialized_start=2211 + _ADAPTEREVENTERRORMSG._serialized_end=2330 
+ _NEWCONNECTION._serialized_start=2332 + _NEWCONNECTION._serialized_end=2434 + _NEWCONNECTIONMSG._serialized_start=2436 + _NEWCONNECTIONMSG._serialized_end=2547 + _CONNECTIONREUSED._serialized_start=2549 + _CONNECTIONREUSED._serialized_end=2610 + _CONNECTIONREUSEDMSG._serialized_start=2612 + _CONNECTIONREUSEDMSG._serialized_end=2729 + _CONNECTIONLEFTOPENINCLEANUP._serialized_start=2731 + _CONNECTIONLEFTOPENINCLEANUP._serialized_end=2779 + _CONNECTIONLEFTOPENINCLEANUPMSG._serialized_start=2782 + _CONNECTIONLEFTOPENINCLEANUPMSG._serialized_end=2921 + _CONNECTIONCLOSEDINCLEANUP._serialized_start=2923 + _CONNECTIONCLOSEDINCLEANUP._serialized_end=2969 + _CONNECTIONCLOSEDINCLEANUPMSG._serialized_start=2972 + _CONNECTIONCLOSEDINCLEANUPMSG._serialized_end=3107 + _ROLLBACKFAILED._serialized_start=3109 + _ROLLBACKFAILED._serialized_end=3211 + _ROLLBACKFAILEDMSG._serialized_start=3213 + _ROLLBACKFAILEDMSG._serialized_end=3326 + _CONNECTIONCLOSED._serialized_start=3328 + _CONNECTIONCLOSED._serialized_end=3414 + _CONNECTIONCLOSEDMSG._serialized_start=3416 + _CONNECTIONCLOSEDMSG._serialized_end=3533 + _CONNECTIONLEFTOPEN._serialized_start=3535 + _CONNECTIONLEFTOPEN._serialized_end=3623 + _CONNECTIONLEFTOPENMSG._serialized_start=3625 + _CONNECTIONLEFTOPENMSG._serialized_end=3746 + _ROLLBACK._serialized_start=3748 + _ROLLBACK._serialized_end=3826 + _ROLLBACKMSG._serialized_start=3828 + _ROLLBACKMSG._serialized_end=3929 + _CACHEMISS._serialized_start=3931 + _CACHEMISS._serialized_end=3995 + _CACHEMISSMSG._serialized_start=3997 + _CACHEMISSMSG._serialized_end=4100 + _LISTRELATIONS._serialized_start=4102 + _LISTRELATIONS._serialized_end=4200 + _LISTRELATIONSMSG._serialized_start=4202 + _LISTRELATIONSMSG._serialized_end=4313 + _CONNECTIONUSED._serialized_start=4315 + _CONNECTIONUSED._serialized_end=4418 + _CONNECTIONUSEDMSG._serialized_start=4420 + _CONNECTIONUSEDMSG._serialized_end=4533 + _SQLQUERY._serialized_start=4535 + _SQLQUERY._serialized_end=4626 + 
_SQLQUERYMSG._serialized_start=4628 + _SQLQUERYMSG._serialized_end=4729 + _SQLQUERYSTATUS._serialized_start=4731 + _SQLQUERYSTATUS._serialized_end=4829 + _SQLQUERYSTATUSMSG._serialized_start=4831 + _SQLQUERYSTATUSMSG._serialized_end=4944 + _SQLCOMMIT._serialized_start=4946 + _SQLCOMMIT._serialized_end=5025 + _SQLCOMMITMSG._serialized_start=5027 + _SQLCOMMITMSG._serialized_end=5130 + _COLTYPECHANGE._serialized_start=5132 + _COLTYPECHANGE._serialized_end=5229 + _COLTYPECHANGEMSG._serialized_start=5231 + _COLTYPECHANGEMSG._serialized_end=5342 + _SCHEMACREATION._serialized_start=5344 + _SCHEMACREATION._serialized_end=5408 + _SCHEMACREATIONMSG._serialized_start=5410 + _SCHEMACREATIONMSG._serialized_end=5523 + _SCHEMADROP._serialized_start=5525 + _SCHEMADROP._serialized_end=5585 + _SCHEMADROPMSG._serialized_start=5587 + _SCHEMADROPMSG._serialized_end=5692 + _CACHEACTION._serialized_start=5695 + _CACHEACTION._serialized_end=5917 + _CACHEACTIONMSG._serialized_start=5919 + _CACHEACTIONMSG._serialized_end=6026 + _CACHEDUMPGRAPH._serialized_start=6029 + _CACHEDUMPGRAPH._serialized_end=6181 + _CACHEDUMPGRAPH_DUMPENTRY._serialized_start=6138 + _CACHEDUMPGRAPH_DUMPENTRY._serialized_end=6181 + _CACHEDUMPGRAPHMSG._serialized_start=6183 + _CACHEDUMPGRAPHMSG._serialized_end=6296 + _ADAPTERREGISTERED._serialized_start=6298 + _ADAPTERREGISTERED._serialized_end=6364 + _ADAPTERREGISTEREDMSG._serialized_start=6366 + _ADAPTERREGISTEREDMSG._serialized_end=6485 + _ADAPTERIMPORTERROR._serialized_start=6487 + _ADAPTERIMPORTERROR._serialized_end=6520 + _ADAPTERIMPORTERRORMSG._serialized_start=6522 + _ADAPTERIMPORTERRORMSG._serialized_end=6643 + _PLUGINLOADERROR._serialized_start=6645 + _PLUGINLOADERROR._serialized_end=6680 + _PLUGINLOADERRORMSG._serialized_start=6682 + _PLUGINLOADERRORMSG._serialized_end=6797 + _NEWCONNECTIONOPENING._serialized_start=6799 + _NEWCONNECTIONOPENING._serialized_end=6896 + _NEWCONNECTIONOPENINGMSG._serialized_start=6898 + 
_NEWCONNECTIONOPENINGMSG._serialized_end=7023 + _CODEEXECUTION._serialized_start=7025 + _CODEEXECUTION._serialized_end=7081 + _CODEEXECUTIONMSG._serialized_start=7083 + _CODEEXECUTIONMSG._serialized_end=7194 + _CODEEXECUTIONSTATUS._serialized_start=7196 + _CODEEXECUTIONSTATUS._serialized_end=7250 + _CODEEXECUTIONSTATUSMSG._serialized_start=7252 + _CODEEXECUTIONSTATUSMSG._serialized_end=7375 + _CATALOGGENERATIONERROR._serialized_start=7377 + _CATALOGGENERATIONERROR._serialized_end=7414 + _CATALOGGENERATIONERRORMSG._serialized_start=7417 + _CATALOGGENERATIONERRORMSG._serialized_end=7546 + _WRITECATALOGFAILURE._serialized_start=7548 + _WRITECATALOGFAILURE._serialized_end=7593 + _WRITECATALOGFAILUREMSG._serialized_start=7595 + _WRITECATALOGFAILUREMSG._serialized_end=7718 + _CATALOGWRITTEN._serialized_start=7720 + _CATALOGWRITTEN._serialized_end=7750 + _CATALOGWRITTENMSG._serialized_start=7752 + _CATALOGWRITTENMSG._serialized_end=7865 + _CANNOTGENERATEDOCS._serialized_start=7867 + _CANNOTGENERATEDOCS._serialized_end=7887 + _CANNOTGENERATEDOCSMSG._serialized_start=7889 + _CANNOTGENERATEDOCSMSG._serialized_end=8010 + _BUILDINGCATALOG._serialized_start=8012 + _BUILDINGCATALOG._serialized_end=8029 + _BUILDINGCATALOGMSG._serialized_start=8031 + _BUILDINGCATALOGMSG._serialized_end=8146 + _DATABASEERRORRUNNINGHOOK._serialized_start=8148 + _DATABASEERRORRUNNINGHOOK._serialized_end=8193 + _DATABASEERRORRUNNINGHOOKMSG._serialized_start=8196 + _DATABASEERRORRUNNINGHOOKMSG._serialized_end=8329 + _HOOKSRUNNING._serialized_start=8331 + _HOOKSRUNNING._serialized_end=8383 + _HOOKSRUNNINGMSG._serialized_start=8385 + _HOOKSRUNNINGMSG._serialized_end=8494 + _FINISHEDRUNNINGSTATS._serialized_start=8496 + _FINISHEDRUNNINGSTATS._serialized_end=8580 + _FINISHEDRUNNINGSTATSMSG._serialized_start=8582 + _FINISHEDRUNNINGSTATSMSG._serialized_end=8707 + _CONSTRAINTNOTENFORCED._serialized_start=8709 + _CONSTRAINTNOTENFORCED._serialized_end=8769 + _CONSTRAINTNOTENFORCEDMSG._serialized_start=8771 + 
_CONSTRAINTNOTENFORCEDMSG._serialized_end=8898 + _CONSTRAINTNOTSUPPORTED._serialized_start=8900 + _CONSTRAINTNOTSUPPORTED._serialized_end=8961 + _CONSTRAINTNOTSUPPORTEDMSG._serialized_start=8964 + _CONSTRAINTNOTSUPPORTEDMSG._serialized_end=9093 +# @@protoc_insertion_point(module_scope) diff --git a/core/dbt/adapters/events/base_types.py b/core/dbt/adapters/events/base_types.py new file mode 100644 index 00000000000..3717fb44071 --- /dev/null +++ b/core/dbt/adapters/events/base_types.py @@ -0,0 +1,39 @@ +# Aliasing common Level classes in order to make custom, but not overly-verbose versions that have PROTO_TYPES_MODULE set to the adapter-specific generated types_pb2 module +from dbt.common.events.base_types import ( + BaseEvent, + DynamicLevel as CommonDyanicLevel, + TestLevel as CommonTestLevel, + DebugLevel as CommonDebugLevel, + InfoLevel as CommonInfoLevel, + WarnLevel as CommonWarnLevel, + ErrorLevel as CommonErrorLevel, +) +from dbt.adapters.events import adapter_types_pb2 + + +class AdapterBaseEvent(BaseEvent): + PROTO_TYPES_MODULE = adapter_types_pb2 + + +class DynamicLevel(CommonDyanicLevel, AdapterBaseEvent): + pass + + +class TestLevel(CommonTestLevel, AdapterBaseEvent): + pass + + +class DebugLevel(CommonDebugLevel, AdapterBaseEvent): + pass + + +class InfoLevel(CommonInfoLevel, AdapterBaseEvent): + pass + + +class WarnLevel(CommonWarnLevel, AdapterBaseEvent): + pass + + +class ErrorLevel(CommonErrorLevel, AdapterBaseEvent): + pass diff --git a/core/dbt/events/adapter_endpoint.py b/core/dbt/adapters/events/logging.py similarity index 84% rename from core/dbt/events/adapter_endpoint.py rename to core/dbt/adapters/events/logging.py index 938af608b72..f85b3358520 100644 --- a/core/dbt/events/adapter_endpoint.py +++ b/core/dbt/adapters/events/logging.py @@ -1,17 +1,18 @@ import traceback from dataclasses import dataclass -from dbt.events.functions import fire_event, EVENT_MANAGER -from dbt.events.contextvars import get_node_info -from 
dbt.events.event_handler import set_package_logging -from dbt.events.types import ( + +from dbt.adapters.events.types import ( AdapterEventDebug, AdapterEventInfo, AdapterEventWarning, AdapterEventError, ) +from dbt.common.events import get_event_manager +from dbt.common.events.contextvars import get_node_info +from dbt.common.events.event_handler import set_package_logging +from dbt.common.events.functions import fire_event -# N.B. No guarantees for what type param msg is. @dataclass class AdapterLogger: name: str @@ -63,4 +64,4 @@ def set_adapter_dependency_log_level(package_name, level): """By default, dbt suppresses non-dbt package logs. This method allows you to set the log level for a specific package. """ - set_package_logging(package_name, level, EVENT_MANAGER) + set_package_logging(package_name, level, get_event_manager()) diff --git a/core/dbt/adapters/events/types.py b/core/dbt/adapters/events/types.py new file mode 100644 index 00000000000..d3aa0a87214 --- /dev/null +++ b/core/dbt/adapters/events/types.py @@ -0,0 +1,417 @@ +from dbt.adapters.events.base_types import WarnLevel, InfoLevel, ErrorLevel, DebugLevel +from dbt.common.ui import line_wrap_message, warning_tag + + +def format_adapter_message(name, base_msg, args) -> str: + # only apply formatting if there are arguments to format. + # avoids issues like "dict: {k: v}".format() which results in `KeyError 'k'` + msg = base_msg if len(args) == 0 else base_msg.format(*args) + return f"{name} adapter: {msg}" + + +# ======================================================= +# D - Deprecations +# ======================================================= + + +class CollectFreshnessReturnSignature(WarnLevel): + def code(self) -> str: + return "D012" + + def message(self) -> str: + description = ( + "The 'collect_freshness' macro signature has changed to return the full " + "query result, rather than just a table of values. 
See the v1.5 migration guide " + "for details on how to update your custom macro: https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.5" + ) + return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) + + +class AdapterDeprecationWarning(WarnLevel): + def code(self) -> str: + return "D005" + + def message(self) -> str: + description = ( + f"The adapter function `adapter.{self.old_name}` is deprecated and will be removed in " + f"a future release of dbt. Please use `adapter.{self.new_name}` instead. " + f"\n\nDocumentation for {self.new_name} can be found here:" + f"\n\nhttps://docs.getdbt.com/docs/adapter" + ) + return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) + + +# ======================================================= +# E - DB Adapter +# ======================================================= + + +class AdapterEventDebug(DebugLevel): + def code(self) -> str: + return "E001" + + def message(self) -> str: + return format_adapter_message(self.name, self.base_msg, self.args) + + +class AdapterEventInfo(InfoLevel): + def code(self) -> str: + return "E002" + + def message(self) -> str: + return format_adapter_message(self.name, self.base_msg, self.args) + + +class AdapterEventWarning(WarnLevel): + def code(self) -> str: + return "E003" + + def message(self) -> str: + return format_adapter_message(self.name, self.base_msg, self.args) + + +class AdapterEventError(ErrorLevel): + def code(self) -> str: + return "E004" + + def message(self) -> str: + return format_adapter_message(self.name, self.base_msg, self.args) + + +class NewConnection(DebugLevel): + def code(self) -> str: + return "E005" + + def message(self) -> str: + return f"Acquiring new {self.conn_type} connection '{self.conn_name}'" + + +class ConnectionReused(DebugLevel): + def code(self) -> str: + return "E006" + + def message(self) -> str: + return f"Re-using an available connection from the pool (formerly {self.orig_conn_name}, now 
{self.conn_name})" + + +class ConnectionLeftOpenInCleanup(DebugLevel): + def code(self) -> str: + return "E007" + + def message(self) -> str: + return f"Connection '{self.conn_name}' was left open." + + +class ConnectionClosedInCleanup(DebugLevel): + def code(self) -> str: + return "E008" + + def message(self) -> str: + return f"Connection '{self.conn_name}' was properly closed." + + +class RollbackFailed(DebugLevel): + def code(self) -> str: + return "E009" + + def message(self) -> str: + return f"Failed to rollback '{self.conn_name}'" + + +class ConnectionClosed(DebugLevel): + def code(self) -> str: + return "E010" + + def message(self) -> str: + return f"On {self.conn_name}: Close" + + +class ConnectionLeftOpen(DebugLevel): + def code(self) -> str: + return "E011" + + def message(self) -> str: + return f"On {self.conn_name}: No close available on handle" + + +class Rollback(DebugLevel): + def code(self) -> str: + return "E012" + + def message(self) -> str: + return f"On {self.conn_name}: ROLLBACK" + + +class CacheMiss(DebugLevel): + def code(self) -> str: + return "E013" + + def message(self) -> str: + return ( + f'On "{self.conn_name}": cache miss for schema ' + f'"{self.database}.{self.schema}", this is inefficient' + ) + + +class ListRelations(DebugLevel): + def code(self) -> str: + return "E014" + + def message(self) -> str: + identifiers_str = ", ".join(r.identifier for r in self.relations) + return f"While listing relations in database={self.database}, schema={self.schema}, found: {identifiers_str}" + + +class ConnectionUsed(DebugLevel): + def code(self) -> str: + return "E015" + + def message(self) -> str: + return f'Using {self.conn_type} connection "{self.conn_name}"' + + +class SQLQuery(DebugLevel): + def code(self) -> str: + return "E016" + + def message(self) -> str: + return f"On {self.conn_name}: {self.sql}" + + +class SQLQueryStatus(DebugLevel): + def code(self) -> str: + return "E017" + + def message(self) -> str: + return f"SQL status: 
{self.status} in {self.elapsed} seconds" + + +class SQLCommit(DebugLevel): + def code(self) -> str: + return "E018" + + def message(self) -> str: + return f"On {self.conn_name}: COMMIT" + + +class ColTypeChange(DebugLevel): + def code(self) -> str: + return "E019" + + def message(self) -> str: + return f"Changing col type from {self.orig_type} to {self.new_type} in table {self.table}" + + +class SchemaCreation(DebugLevel): + def code(self) -> str: + return "E020" + + def message(self) -> str: + return f'Creating schema "{self.relation}"' + + +class SchemaDrop(DebugLevel): + def code(self) -> str: + return "E021" + + def message(self) -> str: + return f'Dropping schema "{self.relation}".' + + +class CacheAction(DebugLevel): + def code(self) -> str: + return "E022" + + def format_ref_key(self, ref_key) -> str: + return f"(database={ref_key.database}, schema={ref_key.schema}, identifier={ref_key.identifier})" + + def message(self) -> str: + ref_key = self.format_ref_key(self.ref_key) + ref_key_2 = self.format_ref_key(self.ref_key_2) + ref_key_3 = self.format_ref_key(self.ref_key_3) + ref_list = [] + for rfk in self.ref_list: + ref_list.append(self.format_ref_key(rfk)) + if self.action == "add_link": + return f"adding link, {ref_key} references {ref_key_2}" + elif self.action == "add_relation": + return f"adding relation: {ref_key}" + elif self.action == "drop_missing_relation": + return f"dropped a nonexistent relationship: {ref_key}" + elif self.action == "drop_cascade": + return f"drop {ref_key} is cascading to {ref_list}" + elif self.action == "drop_relation": + return f"Dropping relation: {ref_key}" + elif self.action == "update_reference": + return ( + f"updated reference from {ref_key} -> {ref_key_3} to " + f"{ref_key_2} -> {ref_key_3}" + ) + elif self.action == "temporary_relation": + return f"old key {ref_key} not found in self.relations, assuming temporary" + elif self.action == "rename_relation": + return f"Renaming relation {ref_key} to {ref_key_2}" + elif 
self.action == "uncached_relation": + return ( + f"{ref_key_2} references {ref_key} " + f"but {self.ref_key.database}.{self.ref_key.schema}" + "is not in the cache, skipping assumed external relation" + ) + else: + return ref_key + + +# Skipping E023, E024, E025, E026, E027, E028, E029, E030 + + +class CacheDumpGraph(DebugLevel): + def code(self) -> str: + return "E031" + + def message(self) -> str: + return f"dump {self.before_after} {self.action} : {self.dump}" + + +# Skipping E032, E033, E034 + + +class AdapterRegistered(InfoLevel): + def code(self) -> str: + return "E034" + + def message(self) -> str: + return f"Registered adapter: {self.adapter_name}{self.adapter_version}" + + +class AdapterImportError(InfoLevel): + def code(self) -> str: + return "E035" + + def message(self) -> str: + return f"Error importing adapter: {self.exc}" + + +class PluginLoadError(DebugLevel): + def code(self) -> str: + return "E036" + + def message(self) -> str: + return f"{self.exc_info}" + + +class NewConnectionOpening(DebugLevel): + def code(self) -> str: + return "E037" + + def message(self) -> str: + return f"Opening a new connection, currently in state {self.connection_state}" + + +class CodeExecution(DebugLevel): + def code(self) -> str: + return "E038" + + def message(self) -> str: + return f"On {self.conn_name}: {self.code_content}" + + +class CodeExecutionStatus(DebugLevel): + def code(self) -> str: + return "E039" + + def message(self) -> str: + return f"Execution status: {self.status} in {self.elapsed} seconds" + + +class CatalogGenerationError(WarnLevel): + def code(self) -> str: + return "E040" + + def message(self) -> str: + return f"Encountered an error while generating catalog: {self.exc}" + + +class WriteCatalogFailure(ErrorLevel): + def code(self) -> str: + return "E041" + + def message(self) -> str: + return ( + f"dbt encountered {self.num_exceptions} failure{(self.num_exceptions != 1) * 's'} " + "while writing the catalog" + ) + + +class 
CatalogWritten(InfoLevel): + def code(self) -> str: + return "E042" + + def message(self) -> str: + return f"Catalog written to {self.path}" + + +class CannotGenerateDocs(InfoLevel): + def code(self) -> str: + return "E043" + + def message(self) -> str: + return "compile failed, cannot generate docs" + + +class BuildingCatalog(InfoLevel): + def code(self) -> str: + return "E044" + + def message(self) -> str: + return "Building catalog" + + +class DatabaseErrorRunningHook(InfoLevel): + def code(self) -> str: + return "E045" + + def message(self) -> str: + return f"Database error while running {self.hook_type}" + + +class HooksRunning(InfoLevel): + def code(self) -> str: + return "E046" + + def message(self) -> str: + plural = "hook" if self.num_hooks == 1 else "hooks" + return f"Running {self.num_hooks} {self.hook_type} {plural}" + + +class FinishedRunningStats(InfoLevel): + def code(self) -> str: + return "E047" + + def message(self) -> str: + return f"Finished running {self.stat_line}{self.execution} ({self.execution_time:0.2f}s)." + + +class ConstraintNotEnforced(WarnLevel): + def code(self) -> str: + return "E048" + + def message(self) -> str: + msg = ( + f"The constraint type {self.constraint} is not enforced by {self.adapter}. " + "The constraint will be included in this model's DDL statement, but it will not " + "guarantee anything about the underlying data. Set 'warn_unenforced: false' on " + "this constraint to ignore this warning." + ) + return line_wrap_message(warning_tag(msg)) + + +class ConstraintNotSupported(WarnLevel): + def code(self) -> str: + return "E049" + + def message(self) -> str: + msg = ( + f"The constraint type {self.constraint} is not supported by {self.adapter}, and will " + "be ignored. Set 'warn_unsupported: false' on this constraint to ignore this warning." 
+ ) + return line_wrap_message(warning_tag(msg)) diff --git a/core/dbt/adapters/exceptions/__init__.py b/core/dbt/adapters/exceptions/__init__.py new file mode 100644 index 00000000000..9b36beb21b3 --- /dev/null +++ b/core/dbt/adapters/exceptions/__init__.py @@ -0,0 +1,4 @@ +from dbt.adapters.exceptions.compilation import * # noqa +from dbt.adapters.exceptions.alias import * # noqa +from dbt.adapters.exceptions.database import * # noqa +from dbt.adapters.exceptions.connection import * # noqa diff --git a/core/dbt/adapters/exceptions/alias.py b/core/dbt/adapters/exceptions/alias.py new file mode 100644 index 00000000000..68a677088d2 --- /dev/null +++ b/core/dbt/adapters/exceptions/alias.py @@ -0,0 +1,24 @@ +from typing import Mapping, Any + +from dbt.common.exceptions import DbtValidationError + + +class AliasError(DbtValidationError): + pass + + +# core level exceptions +class DuplicateAliasError(AliasError): + def __init__(self, kwargs: Mapping[str, Any], aliases: Mapping[str, str], canonical_key: str): + self.kwargs = kwargs + self.aliases = aliases + self.canonical_key = canonical_key + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + # dupe found: go through the dict so we can have a nice-ish error + key_names = ", ".join( + "{}".format(k) for k in self.kwargs if self.aliases.get(k) == self.canonical_key + ) + msg = f'Got duplicate keys: ({key_names}) all map to "{self.canonical_key}"' + return msg diff --git a/core/dbt/adapters/exceptions/compilation.py b/core/dbt/adapters/exceptions/compilation.py new file mode 100644 index 00000000000..c87e74b05e7 --- /dev/null +++ b/core/dbt/adapters/exceptions/compilation.py @@ -0,0 +1,255 @@ +from typing import List, Mapping, Any + +from dbt.common.exceptions import CompilationError, DbtDatabaseError +from dbt.common.ui import line_wrap_message + + +class MissingConfigError(CompilationError): + def __init__(self, unique_id: str, name: str): + self.unique_id = unique_id + self.name = name + msg 
= ( + f"Model '{self.unique_id}' does not define a required config parameter '{self.name}'." + ) + super().__init__(msg=msg) + + +class MultipleDatabasesNotAllowedError(CompilationError): + def __init__(self, databases): + self.databases = databases + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = str(self.databases) + return msg + + +class ApproximateMatchError(CompilationError): + def __init__(self, target, relation): + self.target = target + self.relation = relation + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + "When searching for a relation, dbt found an approximate match. " + "Instead of guessing \nwhich relation to use, dbt will move on. " + f"Please delete {self.relation}, or rename it to be less ambiguous." + f"\nSearched for: {self.target}\nFound: {self.relation}" + ) + + return msg + + +class SnapshotTargetIncompleteError(CompilationError): + def __init__(self, extra: List, missing: List): + self.extra = extra + self.missing = missing + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + 'Snapshot target has ("{}") but not ("{}") - is it an ' + "unmigrated previous version archive?".format( + '", "'.join(self.extra), '", "'.join(self.missing) + ) + ) + return msg + + +class DuplicateMacroInPackageError(CompilationError): + def __init__(self, macro, macro_mapping: Mapping): + self.macro = macro + self.macro_mapping = macro_mapping + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + other_path = self.macro_mapping[self.macro.unique_id].original_file_path + # subtract 2 for the "Compilation Error" indent + # note that the line wrap eats newlines, so if you want newlines, + # this is the result :( + msg = line_wrap_message( + f"""\ + dbt found two macros named "{self.macro.name}" in the project + "{self.macro.package_name}". 
+ + + To fix this error, rename or remove one of the following + macros: + + - {self.macro.original_file_path} + + - {other_path} + """, + subtract=2, + ) + return msg + + +class DuplicateMaterializationNameError(CompilationError): + def __init__(self, macro, other_macro): + self.macro = macro + self.other_macro = other_macro + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + macro_name = self.macro.name + macro_package_name = self.macro.package_name + other_package_name = self.other_macro.macro.package_name + + msg = ( + f"Found two materializations with the name {macro_name} (packages " + f"{macro_package_name} and {other_package_name}). dbt cannot resolve " + "this ambiguity" + ) + return msg + + +class ColumnTypeMissingError(CompilationError): + def __init__(self, column_names: List): + self.column_names = column_names + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + "Contracted models require data_type to be defined for each column. " + "Please ensure that the column name and data_type are defined within " + f"the YAML configuration for the {self.column_names} column(s)." + ) + return msg + + +class MacroNotFoundError(CompilationError): + def __init__(self, node, target_macro_id: str): + self.node = node + self.target_macro_id = target_macro_id + msg = f"'{self.node.unique_id}' references macro '{self.target_macro_id}' which is not defined!" + + super().__init__(msg=msg) + + +class MissingMaterializationError(CompilationError): + def __init__(self, materialization, adapter_type): + self.materialization = materialization + self.adapter_type = adapter_type + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + valid_types = "'default'" + + if self.adapter_type != "default": + valid_types = f"'default' and '{self.adapter_type}'" + + msg = f"No materialization '{self.materialization}' was found for adapter {self.adapter_type}! 
(searched types {valid_types})" + return msg + + +class SnapshotTargetNotSnapshotTableError(CompilationError): + def __init__(self, missing: List): + self.missing = missing + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = 'Snapshot target is not a snapshot table (missing "{}")'.format( + '", "'.join(self.missing) + ) + return msg + + +class NullRelationDropAttemptedError(CompilationError): + def __init__(self, name: str): + self.name = name + self.msg = f"Attempted to drop a null relation for {self.name}" + super().__init__(msg=self.msg) + + +class NullRelationCacheAttemptedError(CompilationError): + def __init__(self, name: str): + self.name = name + self.msg = f"Attempted to cache a null relation for {self.name}" + super().__init__(msg=self.msg) + + +class RelationTypeNullError(CompilationError): + def __init__(self, relation): + self.relation = relation + self.msg = f"Tried to drop relation {self.relation}, but its type is null." + super().__init__(msg=self.msg) + + +class MaterializationNotAvailableError(CompilationError): + def __init__(self, materialization, adapter_type: str): + self.materialization = materialization + self.adapter_type = adapter_type + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = f"Materialization '{self.materialization}' is not available for {self.adapter_type}!" + return msg + + +class RelationReturnedMultipleResultsError(CompilationError): + def __init__(self, kwargs: Mapping[str, Any], matches: List): + self.kwargs = kwargs + self.matches = matches + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + "get_relation returned more than one relation with the given args. " + "Please specify a database or schema to narrow down the result set." 
+ f"\n{self.kwargs}\n\n{self.matches}" + ) + return msg + + +class UnexpectedNonTimestampError(DbtDatabaseError): + def __init__(self, field_name: str, source, dt: Any): + self.field_name = field_name + self.source = source + self.type_name = type(dt).__name__ + msg = ( + f"Expected a timestamp value when querying field '{self.field_name}' of table " + f"{self.source} but received value of type '{self.type_name}' instead" + ) + super().__init__(msg) + + +class RenameToNoneAttemptedError(CompilationError): + def __init__(self, src_name: str, dst_name: str, name: str): + self.src_name = src_name + self.dst_name = dst_name + self.name = name + self.msg = f"Attempted to rename {self.src_name} to {self.dst_name} for {self.name}" + super().__init__(msg=self.msg) + + +class QuoteConfigTypeError(CompilationError): + def __init__(self, quote_config: Any): + self.quote_config = quote_config + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + 'The seed configuration value of "quote_columns" has an ' + f"invalid type {type(self.quote_config)}" + ) + return msg + + +class RelationWrongTypeError(CompilationError): + def __init__(self, relation, expected_type, model=None): + self.relation = relation + self.expected_type = expected_type + self.model = model + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + f"Trying to create {self.expected_type} {self.relation}, " + f"but it currently exists as a {self.relation.type}. Either " + f"drop {self.relation} manually, or run dbt with " + "`--full-refresh` and dbt will drop it for you." 
+ ) + + return msg diff --git a/core/dbt/adapters/exceptions/connection.py b/core/dbt/adapters/exceptions/connection.py new file mode 100644 index 00000000000..aac55166407 --- /dev/null +++ b/core/dbt/adapters/exceptions/connection.py @@ -0,0 +1,16 @@ +from typing import List + +from dbt.common.exceptions import DbtRuntimeError, DbtDatabaseError + + +class InvalidConnectionError(DbtRuntimeError): + def __init__(self, thread_id, known: List) -> None: + self.thread_id = thread_id + self.known = known + super().__init__( + msg=f"connection never acquired for thread {self.thread_id}, have {self.known}" + ) + + +class FailedToConnectError(DbtDatabaseError): + pass diff --git a/core/dbt/adapters/exceptions/database.py b/core/dbt/adapters/exceptions/database.py new file mode 100644 index 00000000000..ff177289a03 --- /dev/null +++ b/core/dbt/adapters/exceptions/database.py @@ -0,0 +1,51 @@ +from typing import Any + +from dbt.common.exceptions import NotImplementedError, CompilationError + + +class UnexpectedDbReferenceError(NotImplementedError): + def __init__(self, adapter, database, expected): + self.adapter = adapter + self.database = database + self.expected = expected + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = f"Cross-db references not allowed in {self.adapter} ({self.database} vs {self.expected})" + return msg + + +class CrossDbReferenceProhibitedError(CompilationError): + def __init__(self, adapter, exc_msg: str): + self.adapter = adapter + self.exc_msg = exc_msg + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = f"Cross-db references not allowed in adapter {self.adapter}: Got {self.exc_msg}" + return msg + + +class IndexConfigNotDictError(CompilationError): + def __init__(self, raw_index: Any): + self.raw_index = raw_index + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = ( + f"Invalid index config:\n" + f" Got: {self.raw_index}\n" + f' Expected a 
dictionary with at minimum a "columns" key' + ) + return msg + + +class IndexConfigError(CompilationError): + def __init__(self, exc: TypeError): + self.exc = exc + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + validator_msg = self.validator_error_message(self.exc) + msg = f"Could not parse index config: {validator_msg}" + return msg diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py index 513336a554d..17023d8bf64 100644 --- a/core/dbt/adapters/factory.py +++ b/core/dbt/adapters/factory.py @@ -1,3 +1,5 @@ +from multiprocessing.context import SpawnContext + import threading import traceback from contextlib import contextmanager @@ -7,13 +9,13 @@ from dbt.adapters.base.plugin import AdapterPlugin from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol -from dbt.contracts.connection import AdapterRequiredConfig, Credentials -from dbt.events.functions import fire_event -from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered -from dbt.exceptions import DbtInternalError, DbtRuntimeError -from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH -from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME -from dbt.semver import VersionSpecifier +from dbt.adapters.contracts.connection import AdapterRequiredConfig, Credentials +from dbt.common.events.functions import fire_event +from dbt.adapters.events.types import AdapterImportError, PluginLoadError, AdapterRegistered +from dbt.common.exceptions import DbtInternalError, DbtRuntimeError +from dbt.adapters.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH +from dbt.adapters.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME +from dbt.common.semver import VersionSpecifier Adapter = AdapterProtocol @@ -87,7 +89,7 @@ def load_plugin(self, name: str) -> Type[Credentials]: return plugin.credentials - def register_adapter(self, config: AdapterRequiredConfig) 
-> None: + def register_adapter(self, config: AdapterRequiredConfig, mp_context: SpawnContext) -> None: adapter_name = config.credentials.type adapter_type = self.get_adapter_class_by_name(adapter_name) adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version @@ -102,7 +104,7 @@ def register_adapter(self, config: AdapterRequiredConfig) -> None: # this shouldn't really happen... return - adapter: Adapter = adapter_type(config) # type: ignore + adapter: Adapter = adapter_type(config, mp_context) # type: ignore self.adapters[adapter_name] = adapter def lookup_adapter(self, adapter_name: str) -> Adapter: @@ -172,8 +174,8 @@ def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]: FACTORY: AdapterContainer = AdapterContainer() -def register_adapter(config: AdapterRequiredConfig) -> None: - FACTORY.register_adapter(config) +def register_adapter(config: AdapterRequiredConfig, mp_context: SpawnContext) -> None: + FACTORY.register_adapter(config, mp_context) def get_adapter(config: AdapterRequiredConfig): diff --git a/core/dbt/adapters/include/global_project/__init__.py b/core/dbt/adapters/include/global_project/__init__.py new file mode 100644 index 00000000000..4043ffebb6e --- /dev/null +++ b/core/dbt/adapters/include/global_project/__init__.py @@ -0,0 +1,4 @@ +import os + +PACKAGE_PATH = os.path.dirname(__file__) +PROJECT_NAME = "dbt" diff --git a/core/dbt/include/global_project/dbt_project.yml b/core/dbt/adapters/include/global_project/dbt_project.yml similarity index 100% rename from core/dbt/include/global_project/dbt_project.yml rename to core/dbt/adapters/include/global_project/dbt_project.yml diff --git a/core/dbt/include/global_project/docs/overview.md b/core/dbt/adapters/include/global_project/docs/overview.md similarity index 100% rename from core/dbt/include/global_project/docs/overview.md rename to core/dbt/adapters/include/global_project/docs/overview.md diff --git 
a/core/dbt/include/global_project/macros/adapters/apply_grants.sql b/core/dbt/adapters/include/global_project/macros/adapters/apply_grants.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/apply_grants.sql rename to core/dbt/adapters/include/global_project/macros/adapters/apply_grants.sql diff --git a/core/dbt/include/global_project/macros/adapters/columns.sql b/core/dbt/adapters/include/global_project/macros/adapters/columns.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/columns.sql rename to core/dbt/adapters/include/global_project/macros/adapters/columns.sql diff --git a/core/dbt/include/global_project/macros/adapters/freshness.sql b/core/dbt/adapters/include/global_project/macros/adapters/freshness.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/freshness.sql rename to core/dbt/adapters/include/global_project/macros/adapters/freshness.sql diff --git a/core/dbt/include/global_project/macros/adapters/indexes.sql b/core/dbt/adapters/include/global_project/macros/adapters/indexes.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/indexes.sql rename to core/dbt/adapters/include/global_project/macros/adapters/indexes.sql diff --git a/core/dbt/include/global_project/macros/adapters/metadata.sql b/core/dbt/adapters/include/global_project/macros/adapters/metadata.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/metadata.sql rename to core/dbt/adapters/include/global_project/macros/adapters/metadata.sql diff --git a/core/dbt/include/global_project/macros/adapters/persist_docs.sql b/core/dbt/adapters/include/global_project/macros/adapters/persist_docs.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/persist_docs.sql rename to core/dbt/adapters/include/global_project/macros/adapters/persist_docs.sql diff --git 
a/core/dbt/include/global_project/macros/adapters/relation.sql b/core/dbt/adapters/include/global_project/macros/adapters/relation.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/relation.sql rename to core/dbt/adapters/include/global_project/macros/adapters/relation.sql diff --git a/core/dbt/include/global_project/macros/adapters/schema.sql b/core/dbt/adapters/include/global_project/macros/adapters/schema.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/schema.sql rename to core/dbt/adapters/include/global_project/macros/adapters/schema.sql diff --git a/core/dbt/include/global_project/macros/adapters/show.sql b/core/dbt/adapters/include/global_project/macros/adapters/show.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/show.sql rename to core/dbt/adapters/include/global_project/macros/adapters/show.sql diff --git a/core/dbt/include/global_project/macros/adapters/timestamps.sql b/core/dbt/adapters/include/global_project/macros/adapters/timestamps.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/timestamps.sql rename to core/dbt/adapters/include/global_project/macros/adapters/timestamps.sql diff --git a/core/dbt/include/global_project/macros/adapters/validate_sql.sql b/core/dbt/adapters/include/global_project/macros/adapters/validate_sql.sql similarity index 100% rename from core/dbt/include/global_project/macros/adapters/validate_sql.sql rename to core/dbt/adapters/include/global_project/macros/adapters/validate_sql.sql diff --git a/core/dbt/include/global_project/macros/etc/datetime.sql b/core/dbt/adapters/include/global_project/macros/etc/datetime.sql similarity index 100% rename from core/dbt/include/global_project/macros/etc/datetime.sql rename to core/dbt/adapters/include/global_project/macros/etc/datetime.sql diff --git a/core/dbt/include/global_project/macros/etc/statement.sql 
b/core/dbt/adapters/include/global_project/macros/etc/statement.sql similarity index 100% rename from core/dbt/include/global_project/macros/etc/statement.sql rename to core/dbt/adapters/include/global_project/macros/etc/statement.sql diff --git a/core/dbt/include/global_project/macros/generic_test_sql/accepted_values.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/accepted_values.sql similarity index 100% rename from core/dbt/include/global_project/macros/generic_test_sql/accepted_values.sql rename to core/dbt/adapters/include/global_project/macros/generic_test_sql/accepted_values.sql diff --git a/core/dbt/include/global_project/macros/generic_test_sql/not_null.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/not_null.sql similarity index 100% rename from core/dbt/include/global_project/macros/generic_test_sql/not_null.sql rename to core/dbt/adapters/include/global_project/macros/generic_test_sql/not_null.sql diff --git a/core/dbt/include/global_project/macros/generic_test_sql/relationships.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/relationships.sql similarity index 100% rename from core/dbt/include/global_project/macros/generic_test_sql/relationships.sql rename to core/dbt/adapters/include/global_project/macros/generic_test_sql/relationships.sql diff --git a/core/dbt/include/global_project/macros/generic_test_sql/unique.sql b/core/dbt/adapters/include/global_project/macros/generic_test_sql/unique.sql similarity index 100% rename from core/dbt/include/global_project/macros/generic_test_sql/unique.sql rename to core/dbt/adapters/include/global_project/macros/generic_test_sql/unique.sql diff --git a/core/dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql b/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_alias.sql similarity index 100% rename from core/dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql rename to 
core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_alias.sql diff --git a/core/dbt/include/global_project/macros/get_custom_name/get_custom_database.sql b/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_database.sql similarity index 100% rename from core/dbt/include/global_project/macros/get_custom_name/get_custom_database.sql rename to core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_database.sql diff --git a/core/dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql b/core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_schema.sql similarity index 100% rename from core/dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql rename to core/dbt/adapters/include/global_project/macros/get_custom_name/get_custom_schema.sql diff --git a/core/dbt/include/global_project/macros/materializations/configs.sql b/core/dbt/adapters/include/global_project/macros/materializations/configs.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/configs.sql rename to core/dbt/adapters/include/global_project/macros/materializations/configs.sql diff --git a/core/dbt/include/global_project/macros/materializations/hooks.sql b/core/dbt/adapters/include/global_project/macros/materializations/hooks.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/hooks.sql rename to core/dbt/adapters/include/global_project/macros/materializations/hooks.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/clone/can_clone_table.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/clone/can_clone_table.sql diff --git 
a/core/dbt/include/global_project/macros/materializations/models/clone/clone.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/clone/clone.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/clone/clone.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/clone/clone.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/column_helpers.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/incremental/column_helpers.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/incremental.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/incremental/incremental.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/is_incremental.sql similarity index 100% rename from 
core/dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/incremental/is_incremental.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/merge.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/merge.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/incremental/merge.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/incremental/merge.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/on_schema_change.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/incremental/on_schema_change.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/incremental/strategies.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/incremental/strategies.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/materialized_view.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/materialized_view.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/materialized_view.sql diff --git 
a/core/dbt/include/global_project/macros/materializations/models/table.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/table.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/table.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/table.sql diff --git a/core/dbt/include/global_project/macros/materializations/models/view.sql b/core/dbt/adapters/include/global_project/macros/materializations/models/view.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/models/view.sql rename to core/dbt/adapters/include/global_project/macros/materializations/models/view.sql diff --git a/core/dbt/include/global_project/macros/materializations/seeds/helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/seeds/helpers.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/seeds/helpers.sql rename to core/dbt/adapters/include/global_project/macros/materializations/seeds/helpers.sql diff --git a/core/dbt/include/global_project/macros/materializations/seeds/seed.sql b/core/dbt/adapters/include/global_project/macros/materializations/seeds/seed.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/seeds/seed.sql rename to core/dbt/adapters/include/global_project/macros/materializations/seeds/seed.sql diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/helpers.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/snapshots/helpers.sql rename to core/dbt/adapters/include/global_project/macros/materializations/snapshots/helpers.sql diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql 
b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql rename to core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot.sql diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot_merge.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql rename to core/dbt/adapters/include/global_project/macros/materializations/snapshots/snapshot_merge.sql diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/strategies.sql b/core/dbt/adapters/include/global_project/macros/materializations/snapshots/strategies.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/snapshots/strategies.sql rename to core/dbt/adapters/include/global_project/macros/materializations/snapshots/strategies.sql diff --git a/core/dbt/include/global_project/macros/materializations/tests/helpers.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/helpers.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/tests/helpers.sql rename to core/dbt/adapters/include/global_project/macros/materializations/tests/helpers.sql diff --git a/core/dbt/include/global_project/macros/materializations/tests/test.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/test.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/tests/test.sql rename to core/dbt/adapters/include/global_project/macros/materializations/tests/test.sql diff --git a/core/dbt/include/global_project/macros/materializations/tests/unit.sql 
b/core/dbt/adapters/include/global_project/macros/materializations/tests/unit.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/tests/unit.sql rename to core/dbt/adapters/include/global_project/macros/materializations/tests/unit.sql diff --git a/core/dbt/include/global_project/macros/materializations/tests/where_subquery.sql b/core/dbt/adapters/include/global_project/macros/materializations/tests/where_subquery.sql similarity index 100% rename from core/dbt/include/global_project/macros/materializations/tests/where_subquery.sql rename to core/dbt/adapters/include/global_project/macros/materializations/tests/where_subquery.sql diff --git a/core/dbt/include/global_project/macros/python_model/python.sql b/core/dbt/adapters/include/global_project/macros/python_model/python.sql similarity index 100% rename from core/dbt/include/global_project/macros/python_model/python.sql rename to core/dbt/adapters/include/global_project/macros/python_model/python.sql diff --git a/core/dbt/include/global_project/macros/relations/column/columns_spec_ddl.sql b/core/dbt/adapters/include/global_project/macros/relations/column/columns_spec_ddl.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/column/columns_spec_ddl.sql rename to core/dbt/adapters/include/global_project/macros/relations/column/columns_spec_ddl.sql diff --git a/core/dbt/include/global_project/macros/relations/create.sql b/core/dbt/adapters/include/global_project/macros/relations/create.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/create.sql rename to core/dbt/adapters/include/global_project/macros/relations/create.sql diff --git a/core/dbt/include/global_project/macros/relations/create_backup.sql b/core/dbt/adapters/include/global_project/macros/relations/create_backup.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/create_backup.sql rename to 
core/dbt/adapters/include/global_project/macros/relations/create_backup.sql diff --git a/core/dbt/include/global_project/macros/relations/create_intermediate.sql b/core/dbt/adapters/include/global_project/macros/relations/create_intermediate.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/create_intermediate.sql rename to core/dbt/adapters/include/global_project/macros/relations/create_intermediate.sql diff --git a/core/dbt/include/global_project/macros/relations/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/drop.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/drop.sql rename to core/dbt/adapters/include/global_project/macros/relations/drop.sql diff --git a/core/dbt/include/global_project/macros/relations/drop_backup.sql b/core/dbt/adapters/include/global_project/macros/relations/drop_backup.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/drop_backup.sql rename to core/dbt/adapters/include/global_project/macros/relations/drop_backup.sql diff --git a/core/dbt/include/global_project/macros/relations/materialized_view/alter.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/alter.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/materialized_view/alter.sql rename to core/dbt/adapters/include/global_project/macros/relations/materialized_view/alter.sql diff --git a/core/dbt/include/global_project/macros/relations/materialized_view/create.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/create.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/materialized_view/create.sql rename to core/dbt/adapters/include/global_project/macros/relations/materialized_view/create.sql diff --git a/core/dbt/include/global_project/macros/relations/materialized_view/drop.sql 
b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/drop.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/materialized_view/drop.sql rename to core/dbt/adapters/include/global_project/macros/relations/materialized_view/drop.sql diff --git a/core/dbt/include/global_project/macros/relations/materialized_view/refresh.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/refresh.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/materialized_view/refresh.sql rename to core/dbt/adapters/include/global_project/macros/relations/materialized_view/refresh.sql diff --git a/core/dbt/include/global_project/macros/relations/materialized_view/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/rename.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/materialized_view/rename.sql rename to core/dbt/adapters/include/global_project/macros/relations/materialized_view/rename.sql diff --git a/core/dbt/include/global_project/macros/relations/materialized_view/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/materialized_view/replace.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/materialized_view/replace.sql rename to core/dbt/adapters/include/global_project/macros/relations/materialized_view/replace.sql diff --git a/core/dbt/include/global_project/macros/relations/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/rename.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/rename.sql rename to core/dbt/adapters/include/global_project/macros/relations/rename.sql diff --git a/core/dbt/include/global_project/macros/relations/rename_intermediate.sql b/core/dbt/adapters/include/global_project/macros/relations/rename_intermediate.sql similarity index 100% rename from 
core/dbt/include/global_project/macros/relations/rename_intermediate.sql rename to core/dbt/adapters/include/global_project/macros/relations/rename_intermediate.sql diff --git a/core/dbt/include/global_project/macros/relations/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/replace.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/replace.sql rename to core/dbt/adapters/include/global_project/macros/relations/replace.sql diff --git a/core/dbt/include/global_project/macros/relations/schema.sql b/core/dbt/adapters/include/global_project/macros/relations/schema.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/schema.sql rename to core/dbt/adapters/include/global_project/macros/relations/schema.sql diff --git a/core/dbt/include/global_project/macros/relations/table/create.sql b/core/dbt/adapters/include/global_project/macros/relations/table/create.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/table/create.sql rename to core/dbt/adapters/include/global_project/macros/relations/table/create.sql diff --git a/core/dbt/include/global_project/macros/relations/table/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/table/drop.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/table/drop.sql rename to core/dbt/adapters/include/global_project/macros/relations/table/drop.sql diff --git a/core/dbt/include/global_project/macros/relations/table/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/table/rename.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/table/rename.sql rename to core/dbt/adapters/include/global_project/macros/relations/table/rename.sql diff --git a/core/dbt/include/global_project/macros/relations/table/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/table/replace.sql similarity index 100% 
rename from core/dbt/include/global_project/macros/relations/table/replace.sql rename to core/dbt/adapters/include/global_project/macros/relations/table/replace.sql diff --git a/core/dbt/include/global_project/macros/relations/view/create.sql b/core/dbt/adapters/include/global_project/macros/relations/view/create.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/view/create.sql rename to core/dbt/adapters/include/global_project/macros/relations/view/create.sql diff --git a/core/dbt/include/global_project/macros/relations/view/drop.sql b/core/dbt/adapters/include/global_project/macros/relations/view/drop.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/view/drop.sql rename to core/dbt/adapters/include/global_project/macros/relations/view/drop.sql diff --git a/core/dbt/include/global_project/macros/relations/view/rename.sql b/core/dbt/adapters/include/global_project/macros/relations/view/rename.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/view/rename.sql rename to core/dbt/adapters/include/global_project/macros/relations/view/rename.sql diff --git a/core/dbt/include/global_project/macros/relations/view/replace.sql b/core/dbt/adapters/include/global_project/macros/relations/view/replace.sql similarity index 100% rename from core/dbt/include/global_project/macros/relations/view/replace.sql rename to core/dbt/adapters/include/global_project/macros/relations/view/replace.sql diff --git a/core/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql b/core/dbt/adapters/include/global_project/macros/unit_test_sql/get_fixture_sql.sql similarity index 100% rename from core/dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql rename to core/dbt/adapters/include/global_project/macros/unit_test_sql/get_fixture_sql.sql diff --git a/core/dbt/include/global_project/macros/utils/any_value.sql 
b/core/dbt/adapters/include/global_project/macros/utils/any_value.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/any_value.sql rename to core/dbt/adapters/include/global_project/macros/utils/any_value.sql diff --git a/core/dbt/include/global_project/macros/utils/array_append.sql b/core/dbt/adapters/include/global_project/macros/utils/array_append.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/array_append.sql rename to core/dbt/adapters/include/global_project/macros/utils/array_append.sql diff --git a/core/dbt/include/global_project/macros/utils/array_concat.sql b/core/dbt/adapters/include/global_project/macros/utils/array_concat.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/array_concat.sql rename to core/dbt/adapters/include/global_project/macros/utils/array_concat.sql diff --git a/core/dbt/include/global_project/macros/utils/array_construct.sql b/core/dbt/adapters/include/global_project/macros/utils/array_construct.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/array_construct.sql rename to core/dbt/adapters/include/global_project/macros/utils/array_construct.sql diff --git a/core/dbt/include/global_project/macros/utils/bool_or.sql b/core/dbt/adapters/include/global_project/macros/utils/bool_or.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/bool_or.sql rename to core/dbt/adapters/include/global_project/macros/utils/bool_or.sql diff --git a/core/dbt/include/global_project/macros/utils/cast_bool_to_text.sql b/core/dbt/adapters/include/global_project/macros/utils/cast_bool_to_text.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/cast_bool_to_text.sql rename to core/dbt/adapters/include/global_project/macros/utils/cast_bool_to_text.sql diff --git a/core/dbt/include/global_project/macros/utils/concat.sql 
b/core/dbt/adapters/include/global_project/macros/utils/concat.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/concat.sql rename to core/dbt/adapters/include/global_project/macros/utils/concat.sql diff --git a/core/dbt/include/global_project/macros/utils/data_types.sql b/core/dbt/adapters/include/global_project/macros/utils/data_types.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/data_types.sql rename to core/dbt/adapters/include/global_project/macros/utils/data_types.sql diff --git a/core/dbt/include/global_project/macros/utils/date_spine.sql b/core/dbt/adapters/include/global_project/macros/utils/date_spine.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/date_spine.sql rename to core/dbt/adapters/include/global_project/macros/utils/date_spine.sql diff --git a/core/dbt/include/global_project/macros/utils/date_trunc.sql b/core/dbt/adapters/include/global_project/macros/utils/date_trunc.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/date_trunc.sql rename to core/dbt/adapters/include/global_project/macros/utils/date_trunc.sql diff --git a/core/dbt/include/global_project/macros/utils/dateadd.sql b/core/dbt/adapters/include/global_project/macros/utils/dateadd.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/dateadd.sql rename to core/dbt/adapters/include/global_project/macros/utils/dateadd.sql diff --git a/core/dbt/include/global_project/macros/utils/datediff.sql b/core/dbt/adapters/include/global_project/macros/utils/datediff.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/datediff.sql rename to core/dbt/adapters/include/global_project/macros/utils/datediff.sql diff --git a/core/dbt/include/global_project/macros/utils/escape_single_quotes.sql b/core/dbt/adapters/include/global_project/macros/utils/escape_single_quotes.sql similarity index 100% rename from 
core/dbt/include/global_project/macros/utils/escape_single_quotes.sql rename to core/dbt/adapters/include/global_project/macros/utils/escape_single_quotes.sql diff --git a/core/dbt/include/global_project/macros/utils/except.sql b/core/dbt/adapters/include/global_project/macros/utils/except.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/except.sql rename to core/dbt/adapters/include/global_project/macros/utils/except.sql diff --git a/core/dbt/include/global_project/macros/utils/generate_series.sql b/core/dbt/adapters/include/global_project/macros/utils/generate_series.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/generate_series.sql rename to core/dbt/adapters/include/global_project/macros/utils/generate_series.sql diff --git a/core/dbt/include/global_project/macros/utils/hash.sql b/core/dbt/adapters/include/global_project/macros/utils/hash.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/hash.sql rename to core/dbt/adapters/include/global_project/macros/utils/hash.sql diff --git a/core/dbt/include/global_project/macros/utils/intersect.sql b/core/dbt/adapters/include/global_project/macros/utils/intersect.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/intersect.sql rename to core/dbt/adapters/include/global_project/macros/utils/intersect.sql diff --git a/core/dbt/include/global_project/macros/utils/last_day.sql b/core/dbt/adapters/include/global_project/macros/utils/last_day.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/last_day.sql rename to core/dbt/adapters/include/global_project/macros/utils/last_day.sql diff --git a/core/dbt/include/global_project/macros/utils/length.sql b/core/dbt/adapters/include/global_project/macros/utils/length.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/length.sql rename to 
core/dbt/adapters/include/global_project/macros/utils/length.sql diff --git a/core/dbt/include/global_project/macros/utils/listagg.sql b/core/dbt/adapters/include/global_project/macros/utils/listagg.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/listagg.sql rename to core/dbt/adapters/include/global_project/macros/utils/listagg.sql diff --git a/core/dbt/include/global_project/macros/utils/literal.sql b/core/dbt/adapters/include/global_project/macros/utils/literal.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/literal.sql rename to core/dbt/adapters/include/global_project/macros/utils/literal.sql diff --git a/core/dbt/include/global_project/macros/utils/position.sql b/core/dbt/adapters/include/global_project/macros/utils/position.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/position.sql rename to core/dbt/adapters/include/global_project/macros/utils/position.sql diff --git a/core/dbt/include/global_project/macros/utils/replace.sql b/core/dbt/adapters/include/global_project/macros/utils/replace.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/replace.sql rename to core/dbt/adapters/include/global_project/macros/utils/replace.sql diff --git a/core/dbt/include/global_project/macros/utils/right.sql b/core/dbt/adapters/include/global_project/macros/utils/right.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/right.sql rename to core/dbt/adapters/include/global_project/macros/utils/right.sql diff --git a/core/dbt/include/global_project/macros/utils/safe_cast.sql b/core/dbt/adapters/include/global_project/macros/utils/safe_cast.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/safe_cast.sql rename to core/dbt/adapters/include/global_project/macros/utils/safe_cast.sql diff --git a/core/dbt/include/global_project/macros/utils/split_part.sql 
b/core/dbt/adapters/include/global_project/macros/utils/split_part.sql similarity index 100% rename from core/dbt/include/global_project/macros/utils/split_part.sql rename to core/dbt/adapters/include/global_project/macros/utils/split_part.sql diff --git a/core/dbt/include/global_project/tests/generic/builtin.sql b/core/dbt/adapters/include/global_project/tests/generic/builtin.sql similarity index 100% rename from core/dbt/include/global_project/tests/generic/builtin.sql rename to core/dbt/adapters/include/global_project/tests/generic/builtin.sql diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py index 1c58cc78bab..2b0399acfc0 100644 --- a/core/dbt/adapters/protocol.py +++ b/core/dbt/adapters/protocol.py @@ -8,20 +8,18 @@ Generic, TypeVar, Tuple, - Dict, Any, + Dict, ) from typing_extensions import Protocol import agate -from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse -from dbt.contracts.graph.nodes import ResultNode, ManifestNode -from dbt.contracts.graph.model_config import BaseConfig -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.relation import Policy, HasQuoting - -from dbt.graph import Graph +from dbt.adapters.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse +from dbt.adapters.contracts.macros import MacroResolverProtocol +from dbt.adapters.contracts.relation import Policy, HasQuoting, RelationConfig +from dbt.common.contracts.config.base import BaseConfig +from dbt.common.clients.jinja import MacroProtocol @dataclass @@ -46,20 +44,9 @@ def get_default_quote_policy(cls) -> Policy: ... @classmethod - def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self: - ... - - -class CompilerProtocol(Protocol): - def compile(self, manifest: Manifest, write=True) -> Graph: - ... 
- - def compile_node( - self, - node: ManifestNode, - manifest: Manifest, - extra_context: Optional[Dict[str, Any]] = None, - ) -> ManifestNode: + def create_from( + cls: Type[Self], quoting: HasQuoting, relation_config: RelationConfig, **kwargs: Any + ) -> Self: ... @@ -67,7 +54,17 @@ def compile_node( ConnectionManager_T = TypeVar("ConnectionManager_T", bound=ConnectionManagerProtocol) Relation_T = TypeVar("Relation_T", bound=RelationProtocol) Column_T = TypeVar("Column_T", bound=ColumnProtocol) -Compiler_T = TypeVar("Compiler_T", bound=CompilerProtocol) + + +class MacroContextGeneratorCallable(Protocol): + def __call__( + self, + macro_protocol: MacroProtocol, + config: AdapterRequiredConfig, + macro_resolver: MacroResolverProtocol, + package_name: Optional[str], + ) -> Dict[str, Any]: + ... # TODO CT-211 @@ -78,7 +75,6 @@ class AdapterProtocol( # type: ignore[misc] ConnectionManager_T, Relation_T, Column_T, - Compiler_T, ], ): # N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a @@ -93,11 +89,26 @@ class AdapterProtocol( # type: ignore[misc] def __init__(self, config: AdapterRequiredConfig) -> None: ... + def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None: + ... + + def get_macro_resolver(self) -> Optional[MacroResolverProtocol]: + ... + + def clear_macro_resolver(self) -> None: + ... + + def set_macro_context_generator( + self, + macro_context_generator: MacroContextGeneratorCallable, + ) -> None: + ... + @classmethod def type(cls) -> str: pass - def set_query_header(self, manifest: Manifest) -> None: + def set_query_header(self, query_header_context: Dict[str, Any]) -> None: ... @staticmethod @@ -153,6 +164,3 @@ def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False ) -> Tuple[AdapterResponse, agate.Table]: ... - - def get_compiler(self) -> Compiler_T: - ... 
diff --git a/core/dbt/adapters/relation_configs/config_base.py b/core/dbt/adapters/relation_configs/config_base.py index 9d0cddb0d21..5bfaa8de233 100644 --- a/core/dbt/adapters/relation_configs/config_base.py +++ b/core/dbt/adapters/relation_configs/config_base.py @@ -2,7 +2,7 @@ from typing import Union, Dict import agate -from dbt.utils import filter_null_values +from dbt.common.utils import filter_null_values """ diff --git a/core/dbt/adapters/relation_configs/config_change.py b/core/dbt/adapters/relation_configs/config_change.py index 2eb612196ba..1e89cde1442 100644 --- a/core/dbt/adapters/relation_configs/config_change.py +++ b/core/dbt/adapters/relation_configs/config_change.py @@ -3,7 +3,7 @@ from typing import Hashable from dbt.adapters.relation_configs.config_base import RelationConfigBase -from dbt.dataclass_schema import StrEnum +from dbt.common.dataclass_schema import StrEnum class RelationConfigChangeAction(StrEnum): diff --git a/core/dbt/adapters/relation_configs/config_validation.py b/core/dbt/adapters/relation_configs/config_validation.py index 17bf74bf3e7..ef7b18bb7bb 100644 --- a/core/dbt/adapters/relation_configs/config_validation.py +++ b/core/dbt/adapters/relation_configs/config_validation.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from typing import Set, Optional -from dbt.exceptions import DbtRuntimeError +from dbt.common.exceptions import DbtRuntimeError @dataclass(frozen=True, eq=True, unsafe_hash=True) diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py index 7347d961d15..86de2dfbddc 100644 --- a/core/dbt/adapters/sql/connections.py +++ b/core/dbt/adapters/sql/connections.py @@ -4,14 +4,14 @@ import agate -import dbt.clients.agate_helper -import dbt.exceptions +from dbt.adapters.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus +import dbt.common.clients.agate_helper +import dbt.common.exceptions from dbt.adapters.base import BaseConnectionManager -from 
dbt.contracts.connection import Connection, ConnectionState, AdapterResponse -from dbt.events.functions import fire_event -from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus -from dbt.events.contextvars import get_node_info -from dbt.utils import cast_to_str +from dbt.adapters.contracts.connection import Connection, ConnectionState, AdapterResponse +from dbt.common.events.functions import fire_event +from dbt.common.events.contextvars import get_node_info +from dbt.common.utils import cast_to_str class SQLConnectionManager(BaseConnectionManager): @@ -27,7 +27,9 @@ class SQLConnectionManager(BaseConnectionManager): @abc.abstractmethod def cancel(self, connection: Connection): """Cancel the given connection.""" - raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!") + raise dbt.common.exceptions.base.NotImplementedError( + "`cancel` is not implemented for this adapter!" + ) def cancel_open(self) -> List[str]: names = [] @@ -93,7 +95,7 @@ def add_query( @abc.abstractmethod def get_response(cls, cursor: Any) -> AdapterResponse: """Get the status of the cursor.""" - raise dbt.exceptions.NotImplementedError( + raise dbt.common.exceptions.base.NotImplementedError( "`get_response` is not implemented for this adapter!" 
) @@ -129,7 +131,7 @@ def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Tabl rows = cursor.fetchall() data = cls.process_results(column_names, rows) - return dbt.clients.agate_helper.table_from_data_flat(data, column_names) + return dbt.common.clients.agate_helper.table_from_data_flat(data, column_names) def execute( self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None @@ -140,7 +142,7 @@ def execute( if fetch: table = self.get_result_from_cursor(cursor, limit) else: - table = dbt.clients.agate_helper.empty_table() + table = dbt.common.clients.agate_helper.empty_table() return response, table def add_begin_query(self): @@ -156,7 +158,7 @@ def add_select_query(self, sql: str) -> Tuple[Connection, Any]: def begin(self): connection = self.get_thread_connection() if connection.transaction_open is True: - raise dbt.exceptions.DbtInternalError( + raise dbt.common.exceptions.DbtInternalError( 'Tried to begin a new transaction on connection "{}", but ' "it already had one open!".format(connection.name) ) @@ -169,7 +171,7 @@ def begin(self): def commit(self): connection = self.get_thread_connection() if connection.transaction_open is False: - raise dbt.exceptions.DbtInternalError( + raise dbt.common.exceptions.DbtInternalError( 'Tried to commit transaction on connection "{}", but ' "it does not have one open!".format(connection.name) ) diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py index de4c109bb54..e8a712774de 100644 --- a/core/dbt/adapters/sql/impl.py +++ b/core/dbt/adapters/sql/impl.py @@ -1,13 +1,13 @@ import agate from typing import Any, Optional, Tuple, Type, List -from dbt.contracts.connection import Connection, AdapterResponse -from dbt.exceptions import RelationTypeNullError +from dbt.adapters.contracts.connection import Connection, AdapterResponse +from dbt.adapters.events.types import ColTypeChange, SchemaCreation, SchemaDrop +from dbt.adapters.exceptions import 
RelationTypeNullError from dbt.adapters.base import BaseAdapter, available from dbt.adapters.cache import _make_ref_key_dict from dbt.adapters.sql import SQLConnectionManager -from dbt.events.functions import fire_event -from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop +from dbt.common.events.functions import fire_event from dbt.adapters.base.relation import BaseRelation diff --git a/core/dbt/adapters/utils.py b/core/dbt/adapters/utils.py new file mode 100644 index 00000000000..44e6160a837 --- /dev/null +++ b/core/dbt/adapters/utils.py @@ -0,0 +1,68 @@ +from typing import Mapping, Sequence, Any, Dict, List +from dbt.adapters.exceptions import DuplicateAliasError + + +class Translator: + def __init__(self, aliases: Mapping[str, str], recursive: bool = False) -> None: + self.aliases = aliases + self.recursive = recursive + + def translate_mapping(self, kwargs: Mapping[str, Any]) -> Dict[str, Any]: + result: Dict[str, Any] = {} + + for key, value in kwargs.items(): + canonical_key = self.aliases.get(key, key) + if canonical_key in result: + raise DuplicateAliasError(kwargs, self.aliases, canonical_key) + result[canonical_key] = self.translate_value(value) + return result + + def translate_sequence(self, value: Sequence[Any]) -> List[Any]: + return [self.translate_value(v) for v in value] + + def translate_value(self, value: Any) -> Any: + if self.recursive: + if isinstance(value, Mapping): + return self.translate_mapping(value) + elif isinstance(value, (list, tuple)): + return self.translate_sequence(value) + return value + + def translate(self, value: Mapping[str, Any]) -> Dict[str, Any]: + try: + return self.translate_mapping(value) + except RuntimeError as exc: + if "maximum recursion depth exceeded" in str(exc): + raise RecursionError("Cycle detected in a value passed to translate!") + raise + + +def translate_aliases( + kwargs: Dict[str, Any], + aliases: Dict[str, str], + recurse: bool = False, +) -> Dict[str, Any]: + """Given a dict of 
keyword arguments and a dict mapping aliases to their + canonical values, canonicalize the keys in the kwargs dict. + + If recurse is True, perform this operation recursively. + + :returns: A dict containing all the values in kwargs referenced by their + canonical key. + :raises: `AliasError`, if a canonical key is defined more than once. + """ + translator = Translator(aliases, recurse) + return translator.translate(kwargs) + + +# some types need to make constants available to the jinja context as +# attributes, and regular properties only work with objects. maybe this should +# be handled by the RelationProxy? + + +class classproperty(object): + def __init__(self, func) -> None: + self.func = func + + def __get__(self, obj, objtype): + return self.func(objtype) diff --git a/core/dbt/cli/flags.py b/core/dbt/cli/flags.py index 2678d53b6dd..6e3cf075d8b 100644 --- a/core/dbt/cli/flags.py +++ b/core/dbt/cli/flags.py @@ -2,7 +2,6 @@ import sys from dataclasses import dataclass from importlib import import_module -from multiprocessing import get_context from pprint import pformat as pf from typing import Any, Callable, Dict, List, Optional, Set, Union @@ -11,11 +10,14 @@ from dbt.cli.exceptions import DbtUsageException from dbt.cli.resolvers import default_log_path, default_project_dir from dbt.cli.types import Command as CliCommand +from dbt.common import ui +from dbt.common.events import functions +from dbt.common.exceptions import DbtInternalError +from dbt.common.clients import jinja from dbt.config.profile import read_user_config from dbt.contracts.project import UserConfig -from dbt.exceptions import DbtInternalError from dbt.deprecations import renamed_env_var -from dbt.helper_types import WarnErrorOptions +from dbt.common.helper_types import WarnErrorOptions if os.name != "nt": # https://bugs.python.org/issue41567 @@ -224,7 +226,6 @@ def _assign_params( # Set hard coded flags. 
object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name) - object.__setattr__(self, "MP_CONTEXT", get_context("spawn")) # Apply the lead/follow relationship between some parameters. self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default) @@ -255,6 +256,8 @@ def _assign_params( for param in params: object.__setattr__(self, param.lower(), getattr(self, param)) + self.set_common_global_flags() + def __str__(self) -> str: return str(pf(self.__dict__)) @@ -295,6 +298,22 @@ def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags": flags.fire_deprecations() return flags + def set_common_global_flags(self): + # Set globals for common.ui + if getattr(self, "PRINTER_WIDTH", None) is not None: + ui.PRINTER_WIDTH = getattr(self, "PRINTER_WIDTH") + if getattr(self, "USE_COLORS", None) is not None: + ui.USE_COLOR = getattr(self, "USE_COLORS") + + # Set globals for common.events.functions + functions.WARN_ERROR = getattr(self, "WARN_ERROR", False) + if getattr(self, "WARN_ERROR_OPTIONS", None) is not None: + functions.WARN_ERROR_OPTIONS = getattr(self, "WARN_ERROR_OPTIONS") + + # Set globals for common.jinja + if getattr(self, "MACRO_DEBUGGING", None) is not None: + jinja.MACRO_DEBUGGING = getattr(self, "MACRO_DEBUGGING") + CommandParams = List[str] diff --git a/core/dbt/cli/main.py b/core/dbt/cli/main.py index 61ff8d03773..748612acee9 100644 --- a/core/dbt/cli/main.py +++ b/core/dbt/cli/main.py @@ -21,22 +21,22 @@ CatalogArtifact, RunExecutionResult, ) -from dbt.events.base_types import EventMsg +from dbt.common.events.base_types import EventMsg from dbt.task.build import BuildTask from dbt.task.clean import CleanTask from dbt.task.clone import CloneTask from dbt.task.compile import CompileTask from dbt.task.debug import DebugTask from dbt.task.deps import DepsTask +from dbt.task.docs.generate import GenerateTask +from dbt.task.docs.serve import ServeTask from dbt.task.freshness import FreshnessTask -from 
dbt.task.generate import GenerateTask from dbt.task.init import InitTask from dbt.task.list import ListTask from dbt.task.retry import RetryTask from dbt.task.run import RunTask from dbt.task.run_operation import RunOperationTask from dbt.task.seed import SeedTask -from dbt.task.serve import ServeTask from dbt.task.show import ShowTask from dbt.task.snapshot import SnapshotTask from dbt.task.test import TestTask diff --git a/core/dbt/cli/option_types.py b/core/dbt/cli/option_types.py index fbb8ceb76c9..f56740161be 100644 --- a/core/dbt/cli/option_types.py +++ b/core/dbt/cli/option_types.py @@ -1,9 +1,10 @@ from click import ParamType, Choice from dbt.config.utils import parse_cli_yaml_string -from dbt.exceptions import ValidationError, DbtValidationError, OptionNotYamlDictError +from dbt.exceptions import ValidationError, OptionNotYamlDictError +from dbt.common.exceptions import DbtValidationError -from dbt.helper_types import WarnErrorOptions +from dbt.common.helper_types import WarnErrorOptions class YAML(ParamType): diff --git a/core/dbt/cli/requires.py b/core/dbt/cli/requires.py index b8e359b8a17..37e17bb3bd2 100644 --- a/core/dbt/cli/requires.py +++ b/core/dbt/cli/requires.py @@ -1,6 +1,8 @@ import dbt.tracking +from dbt.common.invocation import reset_invocation_id +from dbt.mp_context import get_mp_context from dbt.version import installed as installed_version -from dbt.adapters.factory import adapter_management, register_adapter +from dbt.adapters.factory import adapter_management, register_adapter, get_adapter from dbt.flags import set_flags, get_flag_dict from dbt.cli.exceptions import ( ExceptionExit, @@ -9,22 +11,29 @@ from dbt.cli.flags import Flags from dbt.config import RuntimeConfig from dbt.config.runtime import load_project, load_profile, UnsetProfile -from dbt.events.base_types import EventLevel -from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger -from dbt.events.types import ( +from 
dbt.context.providers import generate_runtime_macro_context + +from dbt.common.events.base_types import EventLevel +from dbt.common.events.functions import ( + fire_event, + LOG_VERSION, +) +from dbt.events.logging import setup_event_logger +from dbt.common.events.types import ( CommandCompleted, MainReportVersion, MainReportArgs, MainTrackingUserState, ResourceReport, ) -from dbt.events.helpers import get_json_string_utcnow -from dbt.events.types import MainEncounteredError, MainStackTrace -from dbt.exceptions import Exception as DbtException, DbtProjectError, FailFastError +from dbt.common.events.helpers import get_json_string_utcnow +from dbt.common.events.types import MainEncounteredError, MainStackTrace +from dbt.common.exceptions import DbtBaseException as DbtException +from dbt.exceptions import DbtProjectError, FailFastError from dbt.parser.manifest import ManifestLoader, write_manifest from dbt.profiler import profiler from dbt.tracking import active_user, initialize_from_flags, track_run -from dbt.utils import cast_dict_to_dict_of_strings +from dbt.common.utils import cast_dict_to_dict_of_strings from dbt.plugins import set_up_plugin_manager, get_plugin_manager from click import Context @@ -45,9 +54,11 @@ def wrapper(*args, **kwargs): ctx.obj["flags"] = flags set_flags(flags) + # Reset invocation_id for each 'invocation' of a dbt command (can happen multiple times in a single process) + reset_invocation_id() + # Logging callbacks = ctx.obj.get("callbacks", []) - set_invocation_id() setup_event_logger(flags=flags, callbacks=callbacks) # Tracking @@ -264,7 +275,9 @@ def wrapper(*args, **kwargs): raise DbtProjectError("profile, project, and runtime_config required for manifest") runtime_config = ctx.obj["runtime_config"] - register_adapter(runtime_config) + register_adapter(runtime_config, get_mp_context()) + adapter = get_adapter(runtime_config) + adapter.set_macro_context_generator(generate_runtime_macro_context) # a manifest has already been set on the 
context, so don't overwrite it if ctx.obj.get("manifest") is None: diff --git a/core/dbt/cli/types.py b/core/dbt/cli/types.py index 14028a69451..f43314c873f 100644 --- a/core/dbt/cli/types.py +++ b/core/dbt/cli/types.py @@ -1,7 +1,7 @@ from enum import Enum from typing import List -from dbt.exceptions import DbtInternalError +from dbt.common.exceptions import DbtInternalError class Command(Enum): diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py index 2fde25c3f23..886be2811d7 100644 --- a/core/dbt/clients/git.py +++ b/core/dbt/clients/git.py @@ -1,9 +1,9 @@ import re import os.path -from dbt.clients.system import run_cmd, rmdir -from dbt.events.functions import fire_event -from dbt.events.types import ( +from dbt.common.clients.system import run_cmd, rmdir +from dbt.common.events.functions import fire_event +from dbt.common.events.types import ( GitSparseCheckoutSubdirectory, GitProgressCheckoutRevision, GitProgressUpdatingExistingDependency, diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py index 8178e710bec..15bc8d87f30 100644 --- a/core/dbt/clients/jinja.py +++ b/core/dbt/clients/jinja.py @@ -1,13 +1,7 @@ -import codecs -import linecache -import os import re -import tempfile import threading -from ast import literal_eval from contextlib import contextmanager -from itertools import chain, islice -from typing import List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple, Callable +from typing import List, Union, Optional, Dict, Any, NoReturn, Tuple import jinja2 import jinja2.ext @@ -16,249 +10,26 @@ import jinja2.parser import jinja2.sandbox -from dbt.utils import ( - get_dbt_macro_name, - get_docs_macro_name, - get_materialization_macro_name, - get_test_macro_name, - deep_map_render, +from dbt.common.clients.jinja import ( + render_template, + get_template, + CallableMacroGenerator, + MacroProtocol, ) - -from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag +from dbt.common.utils import 
deep_map_render from dbt.contracts.graph.nodes import GenericTestNode from dbt.exceptions import ( - CaughtMacroError, - CaughtMacroErrorWithNodeError, - CompilationError, DbtInternalError, - MaterializationArgError, - JinjaRenderingError, - MacroReturn, MaterializtionMacroNotUsedError, NoSupportedLanguagesFoundError, - UndefinedCompilationError, - UndefinedMacroError, ) -from dbt.flags import get_flags from dbt.node_types import ModelLanguage SUPPORTED_LANG_ARG = jinja2.nodes.Name("supported_languages", "param") -def _linecache_inject(source, write): - if write: - # this is the only reliable way to accomplish this. Obviously, it's - # really darn noisy and will fill your temporary directory - tmp_file = tempfile.NamedTemporaryFile( - prefix="dbt-macro-compiled-", - suffix=".py", - delete=False, - mode="w+", - encoding="utf-8", - ) - tmp_file.write(source) - filename = tmp_file.name - else: - # `codecs.encode` actually takes a `bytes` as the first argument if - # the second argument is 'hex' - mypy does not know this. - rnd = codecs.encode(os.urandom(12), "hex") # type: ignore - filename = rnd.decode("ascii") - - # put ourselves in the cache - cache_entry = (len(source), None, [line + "\n" for line in source.splitlines()], filename) - # linecache does in fact have an attribute `cache`, thanks - linecache.cache[filename] = cache_entry # type: ignore - return filename - - -class MacroFuzzParser(jinja2.parser.Parser): - def parse_macro(self): - node = jinja2.nodes.Macro(lineno=next(self.stream).lineno) - - # modified to fuzz macros defined in the same file. this way - # dbt can understand the stack of macros being called. 
- # - @cmcarthur - node.name = get_dbt_macro_name(self.parse_assign_target(name_only=True).name) - - self.parse_signature(node) - node.body = self.parse_statements(("name:endmacro",), drop_needle=True) - return node - - -class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment): - def _parse(self, source, name, filename): - return MacroFuzzParser(self, source, name, filename).parse() - - def _compile(self, source, filename): - """Override jinja's compilation to stash the rendered source inside - the python linecache for debugging when the appropriate environment - variable is set. - - If the value is 'write', also write the files to disk. - WARNING: This can write a ton of data if you aren't careful. - """ - macro_debugging = get_flags().MACRO_DEBUGGING - if filename == "