diff --git a/python/lsst/daf/butler/datastore/_datastore.py b/python/lsst/daf/butler/datastore/_datastore.py index f3f300232c..271b8af7fe 100644 --- a/python/lsst/daf/butler/datastore/_datastore.py +++ b/python/lsst/daf/butler/datastore/_datastore.py @@ -30,13 +30,13 @@ from __future__ import annotations __all__ = ( - "DatastoreConfig", + "DatasetRefURIs", "Datastore", + "DatastoreConfig", + "DatastoreOpaqueTable", "DatastoreValidationError", - "DatasetRefURIs", "NullDatastore", "DatastoreTransaction", - "OpaqueTableDefinition", ) import contextlib @@ -97,7 +97,7 @@ class Event: @dataclasses.dataclass(frozen=True) -class OpaqueTableDefinition: +class DatastoreOpaqueTable: """Definition of the opaque table which stores datastore records. Table definition contains `.ddl.TableSpec` for a table and a class @@ -1264,7 +1264,7 @@ def set_retrieve_dataset_type_method(self, method: Callable[[str], DatasetType | pass @abstractmethod - def opaque_table_definitions(self) -> Mapping[str, OpaqueTableDefinition]: + def opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: """Make definitions of the opaque tables used by this Datastore. Returns @@ -1398,5 +1398,5 @@ def export_records( ) -> Mapping[str, DatastoreRecordData]: raise NotImplementedError("This is a no-op datastore that can not access a real datastore") - def opaque_table_definitions(self) -> Mapping[str, OpaqueTableDefinition]: + def opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: return {} diff --git a/python/lsst/daf/butler/datastore/generic_base.py b/python/lsst/daf/butler/datastore/generic_base.py index 0433466310..9fe2c71f96 100644 --- a/python/lsst/daf/butler/datastore/generic_base.py +++ b/python/lsst/daf/butler/datastore/generic_base.py @@ -32,18 +32,16 @@ __all__ = ("GenericBaseDatastore",) import logging -from abc import abstractmethod -from collections.abc import Iterable, Mapping +from collections.abc import Mapping from typing import TYPE_CHECKING, Any, Generic, TypeVar from .._exceptions import DatasetTypeNotSupportedError from ..datastore._datastore import Datastore -from ..registry.interfaces import DatabaseInsertMode, DatastoreRegistryBridge +from .stored_file_info import StoredDatastoreItemInfo if TYPE_CHECKING: from .._dataset_ref import DatasetRef from .._storage_class import StorageClass - from .stored_file_info import StoredDatastoreItemInfo log = logging.getLogger(__name__) @@ -56,107 +54,6 @@ class GenericBaseDatastore(Datastore, Generic[_InfoType]): Should always be sub-classed since key abstract methods are missing. """ - @property - @abstractmethod - def bridge(self) -> DatastoreRegistryBridge: - """Object that manages the interface between this `Datastore` and the - `Registry` (`DatastoreRegistryBridge`). - """ - raise NotImplementedError() - - @abstractmethod - def addStoredItemInfo( - self, - refs: Iterable[DatasetRef], - infos: Iterable[Any], - insert_mode: DatabaseInsertMode = DatabaseInsertMode.INSERT, - ) -> None: - """Record internal storage information associated with one or more - datasets. - - Parameters - ---------- - refs : sequence of `DatasetRef` - The datasets that have been stored. - infos : sequence of `StoredDatastoreItemInfo` - Metadata associated with the stored datasets. - insert_mode : `~lsst.daf.butler.registry.interfaces.DatabaseInsertMode` - Mode to use to insert the new records into the table. The - options are ``INSERT`` (error if pre-existing), ``REPLACE`` - (replace content with new values), and ``ENSURE`` (skip if the row - already exists). 
- """ - raise NotImplementedError() - - @abstractmethod - def getStoredItemsInfo(self, ref: DatasetRef) -> Iterable[_InfoType]: - """Retrieve information associated with files stored in this - `Datastore` associated with this dataset ref. - - Parameters - ---------- - ref : `DatasetRef` - The dataset that is to be queried. - - Returns - ------- - items : `~collections.abc.Iterable` [`StoredDatastoreItemInfo`] - Stored information about the files and associated formatters - associated with this dataset. Only one file will be returned - if the dataset has not been disassembled. Can return an empty - list if no matching datasets can be found. - """ - raise NotImplementedError() - - @abstractmethod - def removeStoredItemInfo(self, ref: DatasetRef) -> None: - """Remove information about the file associated with this dataset. - - Parameters - ---------- - ref : `DatasetRef` - The dataset that has been removed. - """ - raise NotImplementedError() - - def _register_datasets( - self, - refsAndInfos: Iterable[tuple[DatasetRef, StoredDatastoreItemInfo]], - insert_mode: DatabaseInsertMode = DatabaseInsertMode.INSERT, - ) -> None: - """Update registry to indicate that one or more datasets have been - stored. - - Parameters - ---------- - refsAndInfos : sequence `tuple` [`DatasetRef`, - `StoredDatastoreItemInfo`] - Datasets to register and the internal datastore metadata associated - with them. - insert_mode : `str`, optional - Indicate whether the new records should be new ("insert", default), - or allowed to exists ("ensure") or be replaced if already present - ("replace"). - """ - expandedRefs: list[DatasetRef] = [] - expandedItemInfos = [] - - for ref, itemInfo in refsAndInfos: - expandedRefs.append(ref) - expandedItemInfos.append(itemInfo) - - # Dataset location only cares about registry ID so if we have - # disassembled in datastore we have to deduplicate. Since they - # will have different datasetTypes we can't use a set - registryRefs = {r.id: r for r in expandedRefs} - if insert_mode == DatabaseInsertMode.INSERT: - self.bridge.insert(registryRefs.values()) - else: - # There are only two columns and all that matters is the - # dataset ID. - self.bridge.ensure(registryRefs.values()) - self.addStoredItemInfo(expandedRefs, expandedItemInfos, insert_mode=insert_mode) - def _post_process_get( self, inMemoryDataset: object, @@ -272,7 +169,6 @@ def transfer(self, inputDatastore: Datastore, ref: DatasetRef) -> None: The external `Datastore` from which to retreive the Dataset. ref : `DatasetRef` Reference to the required dataset in the input data store. - """ assert inputDatastore is not self # unless we want it for renames? inMemoryDataset = inputDatastore.get(ref) diff --git a/python/lsst/daf/butler/datastores/chainedDatastore.py b/python/lsst/daf/butler/datastores/chainedDatastore.py index c0fb253fcd..5edacd217d 100644 --- a/python/lsst/daf/butler/datastores/chainedDatastore.py +++ b/python/lsst/daf/butler/datastores/chainedDatastore.py @@ -43,8 +43,8 @@ DatasetRefURIs, Datastore, DatastoreConfig, + DatastoreOpaqueTable, DatastoreValidationError, - OpaqueTableDefinition, ) from lsst.daf.butler.datastore.constraints import Constraints from lsst.daf.butler.datastore.record_data import DatastoreRecordData @@ -1179,9 +1179,9 @@ def transfer_from( return all_accepted, remaining_refs - def opaque_table_definitions(self) -> Mapping[str, OpaqueTableDefinition]: + def opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: # Docstring inherited from the base class. 
- tables: dict[str, OpaqueTableDefinition] = {} + tables: dict[str, DatastoreOpaqueTable] = {} for datastore in self.datastores: tables.update(datastore.opaque_table_definitions()) return tables diff --git a/python/lsst/daf/butler/datastores/fileDatastore.py b/python/lsst/daf/butler/datastores/fileDatastore.py index 4f203e0aef..fce89aa00a 100644 --- a/python/lsst/daf/butler/datastores/fileDatastore.py +++ b/python/lsst/daf/butler/datastores/fileDatastore.py @@ -45,7 +45,6 @@ DatasetRef, DatasetType, DatasetTypeNotSupportedError, - Datastore, FileDataset, FileDescriptor, Formatter, @@ -58,9 +57,10 @@ ) from lsst.daf.butler.datastore import ( DatasetRefURIs, + Datastore, DatastoreConfig, + DatastoreOpaqueTable, DatastoreValidationError, - OpaqueTableDefinition, ) from lsst.daf.butler.datastore.cache_manager import ( AbstractDatastoreCacheManager, @@ -382,7 +382,21 @@ def addStoredItemInfo( infos: Iterable[StoredFileInfo], insert_mode: DatabaseInsertMode = DatabaseInsertMode.INSERT, ) -> None: - # Docstring inherited from GenericBaseDatastore + """Record internal storage information associated with one or more + datasets. + + Parameters + ---------- + refs : sequence of `DatasetRef` + The datasets that have been stored. + infos : sequence of `StoredDatastoreItemInfo` + Metadata associated with the stored datasets. + insert_mode : `~lsst.daf.butler.registry.interfaces.DatabaseInsertMode` + Mode to use to insert the new records into the table. The + options are ``INSERT`` (error if pre-existing), ``REPLACE`` + (replace content with new values), and ``ENSURE`` (skip if the row + already exists). + """ records = [ info.rebase(ref).to_record(dataset_id=ref.id) for ref, info in zip(refs, infos, strict=True) ] @@ -397,8 +411,22 @@ def addStoredItemInfo( raise ValueError(f"Unknown insert mode of '{insert_mode}'") def getStoredItemsInfo(self, ref: DatasetIdRef) -> list[StoredFileInfo]: - # Docstring inherited from GenericBaseDatastore + """Retrieve information associated with files stored in this + `Datastore` associated with this dataset ref. + Parameters + ---------- + ref : `DatasetRef` + The dataset that is to be queried. + + Returns + ------- + items : `~collections.abc.Iterable` [`StoredDatastoreItemInfo`] + Stored information about the files and associated formatters + associated with this dataset. Only one file will be returned + if the dataset has not been disassembled. Can return an empty + list if no matching datasets can be found. + """ # Try to get them from the ref first. if ref.datastore_records is not None: if (ref_records := ref.datastore_records.get(self._table.name)) is not None: @@ -413,6 +441,44 @@ def getStoredItemsInfo(self, ref: DatasetIdRef) -> list[StoredFileInfo]: records = self._table.fetch(dataset_id=ref.id) return [StoredFileInfo.from_record(record) for record in records] + def _register_datasets( + self, + refsAndInfos: Iterable[tuple[DatasetRef, StoredFileInfo]], + insert_mode: DatabaseInsertMode = DatabaseInsertMode.INSERT, + ) -> None: + """Update registry to indicate that one or more datasets have been + stored. + + Parameters + ---------- + refsAndInfos : sequence `tuple` [`DatasetRef`, + `StoredDatastoreItemInfo`] + Datasets to register and the internal datastore metadata associated + with them. + insert_mode : `str`, optional + Indicate whether the new records should be new ("insert", default), + or allowed to exists ("ensure") or be replaced if already present + ("replace"). 
+ """ + expandedRefs: list[DatasetRef] = [] + expandedItemInfos: list[StoredFileInfo] = [] + + for ref, itemInfo in refsAndInfos: + expandedRefs.append(ref) + expandedItemInfos.append(itemInfo) + + # Dataset location only cares about registry ID so if we have + # disassembled in datastore we have to deduplicate. Since they + # will have different datasetTypes we can't use a set + registryRefs = {r.id: r for r in expandedRefs} + if insert_mode == DatabaseInsertMode.INSERT: + self.bridge.insert(registryRefs.values()) + else: + # There are only two columns and all that matters is the + # dataset ID. + self.bridge.ensure(registryRefs.values()) + self.addStoredItemInfo(expandedRefs, expandedItemInfos, insert_mode=insert_mode) + def _get_stored_records_associated_with_refs( self, refs: Iterable[DatasetIdRef] ) -> dict[DatasetId, list[StoredFileInfo]]: @@ -491,8 +557,13 @@ def _registered_refs_per_artifact(self, pathInStore: ResourcePath) -> set[Datase return ids def removeStoredItemInfo(self, ref: DatasetIdRef) -> None: - # Docstring inherited from GenericBaseDatastore + """Remove information about the file associated with this dataset. + Parameters + ---------- + ref : `DatasetRef` + The dataset that has been removed. + """ # Note that this method is actually not used by this implementation, # we depend on bridge to delete opaque records. But there are some # tests that check that this method works, so we keep it for now. @@ -3093,6 +3164,6 @@ def _cast_storage_class(self, ref: DatasetRef) -> DatasetRef: ref = ref.overrideStorageClass(dataset_type.storageClass) return ref - def opaque_table_definitions(self) -> Mapping[str, OpaqueTableDefinition]: + def opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: # Docstring inherited from the base class. 
- return {self._opaque_table_name: OpaqueTableDefinition(self.makeTableSpec(ddl.GUID), StoredFileInfo)} + return {self._opaque_table_name: DatastoreOpaqueTable(self.makeTableSpec(ddl.GUID), StoredFileInfo)} diff --git a/python/lsst/daf/butler/datastores/inMemoryDatastore.py b/python/lsst/daf/butler/datastores/inMemoryDatastore.py index eead0519cd..6773a5e274 100644 --- a/python/lsst/daf/butler/datastores/inMemoryDatastore.py +++ b/python/lsst/daf/butler/datastores/inMemoryDatastore.py @@ -43,13 +43,12 @@ from lsst.daf.butler.datastore.generic_base import GenericBaseDatastore from lsst.daf.butler.datastore.record_data import DatastoreRecordData from lsst.daf.butler.datastore.stored_file_info import StoredDatastoreItemInfo -from lsst.daf.butler.registry.interfaces import DatabaseInsertMode, DatastoreRegistryBridge from lsst.daf.butler.utils import transactional from lsst.resources import ResourcePath if TYPE_CHECKING: from lsst.daf.butler import Config, DatasetType, LookupKey - from lsst.daf.butler.datastore import OpaqueTableDefinition + from lsst.daf.butler.datastore import DatastoreOpaqueTable from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager log = logging.getLogger(__name__) @@ -135,7 +134,7 @@ def __init__( # Related records that share the same parent self.related: dict[DatasetId, set[DatasetId]] = {} - self._bridge = bridgeManager.register(self.name, ephemeral=True) + self._trashedIds: set[DatasetId] = set() @classmethod def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None: @@ -172,42 +171,37 @@ def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool """ return - @property - def bridge(self) -> DatastoreRegistryBridge: + def _get_stored_item_info(self, dataset_id: DatasetId) -> StoredMemoryItemInfo: # Docstring inherited from GenericBaseDatastore. - return self._bridge + return self.records[dataset_id] - def addStoredItemInfo( - self, - refs: Iterable[DatasetRef], - infos: Iterable[StoredMemoryItemInfo], - insert_mode: DatabaseInsertMode = DatabaseInsertMode.INSERT, - ) -> None: - # Docstring inherited from GenericBaseDatastore. - for ref, info in zip(refs, infos, strict=True): - self.records[ref.id] = info - self.related.setdefault(info.parentID, set()).add(ref.id) - - def getStoredItemInfo(self, ref: DatasetIdRef) -> StoredMemoryItemInfo: - # Docstring inherited from GenericBaseDatastore. - return self.records[ref.id] - - def getStoredItemsInfo(self, ref: DatasetIdRef) -> list[StoredMemoryItemInfo]: - # Docstring inherited from GenericBaseDatastore. - return [self.getStoredItemInfo(ref)] - - def removeStoredItemInfo(self, ref: DatasetIdRef) -> None: + def _remove_stored_item_info(self, dataset_id: DatasetId) -> None: # Docstring inherited from GenericBaseDatastore. # If a component has been removed previously then we can sometimes # be asked to remove it again. Other datastores ignore this # so also ignore here - if ref.id not in self.records: + if dataset_id not in self.records: return - record = self.records[ref.id] - del self.records[ref.id] - self.related[record.parentID].remove(ref.id) + record = self.records[dataset_id] + del self.records[dataset_id] + self.related[record.parentID].remove(dataset_id) - def _get_dataset_info(self, ref: DatasetIdRef) -> tuple[DatasetId, StoredMemoryItemInfo]: + def removeStoredItemInfo(self, ref: DatasetIdRef) -> None: + """Remove information about the file associated with this dataset. 
+ + Parameters + ---------- + ref : `DatasetRef` + The dataset that has been removed. + + Notes + ----- + This method is actually not used by this implementation, but there are + some tests that check that this method works, so we keep it for now. + """ + self._remove_stored_item_info(ref.id) + + def _get_dataset_info(self, dataset_id: DatasetId) -> tuple[DatasetId, StoredMemoryItemInfo]: """Check that the dataset is present and return the real ID and associated information. @@ -230,15 +224,15 @@ def _get_dataset_info(self, ref: DatasetIdRef) -> tuple[DatasetId, StoredMemoryI Raised if the dataset is not present in this datastore. """ try: - storedItemInfo = self.getStoredItemInfo(ref) + storedItemInfo = self._get_stored_item_info(dataset_id) except KeyError: - raise FileNotFoundError(f"No such file dataset in memory: {ref}") from None - realID = ref.id + raise FileNotFoundError(f"No such file dataset in memory: {dataset_id}") from None + realID = dataset_id if storedItemInfo.parentID is not None: realID = storedItemInfo.parentID if realID not in self.datasets: - raise FileNotFoundError(f"No such file dataset in memory: {ref}") + raise FileNotFoundError(f"No such file dataset in memory: {dataset_id}") return realID, storedItemInfo @@ -274,7 +268,7 @@ def exists(self, ref: DatasetRef) -> bool: `True` if the entity exists in the `Datastore`. """ try: - self._get_dataset_info(ref) + self._get_dataset_info(ref.id) except FileNotFoundError: return False return True @@ -317,7 +311,7 @@ def get( """ log.debug("Retrieve %s from %s with parameters %s", ref, self.name, parameters) - realID, storedItemInfo = self._get_dataset_info(ref) + realID, storedItemInfo = self._get_dataset_info(ref.id) # We have a write storage class and a read storage class and they # can be different for concrete composites or if overridden. @@ -405,13 +399,15 @@ def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None: # We have to register this content with registry. # Currently this assumes we have a file so we need to use stub entries - # TODO: Add to ephemeral part of registry - self._register_datasets([(ref, itemInfo)]) + self.records[ref.id] = itemInfo + self.related.setdefault(itemInfo.parentID, set()).add(ref.id) if self._transaction is not None: self._transaction.registerUndo("put", self.remove, ref) def put_new(self, inMemoryDataset: Any, ref: DatasetRef) -> Mapping[str, DatasetRef]: + # It is OK to call put() here because registry is not populating + # bridges as we return empty dict from this method. self.put(inMemoryDataset, ref) # As ephemeral we return empty dict. return {} @@ -450,7 +446,7 @@ def getURIs(self, ref: DatasetRef, predict: bool = False) -> DatasetRefURIs: name = f"{ref.datasetType.name}" fragment = "#predicted" else: - realID, _ = self._get_dataset_info(ref) + realID, _ = self._get_dataset_info(ref.id) name = f"{id(self.datasets[realID])}?{query}" fragment = "" @@ -516,9 +512,8 @@ def retrieveArtifacts( def forget(self, refs: Iterable[DatasetRef]) -> None: # Docstring inherited. refs = list(refs) - self._bridge.forget(refs) for ref in refs: - self.removeStoredItemInfo(ref) + self._remove_stored_item_info(ref.id) @transactional def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = False) -> None: @@ -543,26 +538,30 @@ def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = Fa since all internal changes are isolated to solely this process and the registry only changes rows associated with this process. 
""" - if not isinstance(ref, DatasetRef): + if isinstance(ref, DatasetRef): + # Check that this dataset is known to datastore + try: + self._get_dataset_info(ref.id) + except Exception as e: + if ignore_errors: + log.warning( + "Error encountered moving dataset %s to trash in datastore %s: %s", ref, self.name, e + ) + else: + raise + log.debug("Trash %s in datastore %s", ref, self.name) + ref_list = [ref] + else: + ref_list = list(ref) log.debug("Bulk trashing of datasets in datastore %s", self.name) - self.bridge.moveToTrash(ref, transaction=self._transaction) - return - log.debug("Trash %s in datastore %s", ref, self.name) + def _rollbackMoveToTrash(refs: Iterable[DatasetIdRef]) -> None: + for ref in refs: + self._trashedIds.remove(ref.id) - # Check that this dataset is known to datastore - try: - self._get_dataset_info(ref) - - # Move datasets to trash table - self.bridge.moveToTrash([ref], transaction=self._transaction) - except Exception as e: - if ignore_errors: - log.warning( - "Error encountered moving dataset %s to trash in datastore %s: %s", ref, self.name, e - ) - else: - raise + assert self._transaction is not None, "Must be in transaction" + with self._transaction.undoWith(f"Trash {len(ref_list)} datasets", _rollbackMoveToTrash, ref_list): + self._trashedIds.update(ref.id for ref in ref_list) def emptyTrash(self, ignore_errors: bool = False) -> None: """Remove all datasets from the trash. @@ -583,36 +582,38 @@ def emptyTrash(self, ignore_errors: bool = False) -> None: the registry only changes rows associated with this process. """ log.debug("Emptying trash in datastore %s", self.name) - with self._bridge.emptyTrash() as trash_data: - trashed, _ = trash_data - for ref, _ in trashed: - try: - realID, _ = self._get_dataset_info(ref) - except FileNotFoundError: - # Dataset already removed so ignore it + + for dataset_id in self._trashedIds: + try: + realID, _ = self._get_dataset_info(dataset_id) + except FileNotFoundError: + # Dataset already removed so ignore it + continue + except Exception as e: + if ignore_errors: + log.warning( + "Emptying trash in datastore %s but encountered an error with dataset %s: %s", + self.name, + dataset_id, + e, + ) continue - except Exception as e: - if ignore_errors: - log.warning( - "Emptying trash in datastore %s but encountered an error with dataset %s: %s", - self.name, - ref.id, - e, - ) - continue - else: - raise - - # Determine whether all references to this dataset have been - # removed and we can delete the dataset itself - allRefs = self.related[realID] - remainingRefs = allRefs - {ref.id} - if not remainingRefs: - log.debug("Removing artifact %s from datastore %s", realID, self.name) - del self.datasets[realID] - - # Remove this entry - self.removeStoredItemInfo(ref) + else: + raise + + # Determine whether all references to this dataset have been + # removed and we can delete the dataset itself + allRefs = self.related[realID] + remainingRefs = allRefs - {dataset_id} + if not remainingRefs: + log.debug("Removing artifact %s from datastore %s", realID, self.name) + del self.datasets[realID] + + # Remove this entry + self._remove_stored_item_info(dataset_id) + + # Empty the trash table + self._trashedIds = set() def validateConfiguration( self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False @@ -670,6 +671,6 @@ def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, Datastore # In-memory Datastore records cannot be exported or imported return {} - def opaque_table_definitions(self) -> 
Mapping[str, OpaqueTableDefinition]: + def opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: # Docstring inherited from the base class. return {} diff --git a/python/lsst/daf/butler/registries/remote.py b/python/lsst/daf/butler/registries/remote.py index 24695b2c66..94653119ef 100644 --- a/python/lsst/daf/butler/registries/remote.py +++ b/python/lsst/daf/butler/registries/remote.py @@ -70,7 +70,7 @@ if TYPE_CHECKING: from .._butler_config import ButlerConfig - from ..datastore._datastore import OpaqueTableDefinition + from ..datastore._datastore import DatastoreOpaqueTable from ..registry._registry import CollectionArgType from ..registry.interfaces import CollectionRecord, DatastoreRegistryBridgeManager @@ -677,7 +677,7 @@ def store_datastore_records(self, refs: Mapping[str, DatasetRef]) -> None: # Docstring inherited from base class. return - def make_datastore_tables(self, tables: Mapping[str, OpaqueTableDefinition]) -> None: + def make_datastore_tables(self, tables: Mapping[str, DatastoreOpaqueTable]) -> None: # Docstring inherited from base class. return diff --git a/python/lsst/daf/butler/registries/sql.py b/python/lsst/daf/butler/registries/sql.py index c14eafe344..b1a56345b9 100644 --- a/python/lsst/daf/butler/registries/sql.py +++ b/python/lsst/daf/butler/registries/sql.py @@ -88,7 +88,7 @@ if TYPE_CHECKING: from .._butler_config import ButlerConfig - from ..datastore._datastore import OpaqueTableDefinition + from ..datastore._datastore import DatastoreOpaqueTable from ..datastore.stored_file_info import StoredDatastoreItemInfo from ..registry._registry import CollectionArgType from ..registry.interfaces import ( @@ -1373,7 +1373,7 @@ def store_datastore_records(self, refs: Mapping[str, DatasetRef]) -> None: assert opaque_table is not None, f"Unexpected opaque table name {table_name}" opaque_table.insert(*(record.to_record(dataset_id=ref.id) for record in records)) - def make_datastore_tables(self, tables: Mapping[str, OpaqueTableDefinition]) -> None: + def make_datastore_tables(self, tables: Mapping[str, DatastoreOpaqueTable]) -> None: # Docstring inherited from base class. datastore_record_classes = {} diff --git a/python/lsst/daf/butler/registry/_butler_registry.py b/python/lsst/daf/butler/registry/_butler_registry.py index 3c5f20720f..96b3128373 100644 --- a/python/lsst/daf/butler/registry/_butler_registry.py +++ b/python/lsst/daf/butler/registry/_butler_registry.py @@ -44,7 +44,7 @@ if TYPE_CHECKING: from .._butler_config import ButlerConfig from .._dataset_ref import DatasetRef - from ..datastore import OpaqueTableDefinition + from ..datastore import DatastoreOpaqueTable from .interfaces import CollectionRecord, DatastoreRegistryBridgeManager @@ -255,7 +255,7 @@ def store_datastore_records(self, refs: Mapping[str, DatasetRef]) -> None: raise NotImplementedError() @abstractmethod - def make_datastore_tables(self, tables: Mapping[str, OpaqueTableDefinition]) -> None: + def make_datastore_tables(self, tables: Mapping[str, DatastoreOpaqueTable]) -> None: """Create opaque tables used by datastores. Parameters
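
For context, a minimal sketch (not part of the patch itself) of how the renamed DatastoreOpaqueTable definitions are expected to flow from a Datastore into the Registry after this change. The wire_datastore_tables helper name and the untyped registry argument are assumptions for illustration only; opaque_table_definitions() and make_datastore_tables() are the methods touched by this diff.

# Minimal sketch, assuming a Butler-like caller that holds both a Registry
# and a Datastore; the helper name is hypothetical and not part of the patch.
from collections.abc import Mapping

from lsst.daf.butler.datastore import Datastore, DatastoreOpaqueTable


def wire_datastore_tables(registry, datastore: Datastore) -> None:
    # Each datastore reports the opaque tables it needs, keyed by table name.
    # FileDatastore returns a single DatastoreOpaqueTable built from its table
    # spec and the StoredFileInfo record class; ChainedDatastore merges the
    # mappings of its children; in-memory and null datastores return {}.
    tables: Mapping[str, DatastoreOpaqueTable] = datastore.opaque_table_definitions()
    # The registry then creates (or reflects) those opaque tables and records
    # which class should be used to deserialize each table's rows.
    registry.make_datastore_tables(tables)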