From 5f3f141c55103ae949f726ccd88d1568e592dc69 Mon Sep 17 00:00:00 2001 From: Hammad Bashir Date: Wed, 27 Mar 2024 17:51:57 -0700 Subject: [PATCH] [CLN] Restructure EmbeddingRecord -> LogRecord. Use 'log_offset' as term instead of 'id' (#1934) ## Description of changes *Summarize the changes made by this PR.* - Improvements & Bug fixes - This PR follows #1933 and restructures EmbeddingRecord to be a nested type around OperationRecord. We rename EmbeddingRecord to be LogRecord. A LogRecord is a model of an OperationRecord stored on a log, and just keeps a log_offset. The log is assumed to be per-collection. - Rename the LogService's use of "ID" to instead be "log_offset" to standardize on the "log_offset" terminology. - Rename RecordLog -> LogRecord for conceptual clarity and to align with other types. - Fixes a proto style guide violation where we used camelCase for field names - https://protobuf.dev/programming-guides/style/#message_and_field_names. - In GetCollectionCompactionInfo rename first_log_id to first_log_offset - which is much clearer. Also rename first_log_id_ts to first_log_offset. What is an id_ts? Confusing name. 
- New functionality - None ## Test plan *How are these changes tested?* - [x] Tests pass locally with `pytest` for python, `yarn test` for js, `cargo test` for rust ## Documentation Changes None --- chromadb/db/mixins/embeddings_queue.py | 45 +- chromadb/ingest/__init__.py | 4 +- chromadb/logservice/logservice.py | 4 +- chromadb/proto/chroma_pb2.py | 72 +- chromadb/proto/chroma_pb2.pyi | 8 +- chromadb/proto/convert.py | 25 +- chromadb/proto/coordinator_pb2.py | 92 +-- chromadb/proto/coordinator_pb2.pyi | 26 +- chromadb/proto/logservice_pb2.py | 26 +- chromadb/proto/logservice_pb2.pyi | 14 +- chromadb/segment/impl/metadata/sqlite.py | 59 +- chromadb/segment/impl/vector/batch.py | 40 +- .../segment/impl/vector/brute_force_index.py | 14 +- chromadb/segment/impl/vector/local_hnsw.py | 16 +- .../impl/vector/local_persistent_hnsw.py | 24 +- .../test/ingest/test_producer_consumer.py | 28 +- chromadb/types.py | 23 +- go/migrations/20240327172649.sql | 4 + go/migrations/atlas.sum | 3 +- go/mocks/Catalog.go | 526 +++++++++++++++ go/mocks/CollectionMetadataValueType.go | 50 ++ go/mocks/Component.go | 60 ++ go/mocks/GrpcProvider.go | 58 ++ go/mocks/GrpcServer.go | 60 ++ go/mocks/ICollectionDb.go | 167 +++++ go/mocks/ICollectionMetadataDb.go | 91 +++ go/mocks/ICoordinator.go | 490 ++++++++++++++ go/mocks/IDatabaseDb.go | 123 ++++ go/mocks/IMemberlistManager.go | 60 ++ go/mocks/IMemberlistStore.go | 84 +++ go/mocks/IMetaDomain.go | 189 ++++++ go/mocks/INotificationDb.go | 141 ++++ go/mocks/IRecordLog.go | 156 +++++ go/mocks/IRecordLogDb.go | 117 ++++ go/mocks/ISegmentDb.go | 151 +++++ go/mocks/ISegmentMetadataDb.go | 99 +++ go/mocks/ITenantDb.go | 171 +++++ go/mocks/ITransaction.go | 46 ++ go/mocks/IWatcher.go | 96 +++ go/mocks/LogServiceClient.go | 143 ++++ go/mocks/LogServiceServer.go | 124 ++++ go/mocks/NodeWatcherCallback.go | 29 + go/mocks/NotificationProcessor.go | 88 +++ go/mocks/NotificationStore.go | 125 ++++ go/mocks/Notifier.go | 47 ++ 
go/mocks/SegmentMetadataValueType.go | 29 + go/mocks/SysDBClient.go | 625 ++++++++++++++++++ go/mocks/SysDBServer.go | 516 +++++++++++++++ go/mocks/UnsafeLogServiceServer.go | 29 + go/mocks/UnsafeSysDBServer.go | 29 + go/mocks/UnsafeVectorReaderServer.go | 29 + go/mocks/VectorReaderClient.go | 105 +++ go/mocks/VectorReaderServer.go | 94 +++ ...sUpdateCollectionRequest_MetadataUpdate.go | 29 + go/mocks/isUpdateMetadataValue_Value.go | 29 + ...isUpdateSegmentRequest_CollectionUpdate.go | 29 + .../isUpdateSegmentRequest_MetadataUpdate.go | 29 + go/pkg/logservice/grpc/record_log_service.go | 16 +- .../grpc/record_log_service_test.go | 28 +- go/pkg/metastore/db/dao/record_log.go | 26 +- go/pkg/metastore/db/dao/record_log_test.go | 22 +- go/pkg/metastore/db/dbcore/core.go | 3 +- go/pkg/metastore/db/dbmodel/record_log.go | 2 +- go/pkg/proto/coordinatorpb/chroma.pb.go | 234 +++---- go/pkg/proto/logservicepb/logservice.pb.go | 169 ++--- idl/chromadb/proto/chroma.proto | 4 +- idl/chromadb/proto/logservice.proto | 16 +- rust/worker/src/compactor/scheduler.rs | 12 +- .../src/execution/operators/pull_log.rs | 8 +- rust/worker/src/log/log.rs | 37 +- rust/worker/src/types/embedding_record.rs | 11 +- 71 files changed, 5632 insertions(+), 546 deletions(-) create mode 100644 go/migrations/20240327172649.sql create mode 100644 go/mocks/Catalog.go create mode 100644 go/mocks/CollectionMetadataValueType.go create mode 100644 go/mocks/Component.go create mode 100644 go/mocks/GrpcProvider.go create mode 100644 go/mocks/GrpcServer.go create mode 100644 go/mocks/ICollectionDb.go create mode 100644 go/mocks/ICollectionMetadataDb.go create mode 100644 go/mocks/ICoordinator.go create mode 100644 go/mocks/IDatabaseDb.go create mode 100644 go/mocks/IMemberlistManager.go create mode 100644 go/mocks/IMemberlistStore.go create mode 100644 go/mocks/IMetaDomain.go create mode 100644 go/mocks/INotificationDb.go create mode 100644 go/mocks/IRecordLog.go create mode 100644 go/mocks/IRecordLogDb.go 
create mode 100644 go/mocks/ISegmentDb.go create mode 100644 go/mocks/ISegmentMetadataDb.go create mode 100644 go/mocks/ITenantDb.go create mode 100644 go/mocks/ITransaction.go create mode 100644 go/mocks/IWatcher.go create mode 100644 go/mocks/LogServiceClient.go create mode 100644 go/mocks/LogServiceServer.go create mode 100644 go/mocks/NodeWatcherCallback.go create mode 100644 go/mocks/NotificationProcessor.go create mode 100644 go/mocks/NotificationStore.go create mode 100644 go/mocks/Notifier.go create mode 100644 go/mocks/SegmentMetadataValueType.go create mode 100644 go/mocks/SysDBClient.go create mode 100644 go/mocks/SysDBServer.go create mode 100644 go/mocks/UnsafeLogServiceServer.go create mode 100644 go/mocks/UnsafeSysDBServer.go create mode 100644 go/mocks/UnsafeVectorReaderServer.go create mode 100644 go/mocks/VectorReaderClient.go create mode 100644 go/mocks/VectorReaderServer.go create mode 100644 go/mocks/isUpdateCollectionRequest_MetadataUpdate.go create mode 100644 go/mocks/isUpdateMetadataValue_Value.go create mode 100644 go/mocks/isUpdateSegmentRequest_CollectionUpdate.go create mode 100644 go/mocks/isUpdateSegmentRequest_MetadataUpdate.go diff --git a/chromadb/db/mixins/embeddings_queue.py b/chromadb/db/mixins/embeddings_queue.py index 8a55209b70e..913e6dc347d 100644 --- a/chromadb/db/mixins/embeddings_queue.py +++ b/chromadb/db/mixins/embeddings_queue.py @@ -9,7 +9,7 @@ ) from chromadb.types import ( OperationRecord, - EmbeddingRecord, + LogRecord, ScalarEncoding, SeqId, Operation, @@ -188,14 +188,15 @@ def submit_embeddings( submit_embedding_record = embeddings[id_to_idx[id]] # We allow notifying consumers out of order relative to one call to # submit_embeddings so we do not reorder the records before submitting them - embedding_record = EmbeddingRecord( - id=id, - seq_id=seq_id, - embedding=submit_embedding_record["embedding"], - encoding=submit_embedding_record["encoding"], - metadata=submit_embedding_record["metadata"], - 
operation=submit_embedding_record["operation"], - collection_id=collection_id, + embedding_record = LogRecord( + log_offset=seq_id, + operation_record=OperationRecord( + id=id, + embedding=submit_embedding_record["embedding"], + encoding=submit_embedding_record["encoding"], + metadata=submit_embedding_record["metadata"], + operation=submit_embedding_record["operation"], + ), ) embedding_records.append(embedding_record) self._notify_all(topic_name, embedding_records) @@ -318,13 +319,15 @@ def _backfill(self, subscription: Subscription) -> None: self._notify_one( subscription, [ - EmbeddingRecord( - seq_id=row[0], - operation=_operation_codes_inv[row[1]], - id=row[2], - embedding=vector, - encoding=encoding, - metadata=json.loads(row[5]) if row[5] else None, + LogRecord( + log_offset=row[0], + operation_record=OperationRecord( + operation=_operation_codes_inv[row[1]], + id=row[2], + embedding=vector, + encoding=encoding, + metadata=json.loads(row[5]) if row[5] else None, + ), ) ], ) @@ -353,24 +356,22 @@ def _next_seq_id(self) -> int: return int(cur.fetchone()[0]) + 1 @trace_method("SqlEmbeddingsQueue._notify_all", OpenTelemetryGranularity.ALL) - def _notify_all(self, topic: str, embeddings: Sequence[EmbeddingRecord]) -> None: + def _notify_all(self, topic: str, embeddings: Sequence[LogRecord]) -> None: """Send a notification to each subscriber of the given topic.""" if self._running: for sub in self._subscriptions[topic]: self._notify_one(sub, embeddings) @trace_method("SqlEmbeddingsQueue._notify_one", OpenTelemetryGranularity.ALL) - def _notify_one( - self, sub: Subscription, embeddings: Sequence[EmbeddingRecord] - ) -> None: + def _notify_one(self, sub: Subscription, embeddings: Sequence[LogRecord]) -> None: """Send a notification to a single subscriber.""" # Filter out any embeddings that are not in the subscription range should_unsubscribe = False filtered_embeddings = [] for embedding in embeddings: - if embedding["seq_id"] <= sub.start: + if 
embedding["log_offset"] <= sub.start: continue - if embedding["seq_id"] > sub.end: + if embedding["log_offset"] > sub.end: should_unsubscribe = True break filtered_embeddings.append(embedding) diff --git a/chromadb/ingest/__init__.py b/chromadb/ingest/__init__.py index 39cda71525d..32873369420 100644 --- a/chromadb/ingest/__init__.py +++ b/chromadb/ingest/__init__.py @@ -2,7 +2,7 @@ from typing import Callable, Optional, Sequence from chromadb.types import ( OperationRecord, - EmbeddingRecord, + LogRecord, SeqId, Vector, ScalarEncoding, @@ -67,7 +67,7 @@ def max_batch_size(self) -> int: pass -ConsumerCallbackFn = Callable[[Sequence[EmbeddingRecord]], None] +ConsumerCallbackFn = Callable[[Sequence[LogRecord]], None] class Consumer(Component): diff --git a/chromadb/logservice/logservice.py b/chromadb/logservice/logservice.py index 61adb9a881c..e975a9d7f92 100644 --- a/chromadb/logservice/logservice.py +++ b/chromadb/logservice/logservice.py @@ -8,7 +8,7 @@ ConsumerCallbackFn, ) from chromadb.proto.convert import to_proto_submit -from chromadb.proto.logservice_pb2 import PushLogsRequest, PullLogsRequest, RecordLog +from chromadb.proto.logservice_pb2 import PushLogsRequest, PullLogsRequest, LogRecord from chromadb.proto.logservice_pb2_grpc import LogServiceStub from chromadb.telemetry.opentelemetry.grpc import OtelInterceptor from chromadb.types import ( @@ -149,7 +149,7 @@ def push_logs(self, collection_id: UUID, records: Sequence[OperationRecord]) -> def pull_logs( self, collection_id: UUID, start_id: int, batch_size: int - ) -> Sequence[RecordLog]: + ) -> Sequence[LogRecord]: request = PullLogsRequest( collection_id=str(collection_id), start_from_id=start_id, diff --git a/chromadb/proto/chroma_pb2.py b/chromadb/proto/chroma_pb2.py index df92b355aff..86b5a410a91 100644 --- a/chromadb/proto/chroma_pb2.py +++ b/chromadb/proto/chroma_pb2.py @@ -13,7 +13,7 @@ -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x63hromadb/proto/chroma.proto\x12\x06\x63hroma\"&\n\x06Status\x12\x0e\n\x06reason\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\x05\"U\n\x06Vector\x12\x11\n\tdimension\x18\x01 \x01(\x05\x12\x0e\n\x06vector\x18\x02 \x01(\x0c\x12(\n\x08\x65ncoding\x18\x03 \x01(\x0e\x32\x16.chroma.ScalarEncoding\"\x1a\n\tFilePaths\x12\r\n\x05paths\x18\x01 \x03(\t\"\xa5\x02\n\x07Segment\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12#\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScope\x12\x17\n\ncollection\x18\x05 \x01(\tH\x00\x88\x01\x01\x12-\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x88\x01\x01\x12\x32\n\nfile_paths\x18\x07 \x03(\x0b\x32\x1e.chroma.Segment.FilePathsEntry\x1a\x43\n\x0e\x46ilePathsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.chroma.FilePaths:\x02\x38\x01\x42\r\n\x0b_collectionB\x0b\n\t_metadata\"\xd0\x01\n\nCollection\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x08metadata\x18\x04 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\t\x12\x13\n\x0blogPosition\x18\x08 \x01(\x03\x12\x0f\n\x07version\x18\t \x01(\x05\x42\x0b\n\t_metadataB\x0c\n\n_dimension\"4\n\x08\x44\x61tabase\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"\x16\n\x06Tenant\x12\x0c\n\x04name\x18\x01 \x01(\t\"b\n\x13UpdateMetadataValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x01H\x00\x42\x07\n\x05value\"\x96\x01\n\x0eUpdateMetadata\x12\x36\n\x08metadata\x18\x01 \x03(\x0b\x32$.chroma.UpdateMetadata.MetadataEntry\x1aL\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 
\x01(\x0b\x32\x1b.chroma.UpdateMetadataValue:\x02\x38\x01\"\xaf\x01\n\x0fOperationRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12#\n\x06vector\x18\x02 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x88\x01\x01\x12$\n\toperation\x18\x04 \x01(\x0e\x32\x11.chroma.OperationB\t\n\x07_vectorB\x0b\n\t_metadata\"S\n\x15VectorEmbeddingRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x1e\n\x06vector\x18\x03 \x01(\x0b\x32\x0e.chroma.Vector\"q\n\x11VectorQueryResult\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x10\n\x08\x64istance\x18\x03 \x01(\x02\x12#\n\x06vector\x18\x04 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x42\t\n\x07_vector\"@\n\x12VectorQueryResults\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.chroma.VectorQueryResult\"4\n\x11GetVectorsRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\x12\n\nsegment_id\x18\x02 \x01(\t\"D\n\x12GetVectorsResponse\x12.\n\x07records\x18\x01 \x03(\x0b\x32\x1d.chroma.VectorEmbeddingRecord\"\x86\x01\n\x13QueryVectorsRequest\x12\x1f\n\x07vectors\x18\x01 \x03(\x0b\x32\x0e.chroma.Vector\x12\t\n\x01k\x18\x02 \x01(\x05\x12\x13\n\x0b\x61llowed_ids\x18\x03 \x03(\t\x12\x1a\n\x12include_embeddings\x18\x04 \x01(\x08\x12\x12\n\nsegment_id\x18\x05 \x01(\t\"C\n\x14QueryVectorsResponse\x12+\n\x07results\x18\x01 \x03(\x0b\x32\x1a.chroma.VectorQueryResults*8\n\tOperation\x12\x07\n\x03\x41\x44\x44\x10\x00\x12\n\n\x06UPDATE\x10\x01\x12\n\n\x06UPSERT\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03*(\n\x0eScalarEncoding\x12\x0b\n\x07\x46LOAT32\x10\x00\x12\t\n\x05INT32\x10\x01*(\n\x0cSegmentScope\x12\n\n\x06VECTOR\x10\x00\x12\x0c\n\x08METADATA\x10\x01\x32\xa2\x01\n\x0cVectorReader\x12\x45\n\nGetVectors\x12\x19.chroma.GetVectorsRequest\x1a\x1a.chroma.GetVectorsResponse\"\x00\x12K\n\x0cQueryVectors\x12\x1b.chroma.QueryVectorsRequest\x1a\x1c.chroma.QueryVectorsResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') 
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x63hromadb/proto/chroma.proto\x12\x06\x63hroma\"&\n\x06Status\x12\x0e\n\x06reason\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\x05\"U\n\x06Vector\x12\x11\n\tdimension\x18\x01 \x01(\x05\x12\x0e\n\x06vector\x18\x02 \x01(\x0c\x12(\n\x08\x65ncoding\x18\x03 \x01(\x0e\x32\x16.chroma.ScalarEncoding\"\x1a\n\tFilePaths\x12\r\n\x05paths\x18\x01 \x03(\t\"\xa5\x02\n\x07Segment\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12#\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScope\x12\x17\n\ncollection\x18\x05 \x01(\tH\x00\x88\x01\x01\x12-\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x88\x01\x01\x12\x32\n\nfile_paths\x18\x07 \x03(\x0b\x32\x1e.chroma.Segment.FilePathsEntry\x1a\x43\n\x0e\x46ilePathsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.chroma.FilePaths:\x02\x38\x01\x42\r\n\x0b_collectionB\x0b\n\t_metadata\"\xd1\x01\n\nCollection\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x08metadata\x18\x04 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\t\x12\x14\n\x0clog_position\x18\x08 \x01(\x03\x12\x0f\n\x07version\x18\t \x01(\x05\x42\x0b\n\t_metadataB\x0c\n\n_dimension\"4\n\x08\x44\x61tabase\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"\x16\n\x06Tenant\x12\x0c\n\x04name\x18\x01 \x01(\t\"b\n\x13UpdateMetadataValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x15\n\x0b\x66loat_value\x18\x03 \x01(\x01H\x00\x42\x07\n\x05value\"\x96\x01\n\x0eUpdateMetadata\x12\x36\n\x08metadata\x18\x01 \x03(\x0b\x32$.chroma.UpdateMetadata.MetadataEntry\x1aL\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 
\x01(\x0b\x32\x1b.chroma.UpdateMetadataValue:\x02\x38\x01\"\xaf\x01\n\x0fOperationRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12#\n\x06vector\x18\x02 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x88\x01\x01\x12$\n\toperation\x18\x04 \x01(\x0e\x32\x11.chroma.OperationB\t\n\x07_vectorB\x0b\n\t_metadata\"S\n\x15VectorEmbeddingRecord\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x1e\n\x06vector\x18\x03 \x01(\x0b\x32\x0e.chroma.Vector\"q\n\x11VectorQueryResult\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06seq_id\x18\x02 \x01(\x0c\x12\x10\n\x08\x64istance\x18\x03 \x01(\x02\x12#\n\x06vector\x18\x04 \x01(\x0b\x32\x0e.chroma.VectorH\x00\x88\x01\x01\x42\t\n\x07_vector\"@\n\x12VectorQueryResults\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.chroma.VectorQueryResult\"4\n\x11GetVectorsRequest\x12\x0b\n\x03ids\x18\x01 \x03(\t\x12\x12\n\nsegment_id\x18\x02 \x01(\t\"D\n\x12GetVectorsResponse\x12.\n\x07records\x18\x01 \x03(\x0b\x32\x1d.chroma.VectorEmbeddingRecord\"\x86\x01\n\x13QueryVectorsRequest\x12\x1f\n\x07vectors\x18\x01 \x03(\x0b\x32\x0e.chroma.Vector\x12\t\n\x01k\x18\x02 \x01(\x05\x12\x13\n\x0b\x61llowed_ids\x18\x03 \x03(\t\x12\x1a\n\x12include_embeddings\x18\x04 \x01(\x08\x12\x12\n\nsegment_id\x18\x05 \x01(\t\"C\n\x14QueryVectorsResponse\x12+\n\x07results\x18\x01 \x03(\x0b\x32\x1a.chroma.VectorQueryResults*8\n\tOperation\x12\x07\n\x03\x41\x44\x44\x10\x00\x12\n\n\x06UPDATE\x10\x01\x12\n\n\x06UPSERT\x10\x02\x12\n\n\x06\x44\x45LETE\x10\x03*(\n\x0eScalarEncoding\x12\x0b\n\x07\x46LOAT32\x10\x00\x12\t\n\x05INT32\x10\x01*(\n\x0cSegmentScope\x12\n\n\x06VECTOR\x10\x00\x12\x0c\n\x08METADATA\x10\x01\x32\xa2\x01\n\x0cVectorReader\x12\x45\n\nGetVectors\x12\x19.chroma.GetVectorsRequest\x1a\x1a.chroma.GetVectorsResponse\"\x00\x12K\n\x0cQueryVectors\x12\x1b.chroma.QueryVectorsRequest\x1a\x1c.chroma.QueryVectorsResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') 
_globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,12 +25,12 @@ _SEGMENT_FILEPATHSENTRY._serialized_options = b'8\001' _UPDATEMETADATA_METADATAENTRY._options = None _UPDATEMETADATA_METADATAENTRY._serialized_options = b'8\001' - _globals['_OPERATION']._serialized_start=1806 - _globals['_OPERATION']._serialized_end=1862 - _globals['_SCALARENCODING']._serialized_start=1864 - _globals['_SCALARENCODING']._serialized_end=1904 - _globals['_SEGMENTSCOPE']._serialized_start=1906 - _globals['_SEGMENTSCOPE']._serialized_end=1946 + _globals['_OPERATION']._serialized_start=1807 + _globals['_OPERATION']._serialized_end=1863 + _globals['_SCALARENCODING']._serialized_start=1865 + _globals['_SCALARENCODING']._serialized_end=1905 + _globals['_SEGMENTSCOPE']._serialized_start=1907 + _globals['_SEGMENTSCOPE']._serialized_end=1947 _globals['_STATUS']._serialized_start=39 _globals['_STATUS']._serialized_end=77 _globals['_VECTOR']._serialized_start=79 @@ -42,33 +42,33 @@ _globals['_SEGMENT_FILEPATHSENTRY']._serialized_start=393 _globals['_SEGMENT_FILEPATHSENTRY']._serialized_end=460 _globals['_COLLECTION']._serialized_start=491 - _globals['_COLLECTION']._serialized_end=699 - _globals['_DATABASE']._serialized_start=701 - _globals['_DATABASE']._serialized_end=753 - _globals['_TENANT']._serialized_start=755 - _globals['_TENANT']._serialized_end=777 - _globals['_UPDATEMETADATAVALUE']._serialized_start=779 - _globals['_UPDATEMETADATAVALUE']._serialized_end=877 - _globals['_UPDATEMETADATA']._serialized_start=880 - _globals['_UPDATEMETADATA']._serialized_end=1030 - _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_start=954 - _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_end=1030 - _globals['_OPERATIONRECORD']._serialized_start=1033 - _globals['_OPERATIONRECORD']._serialized_end=1208 - _globals['_VECTOREMBEDDINGRECORD']._serialized_start=1210 - _globals['_VECTOREMBEDDINGRECORD']._serialized_end=1293 - 
_globals['_VECTORQUERYRESULT']._serialized_start=1295 - _globals['_VECTORQUERYRESULT']._serialized_end=1408 - _globals['_VECTORQUERYRESULTS']._serialized_start=1410 - _globals['_VECTORQUERYRESULTS']._serialized_end=1474 - _globals['_GETVECTORSREQUEST']._serialized_start=1476 - _globals['_GETVECTORSREQUEST']._serialized_end=1528 - _globals['_GETVECTORSRESPONSE']._serialized_start=1530 - _globals['_GETVECTORSRESPONSE']._serialized_end=1598 - _globals['_QUERYVECTORSREQUEST']._serialized_start=1601 - _globals['_QUERYVECTORSREQUEST']._serialized_end=1735 - _globals['_QUERYVECTORSRESPONSE']._serialized_start=1737 - _globals['_QUERYVECTORSRESPONSE']._serialized_end=1804 - _globals['_VECTORREADER']._serialized_start=1949 - _globals['_VECTORREADER']._serialized_end=2111 + _globals['_COLLECTION']._serialized_end=700 + _globals['_DATABASE']._serialized_start=702 + _globals['_DATABASE']._serialized_end=754 + _globals['_TENANT']._serialized_start=756 + _globals['_TENANT']._serialized_end=778 + _globals['_UPDATEMETADATAVALUE']._serialized_start=780 + _globals['_UPDATEMETADATAVALUE']._serialized_end=878 + _globals['_UPDATEMETADATA']._serialized_start=881 + _globals['_UPDATEMETADATA']._serialized_end=1031 + _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_start=955 + _globals['_UPDATEMETADATA_METADATAENTRY']._serialized_end=1031 + _globals['_OPERATIONRECORD']._serialized_start=1034 + _globals['_OPERATIONRECORD']._serialized_end=1209 + _globals['_VECTOREMBEDDINGRECORD']._serialized_start=1211 + _globals['_VECTOREMBEDDINGRECORD']._serialized_end=1294 + _globals['_VECTORQUERYRESULT']._serialized_start=1296 + _globals['_VECTORQUERYRESULT']._serialized_end=1409 + _globals['_VECTORQUERYRESULTS']._serialized_start=1411 + _globals['_VECTORQUERYRESULTS']._serialized_end=1475 + _globals['_GETVECTORSREQUEST']._serialized_start=1477 + _globals['_GETVECTORSREQUEST']._serialized_end=1529 + _globals['_GETVECTORSRESPONSE']._serialized_start=1531 + 
_globals['_GETVECTORSRESPONSE']._serialized_end=1599 + _globals['_QUERYVECTORSREQUEST']._serialized_start=1602 + _globals['_QUERYVECTORSREQUEST']._serialized_end=1736 + _globals['_QUERYVECTORSRESPONSE']._serialized_start=1738 + _globals['_QUERYVECTORSRESPONSE']._serialized_end=1805 + _globals['_VECTORREADER']._serialized_start=1950 + _globals['_VECTORREADER']._serialized_end=2112 # @@protoc_insertion_point(module_scope) diff --git a/chromadb/proto/chroma_pb2.pyi b/chromadb/proto/chroma_pb2.pyi index 1e4ac4979a2..edf3004e0c1 100644 --- a/chromadb/proto/chroma_pb2.pyi +++ b/chromadb/proto/chroma_pb2.pyi @@ -79,14 +79,14 @@ class Segment(_message.Message): def __init__(self, id: _Optional[str] = ..., type: _Optional[str] = ..., scope: _Optional[_Union[SegmentScope, str]] = ..., collection: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ..., file_paths: _Optional[_Mapping[str, FilePaths]] = ...) -> None: ... class Collection(_message.Message): - __slots__ = ["id", "name", "metadata", "dimension", "tenant", "database", "logPosition", "version"] + __slots__ = ["id", "name", "metadata", "dimension", "tenant", "database", "log_position", "version"] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] DIMENSION_FIELD_NUMBER: _ClassVar[int] TENANT_FIELD_NUMBER: _ClassVar[int] DATABASE_FIELD_NUMBER: _ClassVar[int] - LOGPOSITION_FIELD_NUMBER: _ClassVar[int] + LOG_POSITION_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] id: str name: str @@ -94,9 +94,9 @@ class Collection(_message.Message): dimension: int tenant: str database: str - logPosition: int + log_position: int version: int - def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ..., dimension: _Optional[int] = ..., tenant: _Optional[str] = ..., database: _Optional[str] = ..., logPosition: _Optional[int] = ..., version: _Optional[int] = ...) 
-> None: ... + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., metadata: _Optional[_Union[UpdateMetadata, _Mapping]] = ..., dimension: _Optional[int] = ..., tenant: _Optional[str] = ..., database: _Optional[str] = ..., log_position: _Optional[int] = ..., version: _Optional[int] = ...) -> None: ... class Database(_message.Message): __slots__ = ["id", "name", "tenant"] diff --git a/chromadb/proto/convert.py b/chromadb/proto/convert.py index 6a5f93d31dd..e81b58ea781 100644 --- a/chromadb/proto/convert.py +++ b/chromadb/proto/convert.py @@ -6,7 +6,7 @@ from chromadb.utils.messageid import bytes_to_int, int_to_bytes from chromadb.types import ( Collection, - EmbeddingRecord, + LogRecord, Metadata, Operation, ScalarEncoding, @@ -112,17 +112,18 @@ def to_proto_update_metadata(metadata: UpdateMetadata) -> proto.UpdateMetadata: def from_proto_submit( - submit_embedding_record: proto.OperationRecord, seq_id: SeqId -) -> EmbeddingRecord: - embedding, encoding = from_proto_vector(submit_embedding_record.vector) - record = EmbeddingRecord( - id=submit_embedding_record.id, - seq_id=seq_id, - embedding=embedding, - encoding=encoding, - metadata=from_proto_update_metadata(submit_embedding_record.metadata), - operation=from_proto_operation(submit_embedding_record.operation), - collection_id=UUID(hex=submit_embedding_record.collection_id), + operation_record: proto.OperationRecord, seq_id: SeqId +) -> LogRecord: + embedding, encoding = from_proto_vector(operation_record.vector) + record = LogRecord( + log_offset=seq_id, + operation_record=OperationRecord( + id=operation_record.id, + embedding=embedding, + encoding=encoding, + metadata=from_proto_update_metadata(operation_record.metadata), + operation=from_proto_operation(operation_record.operation), + ), ) return record diff --git a/chromadb/proto/coordinator_pb2.py b/chromadb/proto/coordinator_pb2.py index 7264a86f038..fde4981b6c2 100644 --- a/chromadb/proto/coordinator_pb2.py +++ 
b/chromadb/proto/coordinator_pb2.py @@ -15,7 +15,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n chromadb/proto/coordinator.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto\x1a\x1bgoogle/protobuf/empty.proto\"A\n\x15\x43reateDatabaseRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"8\n\x16\x43reateDatabaseResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"2\n\x12GetDatabaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\"Y\n\x13GetDatabaseResponse\x12\"\n\x08\x64\x61tabase\x18\x01 \x01(\x0b\x32\x10.chroma.Database\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"#\n\x13\x43reateTenantRequest\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\x14\x43reateTenantResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\" \n\x10GetTenantRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"S\n\x11GetTenantResponse\x12\x1e\n\x06tenant\x18\x01 \x01(\x0b\x32\x0e.chroma.Tenant\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"8\n\x14\x43reateSegmentRequest\x12 \n\x07segment\x18\x01 \x01(\x0b\x32\x0f.chroma.Segment\"7\n\x15\x43reateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\"\n\x14\x44\x65leteSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\"7\n\x15\x44\x65leteSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xc2\x01\n\x12GetSegmentsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04type\x18\x02 \x01(\tH\x01\x88\x01\x01\x12(\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScopeH\x02\x88\x01\x01\x12\x12\n\x05topic\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x17\n\ncollection\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x05\n\x03_idB\x07\n\x05_typeB\x08\n\x06_scopeB\x08\n\x06_topicB\r\n\x0b_collection\"X\n\x13GetSegmentsResponse\x12!\n\x08segments\x18\x01 
\x03(\x0b\x32\x0f.chroma.Segment\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xfa\x01\n\x14UpdateSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x05topic\x18\x02 \x01(\tH\x00\x12\x15\n\x0breset_topic\x18\x03 \x01(\x08H\x00\x12\x14\n\ncollection\x18\x04 \x01(\tH\x01\x12\x1a\n\x10reset_collection\x18\x05 \x01(\x08H\x01\x12*\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x02\x12\x18\n\x0ereset_metadata\x18\x07 \x01(\x08H\x02\x42\x0e\n\x0ctopic_updateB\x13\n\x11\x63ollection_updateB\x11\n\x0fmetadata_update\"7\n\x15UpdateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xe5\x01\n\x17\x43reateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x01\x88\x01\x01\x12\x1a\n\rget_or_create\x18\x05 \x01(\x08H\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\tB\x0b\n\t_metadataB\x0c\n\n_dimensionB\x10\n\x0e_get_or_create\"s\n\x18\x43reateCollectionResponse\x12&\n\ncollection\x18\x01 \x01(\x0b\x32\x12.chroma.Collection\x12\x0f\n\x07\x63reated\x18\x02 \x01(\x08\x12\x1e\n\x06status\x18\x03 \x01(\x0b\x32\x0e.chroma.Status\"G\n\x17\x44\x65leteCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x03 \x01(\t\":\n\x18\x44\x65leteCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\x8b\x01\n\x15GetCollectionsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x12\n\x05topic\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x04 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x05 \x01(\tB\x05\n\x03_idB\x07\n\x05_nameB\x08\n\x06_topic\"a\n\x16GetCollectionsResponse\x12\'\n\x0b\x63ollections\x18\x01 \x03(\x0b\x32\x12.chroma.Collection\x12\x1e\n\x06status\x18\x02 
\x01(\x0b\x32\x0e.chroma.Status\"\xde\x01\n\x17UpdateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\x05topic\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x11\n\x04name\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x03\x88\x01\x01\x12*\n\x08metadata\x18\x05 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x12\x18\n\x0ereset_metadata\x18\x06 \x01(\x08H\x00\x42\x11\n\x0fmetadata_updateB\x08\n\x06_topicB\x07\n\x05_nameB\x0c\n\n_dimension\":\n\x18UpdateCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"O\n\x0cNotification\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12\x0e\n\x06status\x18\x04 \x01(\t\"4\n\x12ResetStateResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\":\n%GetLastCompactionTimeForTenantRequest\x12\x11\n\ttenant_id\x18\x01 \x03(\t\"K\n\x18TenantLastCompactionTime\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x1c\n\x14last_compaction_time\x18\x02 \x01(\x03\"o\n&GetLastCompactionTimeForTenantResponse\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x03(\x0b\x32 .chroma.TenantLastCompactionTime\"n\n%SetLastCompactionTimeForTenantRequest\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x01(\x0b\x32 .chroma.TenantLastCompactionTime\"\xbc\x01\n\x1a\x46lushSegmentCompactionInfo\x12\x12\n\nsegment_id\x18\x01 \x01(\t\x12\x45\n\nfile_paths\x18\x02 \x03(\x0b\x32\x31.chroma.FlushSegmentCompactionInfo.FilePathsEntry\x1a\x43\n\x0e\x46ilePathsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.chroma.FilePaths:\x02\x38\x01\"\xc3\x01\n FlushCollectionCompactionRequest\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x14\n\x0clog_position\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ollection_version\x18\x04 \x01(\x05\x12\x43\n\x17segment_compaction_info\x18\x05 \x03(\x0b\x32\".chroma.FlushSegmentCompactionInfo\"t\n!FlushCollectionCompactionResponse\x12\x15\n\rcollection_id\x18\x01 
\x01(\t\x12\x1a\n\x12\x63ollection_version\x18\x02 \x01(\x05\x12\x1c\n\x14last_compaction_time\x18\x03 \x01(\x03\x32\xf4\n\n\x05SysDB\x12Q\n\x0e\x43reateDatabase\x12\x1d.chroma.CreateDatabaseRequest\x1a\x1e.chroma.CreateDatabaseResponse\"\x00\x12H\n\x0bGetDatabase\x12\x1a.chroma.GetDatabaseRequest\x1a\x1b.chroma.GetDatabaseResponse\"\x00\x12K\n\x0c\x43reateTenant\x12\x1b.chroma.CreateTenantRequest\x1a\x1c.chroma.CreateTenantResponse\"\x00\x12\x42\n\tGetTenant\x12\x18.chroma.GetTenantRequest\x1a\x19.chroma.GetTenantResponse\"\x00\x12N\n\rCreateSegment\x12\x1c.chroma.CreateSegmentRequest\x1a\x1d.chroma.CreateSegmentResponse\"\x00\x12N\n\rDeleteSegment\x12\x1c.chroma.DeleteSegmentRequest\x1a\x1d.chroma.DeleteSegmentResponse\"\x00\x12H\n\x0bGetSegments\x12\x1a.chroma.GetSegmentsRequest\x1a\x1b.chroma.GetSegmentsResponse\"\x00\x12N\n\rUpdateSegment\x12\x1c.chroma.UpdateSegmentRequest\x1a\x1d.chroma.UpdateSegmentResponse\"\x00\x12W\n\x10\x43reateCollection\x12\x1f.chroma.CreateCollectionRequest\x1a .chroma.CreateCollectionResponse\"\x00\x12W\n\x10\x44\x65leteCollection\x12\x1f.chroma.DeleteCollectionRequest\x1a .chroma.DeleteCollectionResponse\"\x00\x12Q\n\x0eGetCollections\x12\x1d.chroma.GetCollectionsRequest\x1a\x1e.chroma.GetCollectionsResponse\"\x00\x12W\n\x10UpdateCollection\x12\x1f.chroma.UpdateCollectionRequest\x1a .chroma.UpdateCollectionResponse\"\x00\x12\x42\n\nResetState\x12\x16.google.protobuf.Empty\x1a\x1a.chroma.ResetStateResponse\"\x00\x12\x81\x01\n\x1eGetLastCompactionTimeForTenant\x12-.chroma.GetLastCompactionTimeForTenantRequest\x1a..chroma.GetLastCompactionTimeForTenantResponse\"\x00\x12i\n\x1eSetLastCompactionTimeForTenant\x12-.chroma.SetLastCompactionTimeForTenantRequest\x1a\x16.google.protobuf.Empty\"\x00\x12r\n\x19\x46lushCollectionCompaction\x12(.chroma.FlushCollectionCompactionRequest\x1a).chroma.FlushCollectionCompactionResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n chromadb/proto/coordinator.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto\x1a\x1bgoogle/protobuf/empty.proto\"A\n\x15\x43reateDatabaseRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06tenant\x18\x03 \x01(\t\"8\n\x16\x43reateDatabaseResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"2\n\x12GetDatabaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\"Y\n\x13GetDatabaseResponse\x12\"\n\x08\x64\x61tabase\x18\x01 \x01(\x0b\x32\x10.chroma.Database\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"#\n\x13\x43reateTenantRequest\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\x14\x43reateTenantResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\" \n\x10GetTenantRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"S\n\x11GetTenantResponse\x12\x1e\n\x06tenant\x18\x01 \x01(\x0b\x32\x0e.chroma.Tenant\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"8\n\x14\x43reateSegmentRequest\x12 \n\x07segment\x18\x01 \x01(\x0b\x32\x0f.chroma.Segment\"7\n\x15\x43reateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\"\n\x14\x44\x65leteSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\"7\n\x15\x44\x65leteSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xa4\x01\n\x12GetSegmentsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04type\x18\x02 \x01(\tH\x01\x88\x01\x01\x12(\n\x05scope\x18\x03 \x01(\x0e\x32\x14.chroma.SegmentScopeH\x02\x88\x01\x01\x12\x17\n\ncollection\x18\x05 \x01(\tH\x03\x88\x01\x01\x42\x05\n\x03_idB\x07\n\x05_typeB\x08\n\x06_scopeB\r\n\x0b_collection\"X\n\x13GetSegmentsResponse\x12!\n\x08segments\x18\x01 \x03(\x0b\x32\x0f.chroma.Segment\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xc2\x01\n\x14UpdateSegmentRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x14\n\ncollection\x18\x04 \x01(\tH\x00\x12\x1a\n\x10reset_collection\x18\x05 
\x01(\x08H\x00\x12*\n\x08metadata\x18\x06 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x01\x12\x18\n\x0ereset_metadata\x18\x07 \x01(\x08H\x01\x42\x13\n\x11\x63ollection_updateB\x11\n\x0fmetadata_update\"7\n\x15UpdateSegmentResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"\xe5\x01\n\x17\x43reateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12-\n\x08metadata\x18\x03 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x01\x88\x01\x01\x12\x1a\n\rget_or_create\x18\x05 \x01(\x08H\x02\x88\x01\x01\x12\x0e\n\x06tenant\x18\x06 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x07 \x01(\tB\x0b\n\t_metadataB\x0c\n\n_dimensionB\x10\n\x0e_get_or_create\"s\n\x18\x43reateCollectionResponse\x12&\n\ncollection\x18\x01 \x01(\x0b\x32\x12.chroma.Collection\x12\x0f\n\x07\x63reated\x18\x02 \x01(\x08\x12\x1e\n\x06status\x18\x03 \x01(\x0b\x32\x0e.chroma.Status\"G\n\x17\x44\x65leteCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06tenant\x18\x02 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x03 \x01(\t\":\n\x18\x44\x65leteCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"m\n\x15GetCollectionsRequest\x12\x0f\n\x02id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x11\n\x04name\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\x0e\n\x06tenant\x18\x04 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x05 \x01(\tB\x05\n\x03_idB\x07\n\x05_name\"a\n\x16GetCollectionsResponse\x12\'\n\x0b\x63ollections\x18\x01 \x03(\x0b\x32\x12.chroma.Collection\x12\x1e\n\x06status\x18\x02 \x01(\x0b\x32\x0e.chroma.Status\"\xc0\x01\n\x17UpdateCollectionRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\x04name\x18\x03 \x01(\tH\x01\x88\x01\x01\x12\x16\n\tdimension\x18\x04 \x01(\x05H\x02\x88\x01\x01\x12*\n\x08metadata\x18\x05 \x01(\x0b\x32\x16.chroma.UpdateMetadataH\x00\x12\x18\n\x0ereset_metadata\x18\x06 
\x01(\x08H\x00\x42\x11\n\x0fmetadata_updateB\x07\n\x05_nameB\x0c\n\n_dimension\":\n\x18UpdateCollectionResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\"O\n\x0cNotification\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12\x0e\n\x06status\x18\x04 \x01(\t\"4\n\x12ResetStateResponse\x12\x1e\n\x06status\x18\x01 \x01(\x0b\x32\x0e.chroma.Status\":\n%GetLastCompactionTimeForTenantRequest\x12\x11\n\ttenant_id\x18\x01 \x03(\t\"K\n\x18TenantLastCompactionTime\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x1c\n\x14last_compaction_time\x18\x02 \x01(\x03\"o\n&GetLastCompactionTimeForTenantResponse\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x03(\x0b\x32 .chroma.TenantLastCompactionTime\"n\n%SetLastCompactionTimeForTenantRequest\x12\x45\n\x1btenant_last_compaction_time\x18\x01 \x01(\x0b\x32 .chroma.TenantLastCompactionTime\"\xbc\x01\n\x1a\x46lushSegmentCompactionInfo\x12\x12\n\nsegment_id\x18\x01 \x01(\t\x12\x45\n\nfile_paths\x18\x02 \x03(\x0b\x32\x31.chroma.FlushSegmentCompactionInfo.FilePathsEntry\x1a\x43\n\x0e\x46ilePathsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.chroma.FilePaths:\x02\x38\x01\"\xc3\x01\n FlushCollectionCompactionRequest\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x14\n\x0clog_position\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ollection_version\x18\x04 \x01(\x05\x12\x43\n\x17segment_compaction_info\x18\x05 \x03(\x0b\x32\".chroma.FlushSegmentCompactionInfo\"t\n!FlushCollectionCompactionResponse\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x1a\n\x12\x63ollection_version\x18\x02 \x01(\x05\x12\x1c\n\x14last_compaction_time\x18\x03 
\x01(\x03\x32\xf4\n\n\x05SysDB\x12Q\n\x0e\x43reateDatabase\x12\x1d.chroma.CreateDatabaseRequest\x1a\x1e.chroma.CreateDatabaseResponse\"\x00\x12H\n\x0bGetDatabase\x12\x1a.chroma.GetDatabaseRequest\x1a\x1b.chroma.GetDatabaseResponse\"\x00\x12K\n\x0c\x43reateTenant\x12\x1b.chroma.CreateTenantRequest\x1a\x1c.chroma.CreateTenantResponse\"\x00\x12\x42\n\tGetTenant\x12\x18.chroma.GetTenantRequest\x1a\x19.chroma.GetTenantResponse\"\x00\x12N\n\rCreateSegment\x12\x1c.chroma.CreateSegmentRequest\x1a\x1d.chroma.CreateSegmentResponse\"\x00\x12N\n\rDeleteSegment\x12\x1c.chroma.DeleteSegmentRequest\x1a\x1d.chroma.DeleteSegmentResponse\"\x00\x12H\n\x0bGetSegments\x12\x1a.chroma.GetSegmentsRequest\x1a\x1b.chroma.GetSegmentsResponse\"\x00\x12N\n\rUpdateSegment\x12\x1c.chroma.UpdateSegmentRequest\x1a\x1d.chroma.UpdateSegmentResponse\"\x00\x12W\n\x10\x43reateCollection\x12\x1f.chroma.CreateCollectionRequest\x1a .chroma.CreateCollectionResponse\"\x00\x12W\n\x10\x44\x65leteCollection\x12\x1f.chroma.DeleteCollectionRequest\x1a .chroma.DeleteCollectionResponse\"\x00\x12Q\n\x0eGetCollections\x12\x1d.chroma.GetCollectionsRequest\x1a\x1e.chroma.GetCollectionsResponse\"\x00\x12W\n\x10UpdateCollection\x12\x1f.chroma.UpdateCollectionRequest\x1a .chroma.UpdateCollectionResponse\"\x00\x12\x42\n\nResetState\x12\x16.google.protobuf.Empty\x1a\x1a.chroma.ResetStateResponse\"\x00\x12\x81\x01\n\x1eGetLastCompactionTimeForTenant\x12-.chroma.GetLastCompactionTimeForTenantRequest\x1a..chroma.GetLastCompactionTimeForTenantResponse\"\x00\x12i\n\x1eSetLastCompactionTimeForTenant\x12-.chroma.SetLastCompactionTimeForTenantRequest\x1a\x16.google.protobuf.Empty\"\x00\x12r\n\x19\x46lushCollectionCompaction\x12(.chroma.FlushCollectionCompactionRequest\x1a).chroma.FlushCollectionCompactionResponse\"\x00\x42:Z8github.com/chroma-core/chroma/go/pkg/proto/coordinatorpbb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -50,49 +50,49 @@ 
_globals['_DELETESEGMENTRESPONSE']._serialized_start=733 _globals['_DELETESEGMENTRESPONSE']._serialized_end=788 _globals['_GETSEGMENTSREQUEST']._serialized_start=791 - _globals['_GETSEGMENTSREQUEST']._serialized_end=985 - _globals['_GETSEGMENTSRESPONSE']._serialized_start=987 - _globals['_GETSEGMENTSRESPONSE']._serialized_end=1075 - _globals['_UPDATESEGMENTREQUEST']._serialized_start=1078 - _globals['_UPDATESEGMENTREQUEST']._serialized_end=1328 - _globals['_UPDATESEGMENTRESPONSE']._serialized_start=1330 - _globals['_UPDATESEGMENTRESPONSE']._serialized_end=1385 - _globals['_CREATECOLLECTIONREQUEST']._serialized_start=1388 - _globals['_CREATECOLLECTIONREQUEST']._serialized_end=1617 - _globals['_CREATECOLLECTIONRESPONSE']._serialized_start=1619 - _globals['_CREATECOLLECTIONRESPONSE']._serialized_end=1734 - _globals['_DELETECOLLECTIONREQUEST']._serialized_start=1736 - _globals['_DELETECOLLECTIONREQUEST']._serialized_end=1807 - _globals['_DELETECOLLECTIONRESPONSE']._serialized_start=1809 - _globals['_DELETECOLLECTIONRESPONSE']._serialized_end=1867 - _globals['_GETCOLLECTIONSREQUEST']._serialized_start=1870 - _globals['_GETCOLLECTIONSREQUEST']._serialized_end=2009 - _globals['_GETCOLLECTIONSRESPONSE']._serialized_start=2011 - _globals['_GETCOLLECTIONSRESPONSE']._serialized_end=2108 - _globals['_UPDATECOLLECTIONREQUEST']._serialized_start=2111 - _globals['_UPDATECOLLECTIONREQUEST']._serialized_end=2333 - _globals['_UPDATECOLLECTIONRESPONSE']._serialized_start=2335 - _globals['_UPDATECOLLECTIONRESPONSE']._serialized_end=2393 - _globals['_NOTIFICATION']._serialized_start=2395 - _globals['_NOTIFICATION']._serialized_end=2474 - _globals['_RESETSTATERESPONSE']._serialized_start=2476 - _globals['_RESETSTATERESPONSE']._serialized_end=2528 - _globals['_GETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_start=2530 - _globals['_GETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_end=2588 - _globals['_TENANTLASTCOMPACTIONTIME']._serialized_start=2590 - 
_globals['_TENANTLASTCOMPACTIONTIME']._serialized_end=2665 - _globals['_GETLASTCOMPACTIONTIMEFORTENANTRESPONSE']._serialized_start=2667 - _globals['_GETLASTCOMPACTIONTIMEFORTENANTRESPONSE']._serialized_end=2778 - _globals['_SETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_start=2780 - _globals['_SETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_end=2890 - _globals['_FLUSHSEGMENTCOMPACTIONINFO']._serialized_start=2893 - _globals['_FLUSHSEGMENTCOMPACTIONINFO']._serialized_end=3081 - _globals['_FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY']._serialized_start=3014 - _globals['_FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY']._serialized_end=3081 - _globals['_FLUSHCOLLECTIONCOMPACTIONREQUEST']._serialized_start=3084 - _globals['_FLUSHCOLLECTIONCOMPACTIONREQUEST']._serialized_end=3279 - _globals['_FLUSHCOLLECTIONCOMPACTIONRESPONSE']._serialized_start=3281 - _globals['_FLUSHCOLLECTIONCOMPACTIONRESPONSE']._serialized_end=3397 - _globals['_SYSDB']._serialized_start=3400 - _globals['_SYSDB']._serialized_end=4796 + _globals['_GETSEGMENTSREQUEST']._serialized_end=955 + _globals['_GETSEGMENTSRESPONSE']._serialized_start=957 + _globals['_GETSEGMENTSRESPONSE']._serialized_end=1045 + _globals['_UPDATESEGMENTREQUEST']._serialized_start=1048 + _globals['_UPDATESEGMENTREQUEST']._serialized_end=1242 + _globals['_UPDATESEGMENTRESPONSE']._serialized_start=1244 + _globals['_UPDATESEGMENTRESPONSE']._serialized_end=1299 + _globals['_CREATECOLLECTIONREQUEST']._serialized_start=1302 + _globals['_CREATECOLLECTIONREQUEST']._serialized_end=1531 + _globals['_CREATECOLLECTIONRESPONSE']._serialized_start=1533 + _globals['_CREATECOLLECTIONRESPONSE']._serialized_end=1648 + _globals['_DELETECOLLECTIONREQUEST']._serialized_start=1650 + _globals['_DELETECOLLECTIONREQUEST']._serialized_end=1721 + _globals['_DELETECOLLECTIONRESPONSE']._serialized_start=1723 + _globals['_DELETECOLLECTIONRESPONSE']._serialized_end=1781 + _globals['_GETCOLLECTIONSREQUEST']._serialized_start=1783 + 
_globals['_GETCOLLECTIONSREQUEST']._serialized_end=1892 + _globals['_GETCOLLECTIONSRESPONSE']._serialized_start=1894 + _globals['_GETCOLLECTIONSRESPONSE']._serialized_end=1991 + _globals['_UPDATECOLLECTIONREQUEST']._serialized_start=1994 + _globals['_UPDATECOLLECTIONREQUEST']._serialized_end=2186 + _globals['_UPDATECOLLECTIONRESPONSE']._serialized_start=2188 + _globals['_UPDATECOLLECTIONRESPONSE']._serialized_end=2246 + _globals['_NOTIFICATION']._serialized_start=2248 + _globals['_NOTIFICATION']._serialized_end=2327 + _globals['_RESETSTATERESPONSE']._serialized_start=2329 + _globals['_RESETSTATERESPONSE']._serialized_end=2381 + _globals['_GETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_start=2383 + _globals['_GETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_end=2441 + _globals['_TENANTLASTCOMPACTIONTIME']._serialized_start=2443 + _globals['_TENANTLASTCOMPACTIONTIME']._serialized_end=2518 + _globals['_GETLASTCOMPACTIONTIMEFORTENANTRESPONSE']._serialized_start=2520 + _globals['_GETLASTCOMPACTIONTIMEFORTENANTRESPONSE']._serialized_end=2631 + _globals['_SETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_start=2633 + _globals['_SETLASTCOMPACTIONTIMEFORTENANTREQUEST']._serialized_end=2743 + _globals['_FLUSHSEGMENTCOMPACTIONINFO']._serialized_start=2746 + _globals['_FLUSHSEGMENTCOMPACTIONINFO']._serialized_end=2934 + _globals['_FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY']._serialized_start=2867 + _globals['_FLUSHSEGMENTCOMPACTIONINFO_FILEPATHSENTRY']._serialized_end=2934 + _globals['_FLUSHCOLLECTIONCOMPACTIONREQUEST']._serialized_start=2937 + _globals['_FLUSHCOLLECTIONCOMPACTIONREQUEST']._serialized_end=3132 + _globals['_FLUSHCOLLECTIONCOMPACTIONRESPONSE']._serialized_start=3134 + _globals['_FLUSHCOLLECTIONCOMPACTIONRESPONSE']._serialized_end=3250 + _globals['_SYSDB']._serialized_start=3253 + _globals['_SYSDB']._serialized_end=4649 # @@protoc_insertion_point(module_scope) diff --git a/chromadb/proto/coordinator_pb2.pyi b/chromadb/proto/coordinator_pb2.pyi index 
6175b63917e..b00a5be9b79 100644 --- a/chromadb/proto/coordinator_pb2.pyi +++ b/chromadb/proto/coordinator_pb2.pyi @@ -90,18 +90,16 @@ class DeleteSegmentResponse(_message.Message): def __init__(self, status: _Optional[_Union[_chroma_pb2.Status, _Mapping]] = ...) -> None: ... class GetSegmentsRequest(_message.Message): - __slots__ = ["id", "type", "scope", "topic", "collection"] + __slots__ = ["id", "type", "scope", "collection"] ID_FIELD_NUMBER: _ClassVar[int] TYPE_FIELD_NUMBER: _ClassVar[int] SCOPE_FIELD_NUMBER: _ClassVar[int] - TOPIC_FIELD_NUMBER: _ClassVar[int] COLLECTION_FIELD_NUMBER: _ClassVar[int] id: str type: str scope: _chroma_pb2.SegmentScope - topic: str collection: str - def __init__(self, id: _Optional[str] = ..., type: _Optional[str] = ..., scope: _Optional[_Union[_chroma_pb2.SegmentScope, str]] = ..., topic: _Optional[str] = ..., collection: _Optional[str] = ...) -> None: ... + def __init__(self, id: _Optional[str] = ..., type: _Optional[str] = ..., scope: _Optional[_Union[_chroma_pb2.SegmentScope, str]] = ..., collection: _Optional[str] = ...) -> None: ... class GetSegmentsResponse(_message.Message): __slots__ = ["segments", "status"] @@ -112,22 +110,18 @@ class GetSegmentsResponse(_message.Message): def __init__(self, segments: _Optional[_Iterable[_Union[_chroma_pb2.Segment, _Mapping]]] = ..., status: _Optional[_Union[_chroma_pb2.Status, _Mapping]] = ...) -> None: ... 
class UpdateSegmentRequest(_message.Message): - __slots__ = ["id", "topic", "reset_topic", "collection", "reset_collection", "metadata", "reset_metadata"] + __slots__ = ["id", "collection", "reset_collection", "metadata", "reset_metadata"] ID_FIELD_NUMBER: _ClassVar[int] - TOPIC_FIELD_NUMBER: _ClassVar[int] - RESET_TOPIC_FIELD_NUMBER: _ClassVar[int] COLLECTION_FIELD_NUMBER: _ClassVar[int] RESET_COLLECTION_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] RESET_METADATA_FIELD_NUMBER: _ClassVar[int] id: str - topic: str - reset_topic: bool collection: str reset_collection: bool metadata: _chroma_pb2.UpdateMetadata reset_metadata: bool - def __init__(self, id: _Optional[str] = ..., topic: _Optional[str] = ..., reset_topic: bool = ..., collection: _Optional[str] = ..., reset_collection: bool = ..., metadata: _Optional[_Union[_chroma_pb2.UpdateMetadata, _Mapping]] = ..., reset_metadata: bool = ...) -> None: ... + def __init__(self, id: _Optional[str] = ..., collection: _Optional[str] = ..., reset_collection: bool = ..., metadata: _Optional[_Union[_chroma_pb2.UpdateMetadata, _Mapping]] = ..., reset_metadata: bool = ...) -> None: ... class UpdateSegmentResponse(_message.Message): __slots__ = ["status"] @@ -180,18 +174,16 @@ class DeleteCollectionResponse(_message.Message): def __init__(self, status: _Optional[_Union[_chroma_pb2.Status, _Mapping]] = ...) -> None: ... class GetCollectionsRequest(_message.Message): - __slots__ = ["id", "name", "topic", "tenant", "database"] + __slots__ = ["id", "name", "tenant", "database"] ID_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] - TOPIC_FIELD_NUMBER: _ClassVar[int] TENANT_FIELD_NUMBER: _ClassVar[int] DATABASE_FIELD_NUMBER: _ClassVar[int] id: str name: str - topic: str tenant: str database: str - def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., topic: _Optional[str] = ..., tenant: _Optional[str] = ..., database: _Optional[str] = ...) -> None: ... 
+ def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., tenant: _Optional[str] = ..., database: _Optional[str] = ...) -> None: ... class GetCollectionsResponse(_message.Message): __slots__ = ["collections", "status"] @@ -202,20 +194,18 @@ class GetCollectionsResponse(_message.Message): def __init__(self, collections: _Optional[_Iterable[_Union[_chroma_pb2.Collection, _Mapping]]] = ..., status: _Optional[_Union[_chroma_pb2.Status, _Mapping]] = ...) -> None: ... class UpdateCollectionRequest(_message.Message): - __slots__ = ["id", "topic", "name", "dimension", "metadata", "reset_metadata"] + __slots__ = ["id", "name", "dimension", "metadata", "reset_metadata"] ID_FIELD_NUMBER: _ClassVar[int] - TOPIC_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] DIMENSION_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] RESET_METADATA_FIELD_NUMBER: _ClassVar[int] id: str - topic: str name: str dimension: int metadata: _chroma_pb2.UpdateMetadata reset_metadata: bool - def __init__(self, id: _Optional[str] = ..., topic: _Optional[str] = ..., name: _Optional[str] = ..., dimension: _Optional[int] = ..., metadata: _Optional[_Union[_chroma_pb2.UpdateMetadata, _Mapping]] = ..., reset_metadata: bool = ...) -> None: ... + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., dimension: _Optional[int] = ..., metadata: _Optional[_Union[_chroma_pb2.UpdateMetadata, _Mapping]] = ..., reset_metadata: bool = ...) -> None: ... 
class UpdateCollectionResponse(_message.Message): __slots__ = ["status"] diff --git a/chromadb/proto/logservice_pb2.py b/chromadb/proto/logservice_pb2.py index 0c7ca972ebe..36b4d9521e2 100644 --- a/chromadb/proto/logservice_pb2.py +++ b/chromadb/proto/logservice_pb2.py @@ -16,7 +16,7 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1f\x63hromadb/proto/logservice.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto"R\n\x0fPushLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12(\n\x07records\x18\x02 \x03(\x0b\x32\x17.chroma.OperationRecord"(\n\x10PushLogsResponse\x12\x14\n\x0crecord_count\x18\x01 \x01(\x05"j\n\x0fPullLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x15\n\rstart_from_id\x18\x02 \x01(\x03\x12\x12\n\nbatch_size\x18\x03 \x01(\x05\x12\x15\n\rend_timestamp\x18\x04 \x01(\x03"D\n\tRecordLog\x12\x0e\n\x06log_id\x18\x01 \x01(\x03\x12\'\n\x06record\x18\x02 \x01(\x0b\x32\x17.chroma.OperationRecord"6\n\x10PullLogsResponse\x12"\n\x07records\x18\x01 \x03(\x0b\x32\x11.chroma.RecordLog"V\n\x0e\x43ollectionInfo\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x14\n\x0c\x66irst_log_id\x18\x02 \x01(\x03\x12\x17\n\x0f\x66irst_log_id_ts\x18\x03 \x01(\x03"&\n$GetAllCollectionInfoToCompactRequest"\\\n%GetAllCollectionInfoToCompactResponse\x12\x33\n\x13\x61ll_collection_info\x18\x01 \x03(\x0b\x32\x16.chroma.CollectionInfo2\x8e\x02\n\nLogService\x12?\n\x08PushLogs\x12\x17.chroma.PushLogsRequest\x1a\x18.chroma.PushLogsResponse"\x00\x12?\n\x08PullLogs\x12\x17.chroma.PullLogsRequest\x1a\x18.chroma.PullLogsResponse"\x00\x12~\n\x1dGetAllCollectionInfoToCompact\x12,.chroma.GetAllCollectionInfoToCompactRequest\x1a-.chroma.GetAllCollectionInfoToCompactResponse"\x00\x42\x39Z7github.com/chroma-core/chroma/go/pkg/proto/logservicepbb\x06proto3' + b'\n\x1f\x63hromadb/proto/logservice.proto\x12\x06\x63hroma\x1a\x1b\x63hromadb/proto/chroma.proto"R\n\x0fPushLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12(\n\x07records\x18\x02 
\x03(\x0b\x32\x17.chroma.OperationRecord"(\n\x10PushLogsResponse\x12\x14\n\x0crecord_count\x18\x01 \x01(\x05"j\n\x0fPullLogsRequest\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x15\n\rstart_from_id\x18\x02 \x01(\x03\x12\x12\n\nbatch_size\x18\x03 \x01(\x05\x12\x15\n\rend_timestamp\x18\x04 \x01(\x03"H\n\tLogRecord\x12\x12\n\nlog_offset\x18\x01 \x01(\x03\x12\'\n\x06record\x18\x02 \x01(\x0b\x32\x17.chroma.OperationRecord"6\n\x10PullLogsResponse\x12"\n\x07records\x18\x01 \x03(\x0b\x32\x11.chroma.LogRecord"V\n\x0e\x43ollectionInfo\x12\x15\n\rcollection_id\x18\x01 \x01(\t\x12\x14\n\x0c\x66irst_log_id\x18\x02 \x01(\x03\x12\x17\n\x0f\x66irst_log_id_ts\x18\x03 \x01(\x03"&\n$GetAllCollectionInfoToCompactRequest"\\\n%GetAllCollectionInfoToCompactResponse\x12\x33\n\x13\x61ll_collection_info\x18\x01 \x03(\x0b\x32\x16.chroma.CollectionInfo2\x8e\x02\n\nLogService\x12?\n\x08PushLogs\x12\x17.chroma.PushLogsRequest\x1a\x18.chroma.PushLogsResponse"\x00\x12?\n\x08PullLogs\x12\x17.chroma.PullLogsRequest\x1a\x18.chroma.PullLogsResponse"\x00\x12~\n\x1dGetAllCollectionInfoToCompact\x12,.chroma.GetAllCollectionInfoToCompactRequest\x1a-.chroma.GetAllCollectionInfoToCompactResponse"\x00\x42\x39Z7github.com/chroma-core/chroma/go/pkg/proto/logservicepbb\x06proto3' ) _globals = globals() @@ -35,16 +35,16 @@ _globals["_PUSHLOGSRESPONSE"]._serialized_end = 196 _globals["_PULLLOGSREQUEST"]._serialized_start = 198 _globals["_PULLLOGSREQUEST"]._serialized_end = 304 - _globals["_RECORDLOG"]._serialized_start = 306 - _globals["_RECORDLOG"]._serialized_end = 374 - _globals["_PULLLOGSRESPONSE"]._serialized_start = 376 - _globals["_PULLLOGSRESPONSE"]._serialized_end = 430 - _globals["_COLLECTIONINFO"]._serialized_start = 432 - _globals["_COLLECTIONINFO"]._serialized_end = 518 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTREQUEST"]._serialized_start = 520 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTREQUEST"]._serialized_end = 558 - _globals["_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE"]._serialized_start = 560 - 
_globals["_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE"]._serialized_end = 652 - _globals["_LOGSERVICE"]._serialized_start = 655 - _globals["_LOGSERVICE"]._serialized_end = 925 + _globals["_LOGRECORD"]._serialized_start = 306 + _globals["_LOGRECORD"]._serialized_end = 378 + _globals["_PULLLOGSRESPONSE"]._serialized_start = 380 + _globals["_PULLLOGSRESPONSE"]._serialized_end = 434 + _globals["_COLLECTIONINFO"]._serialized_start = 436 + _globals["_COLLECTIONINFO"]._serialized_end = 522 + _globals["_GETALLCOLLECTIONINFOTOCOMPACTREQUEST"]._serialized_start = 524 + _globals["_GETALLCOLLECTIONINFOTOCOMPACTREQUEST"]._serialized_end = 562 + _globals["_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE"]._serialized_start = 564 + _globals["_GETALLCOLLECTIONINFOTOCOMPACTRESPONSE"]._serialized_end = 656 + _globals["_LOGSERVICE"]._serialized_start = 659 + _globals["_LOGSERVICE"]._serialized_end = 929 # @@protoc_insertion_point(module_scope) diff --git a/chromadb/proto/logservice_pb2.pyi b/chromadb/proto/logservice_pb2.pyi index 78680253a6d..b7076df35a5 100644 --- a/chromadb/proto/logservice_pb2.pyi +++ b/chromadb/proto/logservice_pb2.pyi @@ -50,24 +50,24 @@ class PullLogsRequest(_message.Message): end_timestamp: _Optional[int] = ..., ) -> None: ... -class RecordLog(_message.Message): - __slots__ = ["log_id", "record"] - LOG_ID_FIELD_NUMBER: _ClassVar[int] +class LogRecord(_message.Message): + __slots__ = ["log_offset", "record"] + LOG_OFFSET_FIELD_NUMBER: _ClassVar[int] RECORD_FIELD_NUMBER: _ClassVar[int] - log_id: int + log_offset: int record: _chroma_pb2.OperationRecord def __init__( self, - log_id: _Optional[int] = ..., + log_offset: _Optional[int] = ..., record: _Optional[_Union[_chroma_pb2.OperationRecord, _Mapping]] = ..., ) -> None: ... 
class PullLogsResponse(_message.Message): __slots__ = ["records"] RECORDS_FIELD_NUMBER: _ClassVar[int] - records: _containers.RepeatedCompositeFieldContainer[RecordLog] + records: _containers.RepeatedCompositeFieldContainer[LogRecord] def __init__( - self, records: _Optional[_Iterable[_Union[RecordLog, _Mapping]]] = ... + self, records: _Optional[_Iterable[_Union[LogRecord, _Mapping]]] = ... ) -> None: ... class CollectionInfo(_message.Message): diff --git a/chromadb/segment/impl/metadata/sqlite.py b/chromadb/segment/impl/metadata/sqlite.py index 996c97e1c78..2ac81ec9e33 100644 --- a/chromadb/segment/impl/metadata/sqlite.py +++ b/chromadb/segment/impl/metadata/sqlite.py @@ -19,7 +19,7 @@ Where, WhereDocument, MetadataEmbeddingRecord, - EmbeddingRecord, + LogRecord, SeqId, Operation, UpdateMetadata, @@ -257,9 +257,7 @@ def _record(self, rows: Sequence[Tuple[Any, ...]]) -> MetadataEmbeddingRecord: ) @trace_method("SqliteMetadataSegment._insert_record", OpenTelemetryGranularity.ALL) - def _insert_record( - self, cur: Cursor, record: EmbeddingRecord, upsert: bool - ) -> None: + def _insert_record(self, cur: Cursor, record: LogRecord, upsert: bool) -> None: """Add or update a single EmbeddingRecord into the DB""" t = Table("embeddings") @@ -268,11 +266,11 @@ def _insert_record( .into(t) .columns(t.segment_id, t.embedding_id, t.seq_id) .where(t.segment_id == ParameterValue(self._db.uuid_to_db(self._id))) - .where(t.embedding_id == ParameterValue(record["id"])) + .where(t.embedding_id == ParameterValue(record["operation_record"]["id"])) ).insert( ParameterValue(self._db.uuid_to_db(self._id)), - ParameterValue(record["id"]), - ParameterValue(_encode_seq_id(record["seq_id"])), + ParameterValue(record["operation_record"]["id"]), + ParameterValue(_encode_seq_id(record["log_offset"])), ) sql, params = get_sql(q) sql = sql + "RETURNING id" @@ -284,13 +282,15 @@ def _insert_record( # Cast here because the OpenTel decorators obfuscate the type return cast(None, 
self._update_record(cur, record)) else: - logger.warning(f"Insert of existing embedding ID: {record['id']}") + logger.warning( + f"Insert of existing embedding ID: {record['operation_record']['id']}" + ) # We are trying to add for a record that already exists. Fail the call. # We don't throw an exception since this is in principal an async path return - if record["metadata"]: - self._update_metadata(cur, id, record["metadata"]) + if record["operation_record"]["metadata"]: + self._update_metadata(cur, id, record["operation_record"]["metadata"]) @trace_method( "SqliteMetadataSegment._update_metadata", OpenTelemetryGranularity.ALL @@ -404,7 +404,7 @@ def insert_into_fulltext_search() -> None: insert_into_fulltext_search() @trace_method("SqliteMetadataSegment._delete_record", OpenTelemetryGranularity.ALL) - def _delete_record(self, cur: Cursor, record: EmbeddingRecord) -> None: + def _delete_record(self, cur: Cursor, record: LogRecord) -> None: """Delete a single EmbeddingRecord from the DB""" t = Table("embeddings") fts_t = Table("embedding_fulltext_search") @@ -412,7 +412,7 @@ def _delete_record(self, cur: Cursor, record: EmbeddingRecord) -> None: self._db.querybuilder() .from_(t) .where(t.segment_id == ParameterValue(self._db.uuid_to_db(self._id))) - .where(t.embedding_id == ParameterValue(record["id"])) + .where(t.embedding_id == ParameterValue(record["operation_record"]["id"])) .delete() ) q_fts = ( @@ -427,7 +427,10 @@ def _delete_record(self, cur: Cursor, record: EmbeddingRecord) -> None: .where( t.segment_id == ParameterValue(self._db.uuid_to_db(self._id)) ) - .where(t.embedding_id == ParameterValue(record["id"])) + .where( + t.embedding_id + == ParameterValue(record["operation_record"]["id"]) + ) ) ) ) @@ -436,7 +439,9 @@ def _delete_record(self, cur: Cursor, record: EmbeddingRecord) -> None: sql = sql + " RETURNING id" result = cur.execute(sql, params).fetchone() if result is None: - logger.warning(f"Delete of nonexisting embedding ID: {record['id']}") + 
logger.warning( + f"Delete of nonexisting embedding ID: {record['operation_record']['id']}" + ) else: id = result[0] @@ -454,28 +459,30 @@ def _delete_record(self, cur: Cursor, record: EmbeddingRecord) -> None: cur.execute(sql, params) @trace_method("SqliteMetadataSegment._update_record", OpenTelemetryGranularity.ALL) - def _update_record(self, cur: Cursor, record: EmbeddingRecord) -> None: + def _update_record(self, cur: Cursor, record: LogRecord) -> None: """Update a single EmbeddingRecord in the DB""" t = Table("embeddings") q = ( self._db.querybuilder() .update(t) - .set(t.seq_id, ParameterValue(_encode_seq_id(record["seq_id"]))) + .set(t.seq_id, ParameterValue(_encode_seq_id(record["log_offset"]))) .where(t.segment_id == ParameterValue(self._db.uuid_to_db(self._id))) - .where(t.embedding_id == ParameterValue(record["id"])) + .where(t.embedding_id == ParameterValue(record["operation_record"]["id"])) ) sql, params = get_sql(q) sql = sql + " RETURNING id" result = cur.execute(sql, params).fetchone() if result is None: - logger.warning(f"Update of nonexisting embedding ID: {record['id']}") + logger.warning( + f"Update of nonexisting embedding ID: {record['operation_record']['id']}" + ) else: id = result[0] - if record["metadata"]: - self._update_metadata(cur, id, record["metadata"]) + if record["operation_record"]["metadata"]: + self._update_metadata(cur, id, record["operation_record"]["metadata"]) @trace_method("SqliteMetadataSegment._write_metadata", OpenTelemetryGranularity.ALL) - def _write_metadata(self, records: Sequence[EmbeddingRecord]) -> None: + def _write_metadata(self, records: Sequence[LogRecord]) -> None: """Write embedding metadata to the database. 
Care should be taken to ensure records are append-only (that is, that seq-ids should increase monotonically)""" with self._db.tx() as cur: @@ -486,20 +493,20 @@ def _write_metadata(self, records: Sequence[EmbeddingRecord]) -> None: .columns("segment_id", "seq_id") .insert( ParameterValue(self._db.uuid_to_db(self._id)), - ParameterValue(_encode_seq_id(record["seq_id"])), + ParameterValue(_encode_seq_id(record["log_offset"])), ) ) sql, params = get_sql(q) sql = sql.replace("INSERT", "INSERT OR REPLACE") cur.execute(sql, params) - if record["operation"] == Operation.ADD: + if record["operation_record"]["operation"] == Operation.ADD: self._insert_record(cur, record, False) - elif record["operation"] == Operation.UPSERT: + elif record["operation_record"]["operation"] == Operation.UPSERT: self._insert_record(cur, record, True) - elif record["operation"] == Operation.DELETE: + elif record["operation_record"]["operation"] == Operation.DELETE: self._delete_record(cur, record) - elif record["operation"] == Operation.UPDATE: + elif record["operation_record"]["operation"] == Operation.UPDATE: self._update_record(cur, record) @trace_method( diff --git a/chromadb/segment/impl/vector/batch.py b/chromadb/segment/impl/vector/batch.py index aac533b918f..43cbe886005 100644 --- a/chromadb/segment/impl/vector/batch.py +++ b/chromadb/segment/impl/vector/batch.py @@ -1,12 +1,12 @@ from typing import Dict, List, Set, cast -from chromadb.types import EmbeddingRecord, Operation, SeqId, Vector +from chromadb.types import LogRecord, Operation, SeqId, Vector class Batch: """Used to model the set of changes as an atomic operation""" - _ids_to_records: Dict[str, EmbeddingRecord] + _ids_to_records: Dict[str, LogRecord] _deleted_ids: Set[str] _written_ids: Set[str] _upsert_add_ids: Set[str] # IDs that are being added in an upsert @@ -37,9 +37,12 @@ def get_written_ids(self) -> List[str]: def get_written_vectors(self, ids: List[str]) -> List[Vector]: """Get the list of vectors to write in this 
batch""" - return [cast(Vector, self._ids_to_records[id]["embedding"]) for id in ids] + return [ + cast(Vector, self._ids_to_records[id]["operation_record"]["embedding"]) + for id in ids + ] - def get_record(self, id: str) -> EmbeddingRecord: + def get_record(self, id: str) -> LogRecord: """Get the record for a given ID""" return self._ids_to_records[id] @@ -51,24 +54,33 @@ def is_deleted(self, id: str) -> bool: def delete_count(self) -> int: return len(self._deleted_ids) - def apply(self, record: EmbeddingRecord, exists_already: bool = False) -> None: + def apply(self, record: LogRecord, exists_already: bool = False) -> None: """Apply an embedding record to this batch. Records passed to this method are assumed to be validated for correctness. For example, a delete or update presumes the ID exists in the index. An add presumes the ID does not exist in the index. The exists_already flag should be set to True if the ID does exist in the index, and False otherwise. """ - id = record["id"] - if record["operation"] == Operation.DELETE: + id = record["operation_record"]["id"] + if record["operation_record"]["operation"] == Operation.DELETE: # If the ID was previously written, remove it from the written set # And update the add/update/delete counts if id in self._written_ids: self._written_ids.remove(id) - if self._ids_to_records[id]["operation"] == Operation.ADD: + if ( + self._ids_to_records[id]["operation_record"]["operation"] + == Operation.ADD + ): self.add_count -= 1 - elif self._ids_to_records[id]["operation"] == Operation.UPDATE: + elif ( + self._ids_to_records[id]["operation_record"]["operation"] + == Operation.UPDATE + ): self.update_count -= 1 self._deleted_ids.add(id) - elif self._ids_to_records[id]["operation"] == Operation.UPSERT: + elif ( + self._ids_to_records[id]["operation_record"]["operation"] + == Operation.UPSERT + ): if id in self._upsert_add_ids: self.add_count -= 1 self._upsert_add_ids.remove(id) @@ -92,15 +104,15 @@ def apply(self, record: 
EmbeddingRecord, exists_already: bool = False) -> None: self._deleted_ids.remove(id) # Update the add/update counts - if record["operation"] == Operation.UPSERT: + if record["operation_record"]["operation"] == Operation.UPSERT: if not exists_already: self.add_count += 1 self._upsert_add_ids.add(id) else: self.update_count += 1 - elif record["operation"] == Operation.ADD: + elif record["operation_record"]["operation"] == Operation.ADD: self.add_count += 1 - elif record["operation"] == Operation.UPDATE: + elif record["operation_record"]["operation"] == Operation.UPDATE: self.update_count += 1 - self.max_seq_id = max(self.max_seq_id, record["seq_id"]) + self.max_seq_id = max(self.max_seq_id, record["log_offset"]) diff --git a/chromadb/segment/impl/vector/brute_force_index.py b/chromadb/segment/impl/vector/brute_force_index.py index f9466e3f3d4..b43555c36c3 100644 --- a/chromadb/segment/impl/vector/brute_force_index.py +++ b/chromadb/segment/impl/vector/brute_force_index.py @@ -2,7 +2,7 @@ import numpy as np import numpy.typing as npt from chromadb.types import ( - EmbeddingRecord, + LogRecord, VectorEmbeddingRecord, VectorQuery, VectorQueryResult, @@ -59,7 +59,7 @@ def clear(self) -> None: self.free_indices = list(range(self.size)) self.vectors.fill(0) - def upsert(self, records: List[EmbeddingRecord]) -> None: + def upsert(self, records: List[LogRecord]) -> None: if len(records) + len(self) > self.size: raise Exception( "Index with capacity {} and {} current entries cannot add {} records".format( @@ -68,9 +68,9 @@ def upsert(self, records: List[EmbeddingRecord]) -> None: ) for i, record in enumerate(records): - id = record["id"] - vector = record["embedding"] - self.id_to_seq_id[id] = record["seq_id"] + id = record["operation_record"]["id"] + vector = record["operation_record"]["embedding"] + self.id_to_seq_id[id] = record["log_offset"] if id in self.deleted_ids: self.deleted_ids.remove(id) @@ -86,9 +86,9 @@ def upsert(self, records: List[EmbeddingRecord]) -> None: 
self.index_to_id[next_index] = id self.vectors[next_index] = vector - def delete(self, records: List[EmbeddingRecord]) -> None: + def delete(self, records: List[LogRecord]) -> None: for record in records: - id = record["id"] + id = record["operation_record"]["id"] if id in self.id_to_index: index = self.id_to_index[id] self.deleted_ids.add(id) diff --git a/chromadb/segment/impl/vector/local_hnsw.py b/chromadb/segment/impl/vector/local_hnsw.py index ec163279ab2..560dc9b2bd8 100644 --- a/chromadb/segment/impl/vector/local_hnsw.py +++ b/chromadb/segment/impl/vector/local_hnsw.py @@ -12,7 +12,7 @@ trace_method, ) from chromadb.types import ( - EmbeddingRecord, + LogRecord, VectorEmbeddingRecord, VectorQuery, VectorQueryResult, @@ -272,7 +272,7 @@ def _apply_batch(self, batch: Batch) -> None: # If that succeeds, update the mappings for i, id in enumerate(written_ids): - self._id_to_seq_id[id] = batch.get_record(id)["seq_id"] + self._id_to_seq_id[id] = batch.get_record(id)["log_offset"] self._id_to_label[id] = labels_to_write[i] self._label_to_id[labels_to_write[i]] = id @@ -283,7 +283,7 @@ def _apply_batch(self, batch: Batch) -> None: self._max_seq_id = batch.max_seq_id @trace_method("LocalHnswSegment._write_records", OpenTelemetryGranularity.ALL) - def _write_records(self, records: Sequence[EmbeddingRecord]) -> None: + def _write_records(self, records: Sequence[LogRecord]) -> None: """Add a batch of embeddings to the index""" if not self._running: raise RuntimeError("Cannot add embeddings to stopped component") @@ -293,9 +293,9 @@ def _write_records(self, records: Sequence[EmbeddingRecord]) -> None: batch = Batch() for record in records: - self._max_seq_id = max(self._max_seq_id, record["seq_id"]) - id = record["id"] - op = record["operation"] + self._max_seq_id = max(self._max_seq_id, record["log_offset"]) + id = record["operation_record"]["id"] + op = record["operation_record"]["operation"] label = self._id_to_label.get(id, None) if op == Operation.DELETE: @@ -305,12 
+305,12 @@ def _write_records(self, records: Sequence[EmbeddingRecord]) -> None: logger.warning(f"Delete of nonexisting embedding ID: {id}") elif op == Operation.UPDATE: - if record["embedding"] is not None: + if record["operation_record"]["embedding"] is not None: if label is not None: batch.apply(record) else: logger.warning( - f"Update of nonexisting embedding ID: {record['id']}" + f"Update of nonexisting embedding ID: {record['operation_record']['id']}" ) elif op == Operation.ADD: if not label: diff --git a/chromadb/segment/impl/vector/local_persistent_hnsw.py b/chromadb/segment/impl/vector/local_persistent_hnsw.py index 4ab60a1725d..950ee53be9f 100644 --- a/chromadb/segment/impl/vector/local_persistent_hnsw.py +++ b/chromadb/segment/impl/vector/local_persistent_hnsw.py @@ -17,7 +17,7 @@ trace_method, ) from chromadb.types import ( - EmbeddingRecord, + LogRecord, Metadata, Operation, Segment, @@ -222,14 +222,16 @@ def _apply_batch(self, batch: Batch) -> None: "PersistentLocalHnswSegment._write_records", OpenTelemetryGranularity.ALL ) @override - def _write_records(self, records: Sequence[EmbeddingRecord]) -> None: + def _write_records(self, records: Sequence[LogRecord]) -> None: """Add a batch of embeddings to the index""" if not self._running: raise RuntimeError("Cannot add embeddings to stopped component") with WriteRWLock(self._lock): for record in records: - if record["embedding"] is not None: - self._ensure_index(len(records), len(record["embedding"])) + if record["operation_record"]["embedding"] is not None: + self._ensure_index( + len(records), len(record["operation_record"]["embedding"]) + ) if not self._index_initialized: # If the index is not initialized here, it means that we have # not yet added any records to the index. 
So we can just @@ -237,9 +239,9 @@ def _write_records(self, records: Sequence[EmbeddingRecord]) -> None: continue self._brute_force_index = cast(BruteForceIndex, self._brute_force_index) - self._max_seq_id = max(self._max_seq_id, record["seq_id"]) - id = record["id"] - op = record["operation"] + self._max_seq_id = max(self._max_seq_id, record["log_offset"]) + id = record["operation_record"]["id"] + op = record["operation_record"]["operation"] exists_in_index = self._id_to_label.get( id, None ) is not None or self._brute_force_index.has_id(id) @@ -254,23 +256,23 @@ def _write_records(self, records: Sequence[EmbeddingRecord]) -> None: logger.warning(f"Delete of nonexisting embedding ID: {id}") elif op == Operation.UPDATE: - if record["embedding"] is not None: + if record["operation_record"]["embedding"] is not None: if exists_in_index: self._curr_batch.apply(record) self._brute_force_index.upsert([record]) else: logger.warning( - f"Update of nonexisting embedding ID: {record['id']}" + f"Update of nonexisting embedding ID: {record['operation_record']['id']}" ) elif op == Operation.ADD: - if record["embedding"] is not None: + if record["operation_record"]["embedding"] is not None: if not exists_in_index: self._curr_batch.apply(record, not exists_in_index) self._brute_force_index.upsert([record]) else: logger.warning(f"Add of existing embedding ID: {id}") elif op == Operation.UPSERT: - if record["embedding"] is not None: + if record["operation_record"]["embedding"] is not None: self._curr_batch.apply(record, exists_in_index) self._brute_force_index.upsert([record]) if len(self._curr_batch) >= self._batch_size: diff --git a/chromadb/test/ingest/test_producer_consumer.py b/chromadb/test/ingest/test_producer_consumer.py index 7b2a5c23c8c..d44faca5894 100644 --- a/chromadb/test/ingest/test_producer_consumer.py +++ b/chromadb/test/ingest/test_producer_consumer.py @@ -22,7 +22,7 @@ from chromadb.types import ( OperationRecord, Operation, - EmbeddingRecord, + LogRecord, 
ScalarEncoding, ) from chromadb.config import System, Settings @@ -92,7 +92,7 @@ def create_record(i: int) -> OperationRecord: class CapturingConsumeFn: - embeddings: List[EmbeddingRecord] + embeddings: List[LogRecord] waiters: List[Tuple[int, Event]] def __init__(self) -> None: @@ -104,14 +104,14 @@ def __init__(self) -> None: self.waiters = [] self._loop = asyncio.get_event_loop() - def __call__(self, embeddings: Sequence[EmbeddingRecord]) -> None: + def __call__(self, embeddings: Sequence[LogRecord]) -> None: self.embeddings.extend(embeddings) for n, event in self.waiters: if len(self.embeddings) >= n: # event.set() is not thread safe, so we need to call it in the main event loop self._loop.call_soon_threadsafe(event.set) - async def get(self, n: int, timeout_secs: int = 10) -> Sequence[EmbeddingRecord]: + async def get(self, n: int, timeout_secs: int = 10) -> Sequence[LogRecord]: "Wait until at least N embeddings are available, then return all embeddings" if len(self.embeddings) >= n: return self.embeddings[:n] @@ -130,19 +130,21 @@ def assert_approx_equal(a: Sequence[float], b: Sequence[float]) -> None: def assert_records_match( inserted_records: Sequence[OperationRecord], - consumed_records: Sequence[EmbeddingRecord], + consumed_records: Sequence[LogRecord], ) -> None: """Given a list of inserted and consumed records, make sure they match""" assert len(consumed_records) == len(inserted_records) for inserted, consumed in zip(inserted_records, consumed_records): - assert inserted["id"] == consumed["id"] - assert inserted["operation"] == consumed["operation"] - assert inserted["encoding"] == consumed["encoding"] - assert inserted["metadata"] == consumed["metadata"] + assert inserted["id"] == consumed["operation_record"]["id"] + assert inserted["operation"] == consumed["operation_record"]["operation"] + assert inserted["encoding"] == consumed["operation_record"]["encoding"] + assert inserted["metadata"] == consumed["operation_record"]["metadata"] if 
inserted["embedding"] is not None: - assert consumed["embedding"] is not None - assert_approx_equal(inserted["embedding"], consumed["embedding"]) + assert consumed["operation_record"]["embedding"] is not None + assert_approx_equal( + inserted["embedding"], consumed["operation_record"]["embedding"] + ) @pytest.mark.asyncio @@ -243,7 +245,7 @@ async def test_start_seq_id( results_1 = await consume_fn_1.get(5) assert_records_match(embeddings, results_1) - start = consume_fn_1.embeddings[-1]["seq_id"] + start = consume_fn_1.embeddings[-1]["log_offset"] consumer.subscribe(collection, consume_fn_2, start=start) second_embeddings = produce_fns(producer, collection, sample_embeddings, 5)[0] assert isinstance(embeddings, list) @@ -273,7 +275,7 @@ async def test_end_seq_id( results_1 = await consume_fn_1.get(10) assert_records_match(embeddings, results_1) - end = consume_fn_1.embeddings[-5]["seq_id"] + end = consume_fn_1.embeddings[-5]["log_offset"] consumer.subscribe(collection, consume_fn_2, start=consumer.min_seqid(), end=end) results_2 = await consume_fn_2.get(6) diff --git a/chromadb/types.py b/chromadb/types.py index 1940214b4c9..262e7f87101 100644 --- a/chromadb/types.py +++ b/chromadb/types.py @@ -85,31 +85,24 @@ class MetadataEmbeddingRecord(TypedDict): metadata: Optional[Metadata] -class EmbeddingRecord(TypedDict): +class OperationRecord(TypedDict): id: str - seq_id: SeqId embedding: Optional[Vector] encoding: Optional[ScalarEncoding] metadata: Optional[UpdateMetadata] operation: Operation - # The collection the operation is being performed on - # This is optional because in the single node version, - # topics are 1:1 with collections. So consumers of the ingest queue - # implicitly know this mapping. However, in the multi-node version, - # topics are shared between collections, so we need to explicitly - # specify the collection. 
- # For backwards compatability reasons, we can't make this a required field on - # single node, since data written with older versions of the code won't be able to - # populate it. - collection_id: Optional[UUID] -class OperationRecord(TypedDict): +class LogRecord(TypedDict): + log_offset: int + operation_record: OperationRecord + + +class DataRecord(TypedDict): id: str embedding: Optional[Vector] encoding: Optional[ScalarEncoding] - metadata: Optional[UpdateMetadata] - operation: Operation + metadata: Optional[Metadata] class VectorQuery(TypedDict): diff --git a/go/migrations/20240327172649.sql b/go/migrations/20240327172649.sql new file mode 100644 index 00000000000..0af6002b5af --- /dev/null +++ b/go/migrations/20240327172649.sql @@ -0,0 +1,4 @@ +-- Modify "record_logs" table +-- NOTE: This is a destructive migration autogenerated by atlas. +--This is fine for now because we are still in development. +ALTER TABLE "record_logs" DROP CONSTRAINT "record_logs_pkey", DROP COLUMN "id", ADD COLUMN "log_offset" bigint NOT NULL, ADD PRIMARY KEY ("collection_id", "log_offset"); diff --git a/go/migrations/atlas.sum b/go/migrations/atlas.sum index 44d15d1d915..088dd87ccf4 100644 --- a/go/migrations/atlas.sum +++ b/go/migrations/atlas.sum @@ -1,4 +1,5 @@ -h1:Uk5EXzkUN9oinZXA4sMmuQMTXRTtTpkxBMD6Gv2dxw4= +h1:a7siLM7ZTF8njH6u0JLRctDcvTDu9/XNvTJ7hmLPyII= 20240313233558.sql h1:shyeY6BuLGJ1Ia/G/hH+NZS6HZqHxhBJ2Pfdoeerz7I= 20240321194713.sql h1:K5CAwiFb9kx+O8E/3Dq2C7jzMa7P+ZvqGL5HtLKe2YU= 20240327075032.sql h1:zE+/KCknuhtExHiKoZSfhFzahpbs2B7O/JgYbfxkjp0= +20240327172649.sql h1:hIFZlonLfEqJwmjC6nYn5xV6O8s8eA5y5JPc3BBbw+E= diff --git a/go/mocks/Catalog.go b/go/mocks/Catalog.go new file mode 100644 index 00000000000..d7ce72ebec5 --- /dev/null +++ b/go/mocks/Catalog.go @@ -0,0 +1,526 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + + mock "github.com/stretchr/testify/mock" + + model "github.com/chroma-core/chroma/go/pkg/model" + + types "github.com/chroma-core/chroma/go/pkg/types" +) + +// Catalog is an autogenerated mock type for the Catalog type +type Catalog struct { + mock.Mock +} + +// CreateCollection provides a mock function with given fields: ctx, createCollection, ts +func (_m *Catalog) CreateCollection(ctx context.Context, createCollection *model.CreateCollection, ts int64) (*model.Collection, error) { + ret := _m.Called(ctx, createCollection, ts) + + if len(ret) == 0 { + panic("no return value specified for CreateCollection") + } + + var r0 *model.Collection + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateCollection, int64) (*model.Collection, error)); ok { + return rf(ctx, createCollection, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateCollection, int64) *model.Collection); ok { + r0 = rf(ctx, createCollection, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Collection) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.CreateCollection, int64) error); ok { + r1 = rf(ctx, createCollection, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateDatabase provides a mock function with given fields: ctx, createDatabase, ts +func (_m *Catalog) CreateDatabase(ctx context.Context, createDatabase *model.CreateDatabase, ts int64) (*model.Database, error) { + ret := _m.Called(ctx, createDatabase, ts) + + if len(ret) == 0 { + panic("no return value specified for CreateDatabase") + } + + var r0 *model.Database + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateDatabase, int64) (*model.Database, error)); ok { + return rf(ctx, createDatabase, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateDatabase, int64) *model.Database); 
ok { + r0 = rf(ctx, createDatabase, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Database) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.CreateDatabase, int64) error); ok { + r1 = rf(ctx, createDatabase, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateSegment provides a mock function with given fields: ctx, createSegment, ts +func (_m *Catalog) CreateSegment(ctx context.Context, createSegment *model.CreateSegment, ts int64) (*model.Segment, error) { + ret := _m.Called(ctx, createSegment, ts) + + if len(ret) == 0 { + panic("no return value specified for CreateSegment") + } + + var r0 *model.Segment + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateSegment, int64) (*model.Segment, error)); ok { + return rf(ctx, createSegment, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateSegment, int64) *model.Segment); ok { + r0 = rf(ctx, createSegment, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Segment) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.CreateSegment, int64) error); ok { + r1 = rf(ctx, createSegment, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateTenant provides a mock function with given fields: ctx, createTenant, ts +func (_m *Catalog) CreateTenant(ctx context.Context, createTenant *model.CreateTenant, ts int64) (*model.Tenant, error) { + ret := _m.Called(ctx, createTenant, ts) + + if len(ret) == 0 { + panic("no return value specified for CreateTenant") + } + + var r0 *model.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateTenant, int64) (*model.Tenant, error)); ok { + return rf(ctx, createTenant, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateTenant, int64) *model.Tenant); ok { + r0 = rf(ctx, createTenant, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Tenant) + } + } + + if rf, ok := 
ret.Get(1).(func(context.Context, *model.CreateTenant, int64) error); ok { + r1 = rf(ctx, createTenant, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteCollection provides a mock function with given fields: ctx, deleteCollection +func (_m *Catalog) DeleteCollection(ctx context.Context, deleteCollection *model.DeleteCollection) error { + ret := _m.Called(ctx, deleteCollection) + + if len(ret) == 0 { + panic("no return value specified for DeleteCollection") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *model.DeleteCollection) error); ok { + r0 = rf(ctx, deleteCollection) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteSegment provides a mock function with given fields: ctx, segmentID +func (_m *Catalog) DeleteSegment(ctx context.Context, segmentID types.UniqueID) error { + ret := _m.Called(ctx, segmentID) + + if len(ret) == 0 { + panic("no return value specified for DeleteSegment") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID) error); ok { + r0 = rf(ctx, segmentID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// FlushCollectionCompaction provides a mock function with given fields: ctx, flushCollectionCompaction +func (_m *Catalog) FlushCollectionCompaction(ctx context.Context, flushCollectionCompaction *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error) { + ret := _m.Called(ctx, flushCollectionCompaction) + + if len(ret) == 0 { + panic("no return value specified for FlushCollectionCompaction") + } + + var r0 *model.FlushCollectionInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error)); ok { + return rf(ctx, flushCollectionCompaction) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.FlushCollectionCompaction) *model.FlushCollectionInfo); ok { + r0 = rf(ctx, flushCollectionCompaction) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).(*model.FlushCollectionInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.FlushCollectionCompaction) error); ok { + r1 = rf(ctx, flushCollectionCompaction) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetAllDatabases provides a mock function with given fields: ctx, ts +func (_m *Catalog) GetAllDatabases(ctx context.Context, ts int64) ([]*model.Database, error) { + ret := _m.Called(ctx, ts) + + if len(ret) == 0 { + panic("no return value specified for GetAllDatabases") + } + + var r0 []*model.Database + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) ([]*model.Database, error)); ok { + return rf(ctx, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) []*model.Database); ok { + r0 = rf(ctx, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Database) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetAllTenants provides a mock function with given fields: ctx, ts +func (_m *Catalog) GetAllTenants(ctx context.Context, ts int64) ([]*model.Tenant, error) { + ret := _m.Called(ctx, ts) + + if len(ret) == 0 { + panic("no return value specified for GetAllTenants") + } + + var r0 []*model.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, int64) ([]*model.Tenant, error)); ok { + return rf(ctx, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, int64) []*model.Tenant); ok { + r0 = rf(ctx, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok { + r1 = rf(ctx, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCollections provides a mock function with given fields: ctx, collectionID, collectionName, tenantID, databaseName +func (_m *Catalog) GetCollections(ctx context.Context, collectionID types.UniqueID, collectionName *string, 
tenantID string, databaseName string) ([]*model.Collection, error) { + ret := _m.Called(ctx, collectionID, collectionName, tenantID, databaseName) + + if len(ret) == 0 { + panic("no return value specified for GetCollections") + } + + var r0 []*model.Collection + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, string, string) ([]*model.Collection, error)); ok { + return rf(ctx, collectionID, collectionName, tenantID, databaseName) + } + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, string, string) []*model.Collection); ok { + r0 = rf(ctx, collectionID, collectionName, tenantID, databaseName) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Collection) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.UniqueID, *string, string, string) error); ok { + r1 = rf(ctx, collectionID, collectionName, tenantID, databaseName) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetDatabases provides a mock function with given fields: ctx, getDatabase, ts +func (_m *Catalog) GetDatabases(ctx context.Context, getDatabase *model.GetDatabase, ts int64) (*model.Database, error) { + ret := _m.Called(ctx, getDatabase, ts) + + if len(ret) == 0 { + panic("no return value specified for GetDatabases") + } + + var r0 *model.Database + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.GetDatabase, int64) (*model.Database, error)); ok { + return rf(ctx, getDatabase, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.GetDatabase, int64) *model.Database); ok { + r0 = rf(ctx, getDatabase, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Database) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.GetDatabase, int64) error); ok { + r1 = rf(ctx, getDatabase, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetSegments provides a mock function with given fields: ctx, segmentID, segmentType, scope, collectionID 
+func (_m *Catalog) GetSegments(ctx context.Context, segmentID types.UniqueID, segmentType *string, scope *string, collectionID types.UniqueID) ([]*model.Segment, error) { + ret := _m.Called(ctx, segmentID, segmentType, scope, collectionID) + + if len(ret) == 0 { + panic("no return value specified for GetSegments") + } + + var r0 []*model.Segment + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, *string, types.UniqueID) ([]*model.Segment, error)); ok { + return rf(ctx, segmentID, segmentType, scope, collectionID) + } + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, *string, types.UniqueID) []*model.Segment); ok { + r0 = rf(ctx, segmentID, segmentType, scope, collectionID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Segment) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.UniqueID, *string, *string, types.UniqueID) error); ok { + r1 = rf(ctx, segmentID, segmentType, scope, collectionID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenants provides a mock function with given fields: ctx, getTenant, ts +func (_m *Catalog) GetTenants(ctx context.Context, getTenant *model.GetTenant, ts int64) (*model.Tenant, error) { + ret := _m.Called(ctx, getTenant, ts) + + if len(ret) == 0 { + panic("no return value specified for GetTenants") + } + + var r0 *model.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.GetTenant, int64) (*model.Tenant, error)); ok { + return rf(ctx, getTenant, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.GetTenant, int64) *model.Tenant); ok { + r0 = rf(ctx, getTenant, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.GetTenant, int64) error); ok { + r1 = rf(ctx, getTenant, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenantsLastCompactionTime provides a mock function with given 
fields: ctx, tenantIDs +func (_m *Catalog) GetTenantsLastCompactionTime(ctx context.Context, tenantIDs []string) ([]*dbmodel.Tenant, error) { + ret := _m.Called(ctx, tenantIDs) + + if len(ret) == 0 { + panic("no return value specified for GetTenantsLastCompactionTime") + } + + var r0 []*dbmodel.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []string) ([]*dbmodel.Tenant, error)); ok { + return rf(ctx, tenantIDs) + } + if rf, ok := ret.Get(0).(func(context.Context, []string) []*dbmodel.Tenant); ok { + r0 = rf(ctx, tenantIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, tenantIDs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ResetState provides a mock function with given fields: ctx +func (_m *Catalog) ResetState(ctx context.Context) error { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for ResetState") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(ctx) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SetTenantLastCompactionTime provides a mock function with given fields: ctx, tenantID, lastCompactionTime +func (_m *Catalog) SetTenantLastCompactionTime(ctx context.Context, tenantID string, lastCompactionTime int64) error { + ret := _m.Called(ctx, tenantID, lastCompactionTime) + + if len(ret) == 0 { + panic("no return value specified for SetTenantLastCompactionTime") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, int64) error); ok { + r0 = rf(ctx, tenantID, lastCompactionTime) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateCollection provides a mock function with given fields: ctx, updateCollection, ts +func (_m *Catalog) UpdateCollection(ctx context.Context, updateCollection *model.UpdateCollection, ts int64) (*model.Collection, error) { + ret := _m.Called(ctx, 
updateCollection, ts) + + if len(ret) == 0 { + panic("no return value specified for UpdateCollection") + } + + var r0 *model.Collection + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateCollection, int64) (*model.Collection, error)); ok { + return rf(ctx, updateCollection, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateCollection, int64) *model.Collection); ok { + r0 = rf(ctx, updateCollection, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Collection) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.UpdateCollection, int64) error); ok { + r1 = rf(ctx, updateCollection, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSegment provides a mock function with given fields: ctx, segmentInfo, ts +func (_m *Catalog) UpdateSegment(ctx context.Context, segmentInfo *model.UpdateSegment, ts int64) (*model.Segment, error) { + ret := _m.Called(ctx, segmentInfo, ts) + + if len(ret) == 0 { + panic("no return value specified for UpdateSegment") + } + + var r0 *model.Segment + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateSegment, int64) (*model.Segment, error)); ok { + return rf(ctx, segmentInfo, ts) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateSegment, int64) *model.Segment); ok { + r0 = rf(ctx, segmentInfo, ts) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Segment) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.UpdateSegment, int64) error); ok { + r1 = rf(ctx, segmentInfo, ts) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewCatalog creates a new instance of Catalog. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewCatalog(t interface { + mock.TestingT + Cleanup(func()) +}) *Catalog { + mock := &Catalog{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/CollectionMetadataValueType.go b/go/mocks/CollectionMetadataValueType.go new file mode 100644 index 00000000000..2ed88d4b39c --- /dev/null +++ b/go/mocks/CollectionMetadataValueType.go @@ -0,0 +1,50 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + model "github.com/chroma-core/chroma/go/pkg/model" + mock "github.com/stretchr/testify/mock" +) + +// CollectionMetadataValueType is an autogenerated mock type for the CollectionMetadataValueType type +type CollectionMetadataValueType struct { + mock.Mock +} + +// Equals provides a mock function with given fields: other +func (_m *CollectionMetadataValueType) Equals(other model.CollectionMetadataValueType) bool { + ret := _m.Called(other) + + if len(ret) == 0 { + panic("no return value specified for Equals") + } + + var r0 bool + if rf, ok := ret.Get(0).(func(model.CollectionMetadataValueType) bool); ok { + r0 = rf(other) + } else { + r0 = ret.Get(0).(bool) + } + + return r0 +} + +// IsCollectionMetadataValueType provides a mock function with given fields: +func (_m *CollectionMetadataValueType) IsCollectionMetadataValueType() { + _m.Called() +} + +// NewCollectionMetadataValueType creates a new instance of CollectionMetadataValueType. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewCollectionMetadataValueType(t interface { + mock.TestingT + Cleanup(func()) +}) *CollectionMetadataValueType { + mock := &CollectionMetadataValueType{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/Component.go b/go/mocks/Component.go new file mode 100644 index 00000000000..101d3661aca --- /dev/null +++ b/go/mocks/Component.go @@ -0,0 +1,60 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// Component is an autogenerated mock type for the Component type +type Component struct { + mock.Mock +} + +// Start provides a mock function with given fields: +func (_m *Component) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stop provides a mock function with given fields: +func (_m *Component) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Stop") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewComponent creates a new instance of Component. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewComponent(t interface { + mock.TestingT + Cleanup(func()) +}) *Component { + mock := &Component{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/GrpcProvider.go b/go/mocks/GrpcProvider.go new file mode 100644 index 00000000000..2859bcd862a --- /dev/null +++ b/go/mocks/GrpcProvider.go @@ -0,0 +1,58 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + grpcutils "github.com/chroma-core/chroma/go/pkg/grpcutils" + mock "github.com/stretchr/testify/mock" + grpc "google.golang.org/grpc" +) + +// GrpcProvider is an autogenerated mock type for the GrpcProvider type +type GrpcProvider struct { + mock.Mock +} + +// StartGrpcServer provides a mock function with given fields: name, grpcConfig, registerFunc +func (_m *GrpcProvider) StartGrpcServer(name string, grpcConfig *grpcutils.GrpcConfig, registerFunc func(grpc.ServiceRegistrar)) (grpcutils.GrpcServer, error) { + ret := _m.Called(name, grpcConfig, registerFunc) + + if len(ret) == 0 { + panic("no return value specified for StartGrpcServer") + } + + var r0 grpcutils.GrpcServer + var r1 error + if rf, ok := ret.Get(0).(func(string, *grpcutils.GrpcConfig, func(grpc.ServiceRegistrar)) (grpcutils.GrpcServer, error)); ok { + return rf(name, grpcConfig, registerFunc) + } + if rf, ok := ret.Get(0).(func(string, *grpcutils.GrpcConfig, func(grpc.ServiceRegistrar)) grpcutils.GrpcServer); ok { + r0 = rf(name, grpcConfig, registerFunc) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(grpcutils.GrpcServer) + } + } + + if rf, ok := ret.Get(1).(func(string, *grpcutils.GrpcConfig, func(grpc.ServiceRegistrar)) error); ok { + r1 = rf(name, grpcConfig, registerFunc) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewGrpcProvider creates a new instance of GrpcProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewGrpcProvider(t interface { + mock.TestingT + Cleanup(func()) +}) *GrpcProvider { + mock := &GrpcProvider{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/GrpcServer.go b/go/mocks/GrpcServer.go new file mode 100644 index 00000000000..bf55ae5c116 --- /dev/null +++ b/go/mocks/GrpcServer.go @@ -0,0 +1,60 @@ +// Code generated by mockery v2.42.1. 
DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// GrpcServer is an autogenerated mock type for the GrpcServer type +type GrpcServer struct { + mock.Mock +} + +// Close provides a mock function with given fields: +func (_m *GrpcServer) Close() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Close") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Port provides a mock function with given fields: +func (_m *GrpcServer) Port() int { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Port") + } + + var r0 int + if rf, ok := ret.Get(0).(func() int); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(int) + } + + return r0 +} + +// NewGrpcServer creates a new instance of GrpcServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewGrpcServer(t interface { + mock.TestingT + Cleanup(func()) +}) *GrpcServer { + mock := &GrpcServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ICollectionDb.go b/go/mocks/ICollectionDb.go new file mode 100644 index 00000000000..889c99781e4 --- /dev/null +++ b/go/mocks/ICollectionDb.go @@ -0,0 +1,167 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// ICollectionDb is an autogenerated mock type for the ICollectionDb type +type ICollectionDb struct { + mock.Mock +} + +// DeleteAll provides a mock function with given fields: +func (_m *ICollectionDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteCollectionByID provides a mock function with given fields: collectionID +func (_m *ICollectionDb) DeleteCollectionByID(collectionID string) (int, error) { + ret := _m.Called(collectionID) + + if len(ret) == 0 { + panic("no return value specified for DeleteCollectionByID") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func(string) (int, error)); ok { + return rf(collectionID) + } + if rf, ok := ret.Get(0).(func(string) int); ok { + r0 = rf(collectionID) + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(collectionID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCollections provides a mock function with given fields: collectionID, collectionName, tenantID, databaseName +func (_m *ICollectionDb) GetCollections(collectionID *string, collectionName *string, tenantID string, databaseName string) ([]*dbmodel.CollectionAndMetadata, error) { + ret := _m.Called(collectionID, collectionName, tenantID, databaseName) + + if len(ret) == 0 { + panic("no return value specified for GetCollections") + } + + var r0 []*dbmodel.CollectionAndMetadata + var r1 error + if rf, ok := ret.Get(0).(func(*string, *string, string, string) ([]*dbmodel.CollectionAndMetadata, error)); ok { + return rf(collectionID, collectionName, tenantID, databaseName) + } + if rf, ok := ret.Get(0).(func(*string, *string, 
string, string) []*dbmodel.CollectionAndMetadata); ok { + r0 = rf(collectionID, collectionName, tenantID, databaseName) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.CollectionAndMetadata) + } + } + + if rf, ok := ret.Get(1).(func(*string, *string, string, string) error); ok { + r1 = rf(collectionID, collectionName, tenantID, databaseName) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Insert provides a mock function with given fields: in +func (_m *ICollectionDb) Insert(in *dbmodel.Collection) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.Collection) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Update provides a mock function with given fields: in +func (_m *ICollectionDb) Update(in *dbmodel.Collection) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Update") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.Collection) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateLogPositionAndVersion provides a mock function with given fields: collectionID, logPosition, currentCollectionVersion +func (_m *ICollectionDb) UpdateLogPositionAndVersion(collectionID string, logPosition int64, currentCollectionVersion int32) (int32, error) { + ret := _m.Called(collectionID, logPosition, currentCollectionVersion) + + if len(ret) == 0 { + panic("no return value specified for UpdateLogPositionAndVersion") + } + + var r0 int32 + var r1 error + if rf, ok := ret.Get(0).(func(string, int64, int32) (int32, error)); ok { + return rf(collectionID, logPosition, currentCollectionVersion) + } + if rf, ok := ret.Get(0).(func(string, int64, int32) int32); ok { + r0 = rf(collectionID, logPosition, currentCollectionVersion) + } else { + r0 = ret.Get(0).(int32) + } + + if rf, ok := ret.Get(1).(func(string, int64, 
int32) error); ok { + r1 = rf(collectionID, logPosition, currentCollectionVersion) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewICollectionDb creates a new instance of ICollectionDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewICollectionDb(t interface { + mock.TestingT + Cleanup(func()) +}) *ICollectionDb { + mock := &ICollectionDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ICollectionMetadataDb.go b/go/mocks/ICollectionMetadataDb.go new file mode 100644 index 00000000000..d231bf1fe29 --- /dev/null +++ b/go/mocks/ICollectionMetadataDb.go @@ -0,0 +1,91 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// ICollectionMetadataDb is an autogenerated mock type for the ICollectionMetadataDb type +type ICollectionMetadataDb struct { + mock.Mock +} + +// DeleteAll provides a mock function with given fields: +func (_m *ICollectionMetadataDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteByCollectionID provides a mock function with given fields: collectionID +func (_m *ICollectionMetadataDb) DeleteByCollectionID(collectionID string) (int, error) { + ret := _m.Called(collectionID) + + if len(ret) == 0 { + panic("no return value specified for DeleteByCollectionID") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func(string) (int, error)); ok { + return rf(collectionID) + } + if rf, ok := ret.Get(0).(func(string) int); ok { + r0 = rf(collectionID) + } else { + r0 = ret.Get(0).(int) + } + + 
if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(collectionID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Insert provides a mock function with given fields: in +func (_m *ICollectionMetadataDb) Insert(in []*dbmodel.CollectionMetadata) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func([]*dbmodel.CollectionMetadata) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewICollectionMetadataDb creates a new instance of ICollectionMetadataDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewICollectionMetadataDb(t interface { + mock.TestingT + Cleanup(func()) +}) *ICollectionMetadataDb { + mock := &ICollectionMetadataDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ICoordinator.go b/go/mocks/ICoordinator.go new file mode 100644 index 00000000000..02d51811196 --- /dev/null +++ b/go/mocks/ICoordinator.go @@ -0,0 +1,490 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + + mock "github.com/stretchr/testify/mock" + + model "github.com/chroma-core/chroma/go/pkg/model" + + types "github.com/chroma-core/chroma/go/pkg/types" +) + +// ICoordinator is an autogenerated mock type for the ICoordinator type +type ICoordinator struct { + mock.Mock +} + +// CreateCollection provides a mock function with given fields: ctx, createCollection +func (_m *ICoordinator) CreateCollection(ctx context.Context, createCollection *model.CreateCollection) (*model.Collection, error) { + ret := _m.Called(ctx, createCollection) + + if len(ret) == 0 { + panic("no return value specified for CreateCollection") + } + + var r0 *model.Collection + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateCollection) (*model.Collection, error)); ok { + return rf(ctx, createCollection) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateCollection) *model.Collection); ok { + r0 = rf(ctx, createCollection) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Collection) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.CreateCollection) error); ok { + r1 = rf(ctx, createCollection) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateDatabase provides a mock function with given fields: ctx, createDatabase +func (_m *ICoordinator) CreateDatabase(ctx context.Context, createDatabase *model.CreateDatabase) (*model.Database, error) { + ret := _m.Called(ctx, createDatabase) + + if len(ret) == 0 { + panic("no return value specified for CreateDatabase") + } + + var r0 *model.Database + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateDatabase) (*model.Database, error)); ok { + return rf(ctx, createDatabase) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateDatabase) *model.Database); ok { + r0 = rf(ctx, createDatabase) + } else { + if ret.Get(0) 
!= nil { + r0 = ret.Get(0).(*model.Database) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.CreateDatabase) error); ok { + r1 = rf(ctx, createDatabase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateSegment provides a mock function with given fields: ctx, createSegment +func (_m *ICoordinator) CreateSegment(ctx context.Context, createSegment *model.CreateSegment) error { + ret := _m.Called(ctx, createSegment) + + if len(ret) == 0 { + panic("no return value specified for CreateSegment") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateSegment) error); ok { + r0 = rf(ctx, createSegment) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// CreateTenant provides a mock function with given fields: ctx, createTenant +func (_m *ICoordinator) CreateTenant(ctx context.Context, createTenant *model.CreateTenant) (*model.Tenant, error) { + ret := _m.Called(ctx, createTenant) + + if len(ret) == 0 { + panic("no return value specified for CreateTenant") + } + + var r0 *model.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateTenant) (*model.Tenant, error)); ok { + return rf(ctx, createTenant) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.CreateTenant) *model.Tenant); ok { + r0 = rf(ctx, createTenant) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.CreateTenant) error); ok { + r1 = rf(ctx, createTenant) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteCollection provides a mock function with given fields: ctx, deleteCollection +func (_m *ICoordinator) DeleteCollection(ctx context.Context, deleteCollection *model.DeleteCollection) error { + ret := _m.Called(ctx, deleteCollection) + + if len(ret) == 0 { + panic("no return value specified for DeleteCollection") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *model.DeleteCollection) 
error); ok { + r0 = rf(ctx, deleteCollection) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteSegment provides a mock function with given fields: ctx, segmentID +func (_m *ICoordinator) DeleteSegment(ctx context.Context, segmentID types.UniqueID) error { + ret := _m.Called(ctx, segmentID) + + if len(ret) == 0 { + panic("no return value specified for DeleteSegment") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID) error); ok { + r0 = rf(ctx, segmentID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// FlushCollectionCompaction provides a mock function with given fields: ctx, flushCollectionCompaction +func (_m *ICoordinator) FlushCollectionCompaction(ctx context.Context, flushCollectionCompaction *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error) { + ret := _m.Called(ctx, flushCollectionCompaction) + + if len(ret) == 0 { + panic("no return value specified for FlushCollectionCompaction") + } + + var r0 *model.FlushCollectionInfo + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.FlushCollectionCompaction) (*model.FlushCollectionInfo, error)); ok { + return rf(ctx, flushCollectionCompaction) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.FlushCollectionCompaction) *model.FlushCollectionInfo); ok { + r0 = rf(ctx, flushCollectionCompaction) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.FlushCollectionInfo) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.FlushCollectionCompaction) error); ok { + r1 = rf(ctx, flushCollectionCompaction) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCollections provides a mock function with given fields: ctx, collectionID, collectionName, tenantID, dataName +func (_m *ICoordinator) GetCollections(ctx context.Context, collectionID types.UniqueID, collectionName *string, tenantID string, dataName string) ([]*model.Collection, error) { + ret := _m.Called(ctx, collectionID, 
collectionName, tenantID, dataName) + + if len(ret) == 0 { + panic("no return value specified for GetCollections") + } + + var r0 []*model.Collection + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, string, string) ([]*model.Collection, error)); ok { + return rf(ctx, collectionID, collectionName, tenantID, dataName) + } + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, string, string) []*model.Collection); ok { + r0 = rf(ctx, collectionID, collectionName, tenantID, dataName) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Collection) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.UniqueID, *string, string, string) error); ok { + r1 = rf(ctx, collectionID, collectionName, tenantID, dataName) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetDatabase provides a mock function with given fields: ctx, getDatabase +func (_m *ICoordinator) GetDatabase(ctx context.Context, getDatabase *model.GetDatabase) (*model.Database, error) { + ret := _m.Called(ctx, getDatabase) + + if len(ret) == 0 { + panic("no return value specified for GetDatabase") + } + + var r0 *model.Database + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.GetDatabase) (*model.Database, error)); ok { + return rf(ctx, getDatabase) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.GetDatabase) *model.Database); ok { + r0 = rf(ctx, getDatabase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Database) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.GetDatabase) error); ok { + r1 = rf(ctx, getDatabase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetSegments provides a mock function with given fields: ctx, segmentID, segmentType, scope, collectionID +func (_m *ICoordinator) GetSegments(ctx context.Context, segmentID types.UniqueID, segmentType *string, scope *string, collectionID types.UniqueID) ([]*model.Segment, error) { 
+ ret := _m.Called(ctx, segmentID, segmentType, scope, collectionID) + + if len(ret) == 0 { + panic("no return value specified for GetSegments") + } + + var r0 []*model.Segment + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, *string, types.UniqueID) ([]*model.Segment, error)); ok { + return rf(ctx, segmentID, segmentType, scope, collectionID) + } + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, *string, *string, types.UniqueID) []*model.Segment); ok { + r0 = rf(ctx, segmentID, segmentType, scope, collectionID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*model.Segment) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.UniqueID, *string, *string, types.UniqueID) error); ok { + r1 = rf(ctx, segmentID, segmentType, scope, collectionID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenant provides a mock function with given fields: ctx, getTenant +func (_m *ICoordinator) GetTenant(ctx context.Context, getTenant *model.GetTenant) (*model.Tenant, error) { + ret := _m.Called(ctx, getTenant) + + if len(ret) == 0 { + panic("no return value specified for GetTenant") + } + + var r0 *model.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.GetTenant) (*model.Tenant, error)); ok { + return rf(ctx, getTenant) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.GetTenant) *model.Tenant); ok { + r0 = rf(ctx, getTenant) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.GetTenant) error); ok { + r1 = rf(ctx, getTenant) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenantsLastCompactionTime provides a mock function with given fields: ctx, tenantIDs +func (_m *ICoordinator) GetTenantsLastCompactionTime(ctx context.Context, tenantIDs []string) ([]*dbmodel.Tenant, error) { + ret := _m.Called(ctx, tenantIDs) + + if len(ret) == 0 { + panic("no 
return value specified for GetTenantsLastCompactionTime") + } + + var r0 []*dbmodel.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, []string) ([]*dbmodel.Tenant, error)); ok { + return rf(ctx, tenantIDs) + } + if rf, ok := ret.Get(0).(func(context.Context, []string) []*dbmodel.Tenant); ok { + r0 = rf(ctx, tenantIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, tenantIDs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ResetState provides a mock function with given fields: ctx +func (_m *ICoordinator) ResetState(ctx context.Context) error { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for ResetState") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(ctx) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SetTenantLastCompactionTime provides a mock function with given fields: ctx, tenantID, lastCompactionTime +func (_m *ICoordinator) SetTenantLastCompactionTime(ctx context.Context, tenantID string, lastCompactionTime int64) error { + ret := _m.Called(ctx, tenantID, lastCompactionTime) + + if len(ret) == 0 { + panic("no return value specified for SetTenantLastCompactionTime") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, string, int64) error); ok { + r0 = rf(ctx, tenantID, lastCompactionTime) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Start provides a mock function with given fields: +func (_m *ICoordinator) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stop provides a mock function with given fields: +func (_m *ICoordinator) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + 
panic("no return value specified for Stop") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateCollection provides a mock function with given fields: ctx, updateCollection +func (_m *ICoordinator) UpdateCollection(ctx context.Context, updateCollection *model.UpdateCollection) (*model.Collection, error) { + ret := _m.Called(ctx, updateCollection) + + if len(ret) == 0 { + panic("no return value specified for UpdateCollection") + } + + var r0 *model.Collection + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateCollection) (*model.Collection, error)); ok { + return rf(ctx, updateCollection) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateCollection) *model.Collection); ok { + r0 = rf(ctx, updateCollection) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Collection) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.UpdateCollection) error); ok { + r1 = rf(ctx, updateCollection) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSegment provides a mock function with given fields: ctx, updateSegment +func (_m *ICoordinator) UpdateSegment(ctx context.Context, updateSegment *model.UpdateSegment) (*model.Segment, error) { + ret := _m.Called(ctx, updateSegment) + + if len(ret) == 0 { + panic("no return value specified for UpdateSegment") + } + + var r0 *model.Segment + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateSegment) (*model.Segment, error)); ok { + return rf(ctx, updateSegment) + } + if rf, ok := ret.Get(0).(func(context.Context, *model.UpdateSegment) *model.Segment); ok { + r0 = rf(ctx, updateSegment) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*model.Segment) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *model.UpdateSegment) error); ok { + r1 = rf(ctx, updateSegment) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// 
NewICoordinator creates a new instance of ICoordinator. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewICoordinator(t interface { + mock.TestingT + Cleanup(func()) +}) *ICoordinator { + mock := &ICoordinator{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IDatabaseDb.go b/go/mocks/IDatabaseDb.go new file mode 100644 index 00000000000..982402e946d --- /dev/null +++ b/go/mocks/IDatabaseDb.go @@ -0,0 +1,123 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// IDatabaseDb is an autogenerated mock type for the IDatabaseDb type +type IDatabaseDb struct { + mock.Mock +} + +// DeleteAll provides a mock function with given fields: +func (_m *IDatabaseDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// GetAllDatabases provides a mock function with given fields: +func (_m *IDatabaseDb) GetAllDatabases() ([]*dbmodel.Database, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetAllDatabases") + } + + var r0 []*dbmodel.Database + var r1 error + if rf, ok := ret.Get(0).(func() ([]*dbmodel.Database, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*dbmodel.Database); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Database) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetDatabases provides a mock function with given fields: tenantID, databaseName +func (_m 
*IDatabaseDb) GetDatabases(tenantID string, databaseName string) ([]*dbmodel.Database, error) { + ret := _m.Called(tenantID, databaseName) + + if len(ret) == 0 { + panic("no return value specified for GetDatabases") + } + + var r0 []*dbmodel.Database + var r1 error + if rf, ok := ret.Get(0).(func(string, string) ([]*dbmodel.Database, error)); ok { + return rf(tenantID, databaseName) + } + if rf, ok := ret.Get(0).(func(string, string) []*dbmodel.Database); ok { + r0 = rf(tenantID, databaseName) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Database) + } + } + + if rf, ok := ret.Get(1).(func(string, string) error); ok { + r1 = rf(tenantID, databaseName) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Insert provides a mock function with given fields: in +func (_m *IDatabaseDb) Insert(in *dbmodel.Database) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.Database) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewIDatabaseDb creates a new instance of IDatabaseDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewIDatabaseDb(t interface { + mock.TestingT + Cleanup(func()) +}) *IDatabaseDb { + mock := &IDatabaseDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IMemberlistManager.go b/go/mocks/IMemberlistManager.go new file mode 100644 index 00000000000..a5bd93a54a1 --- /dev/null +++ b/go/mocks/IMemberlistManager.go @@ -0,0 +1,60 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// IMemberlistManager is an autogenerated mock type for the IMemberlistManager type +type IMemberlistManager struct { + mock.Mock +} + +// Start provides a mock function with given fields: +func (_m *IMemberlistManager) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stop provides a mock function with given fields: +func (_m *IMemberlistManager) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Stop") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewIMemberlistManager creates a new instance of IMemberlistManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewIMemberlistManager(t interface { + mock.TestingT + Cleanup(func()) +}) *IMemberlistManager { + mock := &IMemberlistManager{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IMemberlistStore.go b/go/mocks/IMemberlistStore.go new file mode 100644 index 00000000000..06262ece9b2 --- /dev/null +++ b/go/mocks/IMemberlistStore.go @@ -0,0 +1,84 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + memberlist_manager "github.com/chroma-core/chroma/go/pkg/memberlist_manager" + mock "github.com/stretchr/testify/mock" +) + +// IMemberlistStore is an autogenerated mock type for the IMemberlistStore type +type IMemberlistStore struct { + mock.Mock +} + +// GetMemberlist provides a mock function with given fields: ctx +func (_m *IMemberlistStore) GetMemberlist(ctx context.Context) (*memberlist_manager.Memberlist, string, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for GetMemberlist") + } + + var r0 *memberlist_manager.Memberlist + var r1 string + var r2 error + if rf, ok := ret.Get(0).(func(context.Context) (*memberlist_manager.Memberlist, string, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) *memberlist_manager.Memberlist); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*memberlist_manager.Memberlist) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) string); ok { + r1 = rf(ctx) + } else { + r1 = ret.Get(1).(string) + } + + if rf, ok := ret.Get(2).(func(context.Context) error); ok { + r2 = rf(ctx) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// UpdateMemberlist provides a mock function with given fields: ctx, memberlist, resourceVersion +func (_m *IMemberlistStore) UpdateMemberlist(ctx context.Context, memberlist *memberlist_manager.Memberlist, resourceVersion string) error { + ret := _m.Called(ctx, memberlist, resourceVersion) + + if len(ret) == 0 { + panic("no return value specified for UpdateMemberlist") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *memberlist_manager.Memberlist, string) error); ok { + r0 = rf(ctx, memberlist, resourceVersion) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewIMemberlistStore creates a new instance of IMemberlistStore. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewIMemberlistStore(t interface { + mock.TestingT + Cleanup(func()) +}) *IMemberlistStore { + mock := &IMemberlistStore{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IMetaDomain.go b/go/mocks/IMetaDomain.go new file mode 100644 index 00000000000..e4b4bb130d5 --- /dev/null +++ b/go/mocks/IMetaDomain.go @@ -0,0 +1,189 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// IMetaDomain is an autogenerated mock type for the IMetaDomain type +type IMetaDomain struct { + mock.Mock +} + +// CollectionDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) CollectionDb(ctx context.Context) dbmodel.ICollectionDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for CollectionDb") + } + + var r0 dbmodel.ICollectionDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ICollectionDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.ICollectionDb) + } + } + + return r0 +} + +// CollectionMetadataDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) CollectionMetadataDb(ctx context.Context) dbmodel.ICollectionMetadataDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for CollectionMetadataDb") + } + + var r0 dbmodel.ICollectionMetadataDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ICollectionMetadataDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.ICollectionMetadataDb) + } + } + + return r0 +} + +// DatabaseDb provides a mock function with given fields: ctx +func (_m 
*IMetaDomain) DatabaseDb(ctx context.Context) dbmodel.IDatabaseDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for DatabaseDb") + } + + var r0 dbmodel.IDatabaseDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IDatabaseDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.IDatabaseDb) + } + } + + return r0 +} + +// NotificationDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) NotificationDb(ctx context.Context) dbmodel.INotificationDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for NotificationDb") + } + + var r0 dbmodel.INotificationDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.INotificationDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.INotificationDb) + } + } + + return r0 +} + +// RecordLogDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) RecordLogDb(ctx context.Context) dbmodel.IRecordLogDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for RecordLogDb") + } + + var r0 dbmodel.IRecordLogDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.IRecordLogDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.IRecordLogDb) + } + } + + return r0 +} + +// SegmentDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) SegmentDb(ctx context.Context) dbmodel.ISegmentDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for SegmentDb") + } + + var r0 dbmodel.ISegmentDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ISegmentDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.ISegmentDb) + } + } + + return r0 +} + +// SegmentMetadataDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) SegmentMetadataDb(ctx context.Context) dbmodel.ISegmentMetadataDb { + ret 
:= _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for SegmentMetadataDb") + } + + var r0 dbmodel.ISegmentMetadataDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ISegmentMetadataDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.ISegmentMetadataDb) + } + } + + return r0 +} + +// TenantDb provides a mock function with given fields: ctx +func (_m *IMetaDomain) TenantDb(ctx context.Context) dbmodel.ITenantDb { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for TenantDb") + } + + var r0 dbmodel.ITenantDb + if rf, ok := ret.Get(0).(func(context.Context) dbmodel.ITenantDb); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(dbmodel.ITenantDb) + } + } + + return r0 +} + +// NewIMetaDomain creates a new instance of IMetaDomain. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewIMetaDomain(t interface { + mock.TestingT + Cleanup(func()) +}) *IMetaDomain { + mock := &IMetaDomain{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/INotificationDb.go b/go/mocks/INotificationDb.go new file mode 100644 index 00000000000..3a0a3d019cf --- /dev/null +++ b/go/mocks/INotificationDb.go @@ -0,0 +1,141 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// INotificationDb is an autogenerated mock type for the INotificationDb type +type INotificationDb struct { + mock.Mock +} + +// Delete provides a mock function with given fields: id +func (_m *INotificationDb) Delete(id []int64) error { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for Delete") + } + + var r0 error + if rf, ok := ret.Get(0).(func([]int64) error); ok { + r0 = rf(id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteAll provides a mock function with given fields: +func (_m *INotificationDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// GetAllPendingNotifications provides a mock function with given fields: +func (_m *INotificationDb) GetAllPendingNotifications() ([]*dbmodel.Notification, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetAllPendingNotifications") + } + + var r0 []*dbmodel.Notification + var r1 error + if rf, ok := ret.Get(0).(func() ([]*dbmodel.Notification, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*dbmodel.Notification); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Notification) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetNotificationByCollectionID provides a mock function with given fields: collectionID +func (_m *INotificationDb) GetNotificationByCollectionID(collectionID string) ([]*dbmodel.Notification, error) { + ret := _m.Called(collectionID) + + if len(ret) == 0 { + panic("no return value specified for GetNotificationByCollectionID") 
+ } + + var r0 []*dbmodel.Notification + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]*dbmodel.Notification, error)); ok { + return rf(collectionID) + } + if rf, ok := ret.Get(0).(func(string) []*dbmodel.Notification); ok { + r0 = rf(collectionID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Notification) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(collectionID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Insert provides a mock function with given fields: in +func (_m *INotificationDb) Insert(in *dbmodel.Notification) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.Notification) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewINotificationDb creates a new instance of INotificationDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewINotificationDb(t interface { + mock.TestingT + Cleanup(func()) +}) *INotificationDb { + mock := &INotificationDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IRecordLog.go b/go/mocks/IRecordLog.go new file mode 100644 index 00000000000..885ea7f35ae --- /dev/null +++ b/go/mocks/IRecordLog.go @@ -0,0 +1,156 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + + mock "github.com/stretchr/testify/mock" + + types "github.com/chroma-core/chroma/go/pkg/types" +) + +// IRecordLog is an autogenerated mock type for the IRecordLog type +type IRecordLog struct { + mock.Mock +} + +// GetAllCollectionIDsToCompact provides a mock function with given fields: +func (_m *IRecordLog) GetAllCollectionIDsToCompact() ([]*dbmodel.RecordLog, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetAllCollectionIDsToCompact") + } + + var r0 []*dbmodel.RecordLog + var r1 error + if rf, ok := ret.Get(0).(func() ([]*dbmodel.RecordLog, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*dbmodel.RecordLog); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.RecordLog) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PullLogs provides a mock function with given fields: ctx, collectionID, id, batchSize, endTimestamp +func (_m *IRecordLog) PullLogs(ctx context.Context, collectionID types.UniqueID, id int64, batchSize int, endTimestamp int64) ([]*dbmodel.RecordLog, error) { + ret := _m.Called(ctx, collectionID, id, batchSize, endTimestamp) + + if len(ret) == 0 { + panic("no return value specified for PullLogs") + } + + var r0 []*dbmodel.RecordLog + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, int64, int, int64) ([]*dbmodel.RecordLog, error)); ok { + return rf(ctx, collectionID, id, batchSize, endTimestamp) + } + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, int64, int, int64) []*dbmodel.RecordLog); ok { + r0 = rf(ctx, collectionID, id, batchSize, endTimestamp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.RecordLog) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, types.UniqueID, int64, 
int, int64) error); ok { + r1 = rf(ctx, collectionID, id, batchSize, endTimestamp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PushLogs provides a mock function with given fields: ctx, collectionID, recordContent +func (_m *IRecordLog) PushLogs(ctx context.Context, collectionID types.UniqueID, recordContent [][]byte) (int, error) { + ret := _m.Called(ctx, collectionID, recordContent) + + if len(ret) == 0 { + panic("no return value specified for PushLogs") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, [][]byte) (int, error)); ok { + return rf(ctx, collectionID, recordContent) + } + if rf, ok := ret.Get(0).(func(context.Context, types.UniqueID, [][]byte) int); ok { + r0 = rf(ctx, collectionID, recordContent) + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func(context.Context, types.UniqueID, [][]byte) error); ok { + r1 = rf(ctx, collectionID, recordContent) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Start provides a mock function with given fields: +func (_m *IRecordLog) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stop provides a mock function with given fields: +func (_m *IRecordLog) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Stop") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewIRecordLog creates a new instance of IRecordLog. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewIRecordLog(t interface { + mock.TestingT + Cleanup(func()) +}) *IRecordLog { + mock := &IRecordLog{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IRecordLogDb.go b/go/mocks/IRecordLogDb.go new file mode 100644 index 00000000000..bd4dee0281e --- /dev/null +++ b/go/mocks/IRecordLogDb.go @@ -0,0 +1,117 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" + + types "github.com/chroma-core/chroma/go/pkg/types" +) + +// IRecordLogDb is an autogenerated mock type for the IRecordLogDb type +type IRecordLogDb struct { + mock.Mock +} + +// GetAllCollectionsToCompact provides a mock function with given fields: +func (_m *IRecordLogDb) GetAllCollectionsToCompact() ([]*dbmodel.RecordLog, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetAllCollectionsToCompact") + } + + var r0 []*dbmodel.RecordLog + var r1 error + if rf, ok := ret.Get(0).(func() ([]*dbmodel.RecordLog, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*dbmodel.RecordLog); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.RecordLog) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PullLogs provides a mock function with given fields: collectionID, id, batchSize, endTimestamp +func (_m *IRecordLogDb) PullLogs(collectionID types.UniqueID, id int64, batchSize int, endTimestamp int64) ([]*dbmodel.RecordLog, error) { + ret := _m.Called(collectionID, id, batchSize, endTimestamp) + + if len(ret) == 0 { + panic("no return value specified for PullLogs") + } + + var r0 []*dbmodel.RecordLog + var r1 error + if rf, ok := ret.Get(0).(func(types.UniqueID, int64, int, int64) ([]*dbmodel.RecordLog, error)); ok { + return rf(collectionID, 
id, batchSize, endTimestamp) + } + if rf, ok := ret.Get(0).(func(types.UniqueID, int64, int, int64) []*dbmodel.RecordLog); ok { + r0 = rf(collectionID, id, batchSize, endTimestamp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.RecordLog) + } + } + + if rf, ok := ret.Get(1).(func(types.UniqueID, int64, int, int64) error); ok { + r1 = rf(collectionID, id, batchSize, endTimestamp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PushLogs provides a mock function with given fields: collectionID, recordsContent +func (_m *IRecordLogDb) PushLogs(collectionID types.UniqueID, recordsContent [][]byte) (int, error) { + ret := _m.Called(collectionID, recordsContent) + + if len(ret) == 0 { + panic("no return value specified for PushLogs") + } + + var r0 int + var r1 error + if rf, ok := ret.Get(0).(func(types.UniqueID, [][]byte) (int, error)); ok { + return rf(collectionID, recordsContent) + } + if rf, ok := ret.Get(0).(func(types.UniqueID, [][]byte) int); ok { + r0 = rf(collectionID, recordsContent) + } else { + r0 = ret.Get(0).(int) + } + + if rf, ok := ret.Get(1).(func(types.UniqueID, [][]byte) error); ok { + r1 = rf(collectionID, recordsContent) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewIRecordLogDb creates a new instance of IRecordLogDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewIRecordLogDb(t interface { + mock.TestingT + Cleanup(func()) +}) *IRecordLogDb { + mock := &IRecordLogDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ISegmentDb.go b/go/mocks/ISegmentDb.go new file mode 100644 index 00000000000..06d1f104090 --- /dev/null +++ b/go/mocks/ISegmentDb.go @@ -0,0 +1,151 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" + + model "github.com/chroma-core/chroma/go/pkg/model" + + types "github.com/chroma-core/chroma/go/pkg/types" +) + +// ISegmentDb is an autogenerated mock type for the ISegmentDb type +type ISegmentDb struct { + mock.Mock +} + +// DeleteAll provides a mock function with given fields: +func (_m *ISegmentDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteSegmentByID provides a mock function with given fields: id +func (_m *ISegmentDb) DeleteSegmentByID(id string) error { + ret := _m.Called(id) + + if len(ret) == 0 { + panic("no return value specified for DeleteSegmentByID") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// GetSegments provides a mock function with given fields: id, segmentType, scope, collectionID +func (_m *ISegmentDb) GetSegments(id types.UniqueID, segmentType *string, scope *string, collectionID types.UniqueID) ([]*dbmodel.SegmentAndMetadata, error) { + ret := _m.Called(id, segmentType, scope, collectionID) + + if len(ret) == 0 { + panic("no return value specified for GetSegments") + } + + var r0 []*dbmodel.SegmentAndMetadata + var r1 error + if rf, ok := ret.Get(0).(func(types.UniqueID, *string, *string, types.UniqueID) ([]*dbmodel.SegmentAndMetadata, error)); ok { + return rf(id, segmentType, scope, collectionID) + } + if rf, ok := ret.Get(0).(func(types.UniqueID, *string, *string, types.UniqueID) []*dbmodel.SegmentAndMetadata); ok { + r0 = rf(id, segmentType, scope, collectionID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.SegmentAndMetadata) + } + } + + if rf, ok := 
ret.Get(1).(func(types.UniqueID, *string, *string, types.UniqueID) error); ok { + r1 = rf(id, segmentType, scope, collectionID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Insert provides a mock function with given fields: _a0 +func (_m *ISegmentDb) Insert(_a0 *dbmodel.Segment) error { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.Segment) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// RegisterFilePaths provides a mock function with given fields: flushSegmentCompactions +func (_m *ISegmentDb) RegisterFilePaths(flushSegmentCompactions []*model.FlushSegmentCompaction) error { + ret := _m.Called(flushSegmentCompactions) + + if len(ret) == 0 { + panic("no return value specified for RegisterFilePaths") + } + + var r0 error + if rf, ok := ret.Get(0).(func([]*model.FlushSegmentCompaction) error); ok { + r0 = rf(flushSegmentCompactions) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Update provides a mock function with given fields: _a0 +func (_m *ISegmentDb) Update(_a0 *dbmodel.UpdateSegment) error { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for Update") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.UpdateSegment) error); ok { + r0 = rf(_a0) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewISegmentDb creates a new instance of ISegmentDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewISegmentDb(t interface { + mock.TestingT + Cleanup(func()) +}) *ISegmentDb { + mock := &ISegmentDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ISegmentMetadataDb.go b/go/mocks/ISegmentMetadataDb.go new file mode 100644 index 00000000000..e65aa5ab3cc --- /dev/null +++ b/go/mocks/ISegmentMetadataDb.go @@ -0,0 +1,99 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// ISegmentMetadataDb is an autogenerated mock type for the ISegmentMetadataDb type +type ISegmentMetadataDb struct { + mock.Mock +} + +// DeleteAll provides a mock function with given fields: +func (_m *ISegmentMetadataDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteBySegmentID provides a mock function with given fields: segmentID +func (_m *ISegmentMetadataDb) DeleteBySegmentID(segmentID string) error { + ret := _m.Called(segmentID) + + if len(ret) == 0 { + panic("no return value specified for DeleteBySegmentID") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string) error); ok { + r0 = rf(segmentID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// DeleteBySegmentIDAndKeys provides a mock function with given fields: segmentID, keys +func (_m *ISegmentMetadataDb) DeleteBySegmentIDAndKeys(segmentID string, keys []string) error { + ret := _m.Called(segmentID, keys) + + if len(ret) == 0 { + panic("no return value specified for DeleteBySegmentIDAndKeys") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, []string) error); ok { + r0 = rf(segmentID, keys) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Insert provides a mock function with 
given fields: in +func (_m *ISegmentMetadataDb) Insert(in []*dbmodel.SegmentMetadata) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func([]*dbmodel.SegmentMetadata) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewISegmentMetadataDb creates a new instance of ISegmentMetadataDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewISegmentMetadataDb(t interface { + mock.TestingT + Cleanup(func()) +}) *ISegmentMetadataDb { + mock := &ISegmentMetadataDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ITenantDb.go b/go/mocks/ITenantDb.go new file mode 100644 index 00000000000..ffc9c9bb7df --- /dev/null +++ b/go/mocks/ITenantDb.go @@ -0,0 +1,171 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + dbmodel "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" + mock "github.com/stretchr/testify/mock" +) + +// ITenantDb is an autogenerated mock type for the ITenantDb type +type ITenantDb struct { + mock.Mock +} + +// DeleteAll provides a mock function with given fields: +func (_m *ITenantDb) DeleteAll() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for DeleteAll") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// GetAllTenants provides a mock function with given fields: +func (_m *ITenantDb) GetAllTenants() ([]*dbmodel.Tenant, error) { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for GetAllTenants") + } + + var r0 []*dbmodel.Tenant + var r1 error + if rf, ok := ret.Get(0).(func() ([]*dbmodel.Tenant, error)); ok { + return rf() + } + if rf, ok := ret.Get(0).(func() []*dbmodel.Tenant); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Tenant) + } + } + + if rf, ok := ret.Get(1).(func() error); ok { + r1 = rf() + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenants provides a mock function with given fields: tenantID +func (_m *ITenantDb) GetTenants(tenantID string) ([]*dbmodel.Tenant, error) { + ret := _m.Called(tenantID) + + if len(ret) == 0 { + panic("no return value specified for GetTenants") + } + + var r0 []*dbmodel.Tenant + var r1 error + if rf, ok := ret.Get(0).(func(string) ([]*dbmodel.Tenant, error)); ok { + return rf(tenantID) + } + if rf, ok := ret.Get(0).(func(string) []*dbmodel.Tenant); ok { + r0 = rf(tenantID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Tenant) + } + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(tenantID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenantsLastCompactionTime provides a mock function with given fields: 
tenantIDs +func (_m *ITenantDb) GetTenantsLastCompactionTime(tenantIDs []string) ([]*dbmodel.Tenant, error) { + ret := _m.Called(tenantIDs) + + if len(ret) == 0 { + panic("no return value specified for GetTenantsLastCompactionTime") + } + + var r0 []*dbmodel.Tenant + var r1 error + if rf, ok := ret.Get(0).(func([]string) ([]*dbmodel.Tenant, error)); ok { + return rf(tenantIDs) + } + if rf, ok := ret.Get(0).(func([]string) []*dbmodel.Tenant); ok { + r0 = rf(tenantIDs) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*dbmodel.Tenant) + } + } + + if rf, ok := ret.Get(1).(func([]string) error); ok { + r1 = rf(tenantIDs) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Insert provides a mock function with given fields: in +func (_m *ITenantDb) Insert(in *dbmodel.Tenant) error { + ret := _m.Called(in) + + if len(ret) == 0 { + panic("no return value specified for Insert") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*dbmodel.Tenant) error); ok { + r0 = rf(in) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateTenantLastCompactionTime provides a mock function with given fields: tenantID, lastCompactionTime +func (_m *ITenantDb) UpdateTenantLastCompactionTime(tenantID string, lastCompactionTime int64) error { + ret := _m.Called(tenantID, lastCompactionTime) + + if len(ret) == 0 { + panic("no return value specified for UpdateTenantLastCompactionTime") + } + + var r0 error + if rf, ok := ret.Get(0).(func(string, int64) error); ok { + r0 = rf(tenantID, lastCompactionTime) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewITenantDb creates a new instance of ITenantDb. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewITenantDb(t interface { + mock.TestingT + Cleanup(func()) +}) *ITenantDb { + mock := &ITenantDb{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/ITransaction.go b/go/mocks/ITransaction.go new file mode 100644 index 00000000000..884e3129bbd --- /dev/null +++ b/go/mocks/ITransaction.go @@ -0,0 +1,46 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + mock "github.com/stretchr/testify/mock" +) + +// ITransaction is an autogenerated mock type for the ITransaction type +type ITransaction struct { + mock.Mock +} + +// Transaction provides a mock function with given fields: ctx, fn +func (_m *ITransaction) Transaction(ctx context.Context, fn func(context.Context) error) error { + ret := _m.Called(ctx, fn) + + if len(ret) == 0 { + panic("no return value specified for Transaction") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, func(context.Context) error) error); ok { + r0 = rf(ctx, fn) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewITransaction creates a new instance of ITransaction. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewITransaction(t interface { + mock.TestingT + Cleanup(func()) +}) *ITransaction { + mock := &ITransaction{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/IWatcher.go b/go/mocks/IWatcher.go new file mode 100644 index 00000000000..eba7bd2520e --- /dev/null +++ b/go/mocks/IWatcher.go @@ -0,0 +1,96 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + memberlist_manager "github.com/chroma-core/chroma/go/pkg/memberlist_manager" + mock "github.com/stretchr/testify/mock" +) + +// IWatcher is an autogenerated mock type for the IWatcher type +type IWatcher struct { + mock.Mock +} + +// GetStatus provides a mock function with given fields: node_ip +func (_m *IWatcher) GetStatus(node_ip string) (memberlist_manager.Status, error) { + ret := _m.Called(node_ip) + + if len(ret) == 0 { + panic("no return value specified for GetStatus") + } + + var r0 memberlist_manager.Status + var r1 error + if rf, ok := ret.Get(0).(func(string) (memberlist_manager.Status, error)); ok { + return rf(node_ip) + } + if rf, ok := ret.Get(0).(func(string) memberlist_manager.Status); ok { + r0 = rf(node_ip) + } else { + r0 = ret.Get(0).(memberlist_manager.Status) + } + + if rf, ok := ret.Get(1).(func(string) error); ok { + r1 = rf(node_ip) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RegisterCallback provides a mock function with given fields: callback +func (_m *IWatcher) RegisterCallback(callback memberlist_manager.NodeWatcherCallback) { + _m.Called(callback) +} + +// Start provides a mock function with given fields: +func (_m *IWatcher) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stop provides a mock function with given fields: +func (_m *IWatcher) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Stop") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewIWatcher creates a new instance of IWatcher. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewIWatcher(t interface { + mock.TestingT + Cleanup(func()) +}) *IWatcher { + mock := &IWatcher{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/LogServiceClient.go b/go/mocks/LogServiceClient.go new file mode 100644 index 00000000000..7f7a86cefa7 --- /dev/null +++ b/go/mocks/LogServiceClient.go @@ -0,0 +1,143 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + grpc "google.golang.org/grpc" + + logservicepb "github.com/chroma-core/chroma/go/pkg/proto/logservicepb" + + mock "github.com/stretchr/testify/mock" +) + +// LogServiceClient is an autogenerated mock type for the LogServiceClient type +type LogServiceClient struct { + mock.Mock +} + +// GetAllCollectionInfoToCompact provides a mock function with given fields: ctx, in, opts +func (_m *LogServiceClient) GetAllCollectionInfoToCompact(ctx context.Context, in *logservicepb.GetAllCollectionInfoToCompactRequest, opts ...grpc.CallOption) (*logservicepb.GetAllCollectionInfoToCompactResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetAllCollectionInfoToCompact") + } + + var r0 *logservicepb.GetAllCollectionInfoToCompactResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.GetAllCollectionInfoToCompactRequest, ...grpc.CallOption) (*logservicepb.GetAllCollectionInfoToCompactResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.GetAllCollectionInfoToCompactRequest, ...grpc.CallOption) *logservicepb.GetAllCollectionInfoToCompactResponse); ok { + r0 = rf(ctx, in, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*logservicepb.GetAllCollectionInfoToCompactResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *logservicepb.GetAllCollectionInfoToCompactRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PullLogs provides a mock function with given fields: ctx, in, opts +func (_m *LogServiceClient) PullLogs(ctx context.Context, in *logservicepb.PullLogsRequest, opts ...grpc.CallOption) (*logservicepb.PullLogsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for PullLogs") + } + + var r0 *logservicepb.PullLogsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PullLogsRequest, ...grpc.CallOption) (*logservicepb.PullLogsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PullLogsRequest, ...grpc.CallOption) *logservicepb.PullLogsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*logservicepb.PullLogsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *logservicepb.PullLogsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PushLogs provides a mock function with given fields: ctx, in, opts +func (_m *LogServiceClient) PushLogs(ctx context.Context, in *logservicepb.PushLogsRequest, opts ...grpc.CallOption) (*logservicepb.PushLogsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for PushLogs") + } + + var r0 *logservicepb.PushLogsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PushLogsRequest, ...grpc.CallOption) (*logservicepb.PushLogsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PushLogsRequest, ...grpc.CallOption) *logservicepb.PushLogsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*logservicepb.PushLogsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *logservicepb.PushLogsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewLogServiceClient creates a new instance of LogServiceClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewLogServiceClient(t interface { + mock.TestingT + Cleanup(func()) +}) *LogServiceClient { + mock := &LogServiceClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/LogServiceServer.go b/go/mocks/LogServiceServer.go new file mode 100644 index 00000000000..0cc7414f086 --- /dev/null +++ b/go/mocks/LogServiceServer.go @@ -0,0 +1,124 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + logservicepb "github.com/chroma-core/chroma/go/pkg/proto/logservicepb" + mock "github.com/stretchr/testify/mock" +) + +// LogServiceServer is an autogenerated mock type for the LogServiceServer type +type LogServiceServer struct { + mock.Mock +} + +// GetAllCollectionInfoToCompact provides a mock function with given fields: _a0, _a1 +func (_m *LogServiceServer) GetAllCollectionInfoToCompact(_a0 context.Context, _a1 *logservicepb.GetAllCollectionInfoToCompactRequest) (*logservicepb.GetAllCollectionInfoToCompactResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetAllCollectionInfoToCompact") + } + + var r0 *logservicepb.GetAllCollectionInfoToCompactResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.GetAllCollectionInfoToCompactRequest) (*logservicepb.GetAllCollectionInfoToCompactResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.GetAllCollectionInfoToCompactRequest) *logservicepb.GetAllCollectionInfoToCompactResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*logservicepb.GetAllCollectionInfoToCompactResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *logservicepb.GetAllCollectionInfoToCompactRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PullLogs provides a mock function with given fields: _a0, _a1 +func (_m *LogServiceServer) PullLogs(_a0 context.Context, _a1 *logservicepb.PullLogsRequest) (*logservicepb.PullLogsResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for PullLogs") + } + + var r0 *logservicepb.PullLogsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PullLogsRequest) (*logservicepb.PullLogsResponse, error)); ok { + return rf(_a0, _a1) + } + if 
rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PullLogsRequest) *logservicepb.PullLogsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*logservicepb.PullLogsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *logservicepb.PullLogsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// PushLogs provides a mock function with given fields: _a0, _a1 +func (_m *LogServiceServer) PushLogs(_a0 context.Context, _a1 *logservicepb.PushLogsRequest) (*logservicepb.PushLogsResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for PushLogs") + } + + var r0 *logservicepb.PushLogsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PushLogsRequest) (*logservicepb.PushLogsResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *logservicepb.PushLogsRequest) *logservicepb.PushLogsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*logservicepb.PushLogsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *logservicepb.PushLogsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mustEmbedUnimplementedLogServiceServer provides a mock function with given fields: +func (_m *LogServiceServer) mustEmbedUnimplementedLogServiceServer() { + _m.Called() +} + +// NewLogServiceServer creates a new instance of LogServiceServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewLogServiceServer(t interface { + mock.TestingT + Cleanup(func()) +}) *LogServiceServer { + mock := &LogServiceServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/NodeWatcherCallback.go b/go/mocks/NodeWatcherCallback.go new file mode 100644 index 00000000000..83af860bfc7 --- /dev/null +++ b/go/mocks/NodeWatcherCallback.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// NodeWatcherCallback is an autogenerated mock type for the NodeWatcherCallback type +type NodeWatcherCallback struct { + mock.Mock +} + +// Execute provides a mock function with given fields: node_ip +func (_m *NodeWatcherCallback) Execute(node_ip string) { + _m.Called(node_ip) +} + +// NewNodeWatcherCallback creates a new instance of NodeWatcherCallback. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewNodeWatcherCallback(t interface { + mock.TestingT + Cleanup(func()) +}) *NodeWatcherCallback { + mock := &NodeWatcherCallback{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/NotificationProcessor.go b/go/mocks/NotificationProcessor.go new file mode 100644 index 00000000000..381e5b748c8 --- /dev/null +++ b/go/mocks/NotificationProcessor.go @@ -0,0 +1,88 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + notification "github.com/chroma-core/chroma/go/pkg/notification" + mock "github.com/stretchr/testify/mock" +) + +// NotificationProcessor is an autogenerated mock type for the NotificationProcessor type +type NotificationProcessor struct { + mock.Mock +} + +// Process provides a mock function with given fields: ctx +func (_m *NotificationProcessor) Process(ctx context.Context) error { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for Process") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context) error); ok { + r0 = rf(ctx) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Start provides a mock function with given fields: +func (_m *NotificationProcessor) Start() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Start") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Stop provides a mock function with given fields: +func (_m *NotificationProcessor) Stop() error { + ret := _m.Called() + + if len(ret) == 0 { + panic("no return value specified for Stop") + } + + var r0 error + if rf, ok := ret.Get(0).(func() error); ok { + r0 = rf() + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Trigger provides a mock function with given fields: ctx, triggerMsg +func (_m *NotificationProcessor) Trigger(ctx context.Context, triggerMsg notification.TriggerMessage) { + _m.Called(ctx, triggerMsg) +} + +// NewNotificationProcessor creates a new instance of NotificationProcessor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewNotificationProcessor(t interface { + mock.TestingT + Cleanup(func()) +}) *NotificationProcessor { + mock := &NotificationProcessor{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/NotificationStore.go b/go/mocks/NotificationStore.go new file mode 100644 index 00000000000..9a744edc8a6 --- /dev/null +++ b/go/mocks/NotificationStore.go @@ -0,0 +1,125 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + model "github.com/chroma-core/chroma/go/pkg/model" + mock "github.com/stretchr/testify/mock" +) + +// NotificationStore is an autogenerated mock type for the NotificationStore type +type NotificationStore struct { + mock.Mock +} + +// AddNotification provides a mock function with given fields: ctx, _a1 +func (_m *NotificationStore) AddNotification(ctx context.Context, _a1 model.Notification) error { + ret := _m.Called(ctx, _a1) + + if len(ret) == 0 { + panic("no return value specified for AddNotification") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, model.Notification) error); ok { + r0 = rf(ctx, _a1) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// GetAllPendingNotifications provides a mock function with given fields: ctx +func (_m *NotificationStore) GetAllPendingNotifications(ctx context.Context) (map[string][]model.Notification, error) { + ret := _m.Called(ctx) + + if len(ret) == 0 { + panic("no return value specified for GetAllPendingNotifications") + } + + var r0 map[string][]model.Notification + var r1 error + if rf, ok := ret.Get(0).(func(context.Context) (map[string][]model.Notification, error)); ok { + return rf(ctx) + } + if rf, ok := ret.Get(0).(func(context.Context) map[string][]model.Notification); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string][]model.Notification) + } + } + + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + 
} else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetNotifications provides a mock function with given fields: ctx, collecitonID +func (_m *NotificationStore) GetNotifications(ctx context.Context, collecitonID string) ([]model.Notification, error) { + ret := _m.Called(ctx, collecitonID) + + if len(ret) == 0 { + panic("no return value specified for GetNotifications") + } + + var r0 []model.Notification + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string) ([]model.Notification, error)); ok { + return rf(ctx, collecitonID) + } + if rf, ok := ret.Get(0).(func(context.Context, string) []model.Notification); ok { + r0 = rf(ctx, collecitonID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]model.Notification) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, collecitonID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// RemoveNotifications provides a mock function with given fields: ctx, notifications +func (_m *NotificationStore) RemoveNotifications(ctx context.Context, notifications []model.Notification) error { + ret := _m.Called(ctx, notifications) + + if len(ret) == 0 { + panic("no return value specified for RemoveNotifications") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, []model.Notification) error); ok { + r0 = rf(ctx, notifications) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewNotificationStore creates a new instance of NotificationStore. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewNotificationStore(t interface { + mock.TestingT + Cleanup(func()) +}) *NotificationStore { + mock := &NotificationStore{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/Notifier.go b/go/mocks/Notifier.go new file mode 100644 index 00000000000..2d2be648291 --- /dev/null +++ b/go/mocks/Notifier.go @@ -0,0 +1,47 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + model "github.com/chroma-core/chroma/go/pkg/model" + mock "github.com/stretchr/testify/mock" +) + +// Notifier is an autogenerated mock type for the Notifier type +type Notifier struct { + mock.Mock +} + +// Notify provides a mock function with given fields: ctx, notifications +func (_m *Notifier) Notify(ctx context.Context, notifications []model.Notification) error { + ret := _m.Called(ctx, notifications) + + if len(ret) == 0 { + panic("no return value specified for Notify") + } + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, []model.Notification) error); ok { + r0 = rf(ctx, notifications) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// NewNotifier creates a new instance of Notifier. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewNotifier(t interface { + mock.TestingT + Cleanup(func()) +}) *Notifier { + mock := &Notifier{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/SegmentMetadataValueType.go b/go/mocks/SegmentMetadataValueType.go new file mode 100644 index 00000000000..f742156d07d --- /dev/null +++ b/go/mocks/SegmentMetadataValueType.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// SegmentMetadataValueType is an autogenerated mock type for the SegmentMetadataValueType type +type SegmentMetadataValueType struct { + mock.Mock +} + +// IsSegmentMetadataValueType provides a mock function with given fields: +func (_m *SegmentMetadataValueType) IsSegmentMetadataValueType() { + _m.Called() +} + +// NewSegmentMetadataValueType creates a new instance of SegmentMetadataValueType. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewSegmentMetadataValueType(t interface { + mock.TestingT + Cleanup(func()) +}) *SegmentMetadataValueType { + mock := &SegmentMetadataValueType{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/SysDBClient.go b/go/mocks/SysDBClient.go new file mode 100644 index 00000000000..c2eeb3bc44e --- /dev/null +++ b/go/mocks/SysDBClient.go @@ -0,0 +1,625 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + coordinatorpb "github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb" + emptypb "google.golang.org/protobuf/types/known/emptypb" + + grpc "google.golang.org/grpc" + + mock "github.com/stretchr/testify/mock" +) + +// SysDBClient is an autogenerated mock type for the SysDBClient type +type SysDBClient struct { + mock.Mock +} + +// CreateCollection provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) CreateCollection(ctx context.Context, in *coordinatorpb.CreateCollectionRequest, opts ...grpc.CallOption) (*coordinatorpb.CreateCollectionResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for CreateCollection") + } + + var r0 *coordinatorpb.CreateCollectionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateCollectionRequest, ...grpc.CallOption) (*coordinatorpb.CreateCollectionResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateCollectionRequest, ...grpc.CallOption) *coordinatorpb.CreateCollectionResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateCollectionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateCollectionRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateDatabase provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) CreateDatabase(ctx context.Context, in *coordinatorpb.CreateDatabaseRequest, opts ...grpc.CallOption) (*coordinatorpb.CreateDatabaseResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for CreateDatabase") + } + + var r0 *coordinatorpb.CreateDatabaseResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateDatabaseRequest, ...grpc.CallOption) (*coordinatorpb.CreateDatabaseResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateDatabaseRequest, ...grpc.CallOption) *coordinatorpb.CreateDatabaseResponse); ok { + r0 = rf(ctx, in, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateDatabaseResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateDatabaseRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateSegment provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) CreateSegment(ctx context.Context, in *coordinatorpb.CreateSegmentRequest, opts ...grpc.CallOption) (*coordinatorpb.CreateSegmentResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for CreateSegment") + } + + var r0 *coordinatorpb.CreateSegmentResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateSegmentRequest, ...grpc.CallOption) (*coordinatorpb.CreateSegmentResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateSegmentRequest, ...grpc.CallOption) *coordinatorpb.CreateSegmentResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateSegmentResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateSegmentRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateTenant provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) CreateTenant(ctx context.Context, in *coordinatorpb.CreateTenantRequest, opts ...grpc.CallOption) (*coordinatorpb.CreateTenantResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) 
+ ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for CreateTenant") + } + + var r0 *coordinatorpb.CreateTenantResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateTenantRequest, ...grpc.CallOption) (*coordinatorpb.CreateTenantResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateTenantRequest, ...grpc.CallOption) *coordinatorpb.CreateTenantResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateTenantResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateTenantRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteCollection provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) DeleteCollection(ctx context.Context, in *coordinatorpb.DeleteCollectionRequest, opts ...grpc.CallOption) (*coordinatorpb.DeleteCollectionResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DeleteCollection") + } + + var r0 *coordinatorpb.DeleteCollectionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteCollectionRequest, ...grpc.CallOption) (*coordinatorpb.DeleteCollectionResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteCollectionRequest, ...grpc.CallOption) *coordinatorpb.DeleteCollectionResponse); ok { + r0 = rf(ctx, in, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.DeleteCollectionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.DeleteCollectionRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteSegment provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) DeleteSegment(ctx context.Context, in *coordinatorpb.DeleteSegmentRequest, opts ...grpc.CallOption) (*coordinatorpb.DeleteSegmentResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DeleteSegment") + } + + var r0 *coordinatorpb.DeleteSegmentResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteSegmentRequest, ...grpc.CallOption) (*coordinatorpb.DeleteSegmentResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteSegmentRequest, ...grpc.CallOption) *coordinatorpb.DeleteSegmentResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.DeleteSegmentResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.DeleteSegmentRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FlushCollectionCompaction provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) FlushCollectionCompaction(ctx context.Context, in *coordinatorpb.FlushCollectionCompactionRequest, opts ...grpc.CallOption) (*coordinatorpb.FlushCollectionCompactionResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for FlushCollectionCompaction") + } + + var r0 *coordinatorpb.FlushCollectionCompactionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.FlushCollectionCompactionRequest, ...grpc.CallOption) (*coordinatorpb.FlushCollectionCompactionResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.FlushCollectionCompactionRequest, ...grpc.CallOption) *coordinatorpb.FlushCollectionCompactionResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.FlushCollectionCompactionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.FlushCollectionCompactionRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCollections provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) GetCollections(ctx context.Context, in *coordinatorpb.GetCollectionsRequest, opts ...grpc.CallOption) (*coordinatorpb.GetCollectionsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for GetCollections") + } + + var r0 *coordinatorpb.GetCollectionsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetCollectionsRequest, ...grpc.CallOption) (*coordinatorpb.GetCollectionsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetCollectionsRequest, ...grpc.CallOption) *coordinatorpb.GetCollectionsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetCollectionsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetCollectionsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetDatabase provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) GetDatabase(ctx context.Context, in *coordinatorpb.GetDatabaseRequest, opts ...grpc.CallOption) (*coordinatorpb.GetDatabaseResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetDatabase") + } + + var r0 *coordinatorpb.GetDatabaseResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetDatabaseRequest, ...grpc.CallOption) (*coordinatorpb.GetDatabaseResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetDatabaseRequest, ...grpc.CallOption) *coordinatorpb.GetDatabaseResponse); ok { + r0 = rf(ctx, in, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetDatabaseResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetDatabaseRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetLastCompactionTimeForTenant provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) GetLastCompactionTimeForTenant(ctx context.Context, in *coordinatorpb.GetLastCompactionTimeForTenantRequest, opts ...grpc.CallOption) (*coordinatorpb.GetLastCompactionTimeForTenantResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetLastCompactionTimeForTenant") + } + + var r0 *coordinatorpb.GetLastCompactionTimeForTenantResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetLastCompactionTimeForTenantRequest, ...grpc.CallOption) (*coordinatorpb.GetLastCompactionTimeForTenantResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetLastCompactionTimeForTenantRequest, ...grpc.CallOption) *coordinatorpb.GetLastCompactionTimeForTenantResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetLastCompactionTimeForTenantResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetLastCompactionTimeForTenantRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetSegments provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) GetSegments(ctx context.Context, in *coordinatorpb.GetSegmentsRequest, opts ...grpc.CallOption) (*coordinatorpb.GetSegmentsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetSegments") + } + + var r0 *coordinatorpb.GetSegmentsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetSegmentsRequest, ...grpc.CallOption) (*coordinatorpb.GetSegmentsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetSegmentsRequest, ...grpc.CallOption) *coordinatorpb.GetSegmentsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetSegmentsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetSegmentsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenant provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) GetTenant(ctx context.Context, in *coordinatorpb.GetTenantRequest, opts ...grpc.CallOption) (*coordinatorpb.GetTenantResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for GetTenant") + } + + var r0 *coordinatorpb.GetTenantResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetTenantRequest, ...grpc.CallOption) (*coordinatorpb.GetTenantResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetTenantRequest, ...grpc.CallOption) *coordinatorpb.GetTenantResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetTenantResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetTenantRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ResetState provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) ResetState(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*coordinatorpb.ResetStateResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for ResetState") + } + + var r0 *coordinatorpb.ResetStateResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *emptypb.Empty, ...grpc.CallOption) (*coordinatorpb.ResetStateResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *emptypb.Empty, ...grpc.CallOption) *coordinatorpb.ResetStateResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.ResetStateResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *emptypb.Empty, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) 
+ } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SetLastCompactionTimeForTenant provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) SetLastCompactionTimeForTenant(ctx context.Context, in *coordinatorpb.SetLastCompactionTimeForTenantRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for SetLastCompactionTimeForTenant") + } + + var r0 *emptypb.Empty + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.SetLastCompactionTimeForTenantRequest, ...grpc.CallOption) (*emptypb.Empty, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.SetLastCompactionTimeForTenantRequest, ...grpc.CallOption) *emptypb.Empty); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*emptypb.Empty) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.SetLastCompactionTimeForTenantRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateCollection provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) UpdateCollection(ctx context.Context, in *coordinatorpb.UpdateCollectionRequest, opts ...grpc.CallOption) (*coordinatorpb.UpdateCollectionResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for UpdateCollection") + } + + var r0 *coordinatorpb.UpdateCollectionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateCollectionRequest, ...grpc.CallOption) (*coordinatorpb.UpdateCollectionResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateCollectionRequest, ...grpc.CallOption) *coordinatorpb.UpdateCollectionResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.UpdateCollectionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.UpdateCollectionRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSegment provides a mock function with given fields: ctx, in, opts +func (_m *SysDBClient) UpdateSegment(ctx context.Context, in *coordinatorpb.UpdateSegmentRequest, opts ...grpc.CallOption) (*coordinatorpb.UpdateSegmentResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for UpdateSegment") + } + + var r0 *coordinatorpb.UpdateSegmentResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateSegmentRequest, ...grpc.CallOption) (*coordinatorpb.UpdateSegmentResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateSegmentRequest, ...grpc.CallOption) *coordinatorpb.UpdateSegmentResponse); ok { + r0 = rf(ctx, in, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.UpdateSegmentResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.UpdateSegmentRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewSysDBClient creates a new instance of SysDBClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewSysDBClient(t interface { + mock.TestingT + Cleanup(func()) +}) *SysDBClient { + mock := &SysDBClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/SysDBServer.go b/go/mocks/SysDBServer.go new file mode 100644 index 00000000000..45b53925cdf --- /dev/null +++ b/go/mocks/SysDBServer.go @@ -0,0 +1,516 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + coordinatorpb "github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb" + emptypb "google.golang.org/protobuf/types/known/emptypb" + + mock "github.com/stretchr/testify/mock" +) + +// SysDBServer is an autogenerated mock type for the SysDBServer type +type SysDBServer struct { + mock.Mock +} + +// CreateCollection provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) CreateCollection(_a0 context.Context, _a1 *coordinatorpb.CreateCollectionRequest) (*coordinatorpb.CreateCollectionResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for CreateCollection") + } + + var r0 *coordinatorpb.CreateCollectionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateCollectionRequest) (*coordinatorpb.CreateCollectionResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateCollectionRequest) 
*coordinatorpb.CreateCollectionResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateCollectionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateCollectionRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateDatabase provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) CreateDatabase(_a0 context.Context, _a1 *coordinatorpb.CreateDatabaseRequest) (*coordinatorpb.CreateDatabaseResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for CreateDatabase") + } + + var r0 *coordinatorpb.CreateDatabaseResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateDatabaseRequest) (*coordinatorpb.CreateDatabaseResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateDatabaseRequest) *coordinatorpb.CreateDatabaseResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateDatabaseResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateDatabaseRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateSegment provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) CreateSegment(_a0 context.Context, _a1 *coordinatorpb.CreateSegmentRequest) (*coordinatorpb.CreateSegmentResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for CreateSegment") + } + + var r0 *coordinatorpb.CreateSegmentResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateSegmentRequest) (*coordinatorpb.CreateSegmentResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateSegmentRequest) 
*coordinatorpb.CreateSegmentResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateSegmentResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateSegmentRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CreateTenant provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) CreateTenant(_a0 context.Context, _a1 *coordinatorpb.CreateTenantRequest) (*coordinatorpb.CreateTenantResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for CreateTenant") + } + + var r0 *coordinatorpb.CreateTenantResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateTenantRequest) (*coordinatorpb.CreateTenantResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.CreateTenantRequest) *coordinatorpb.CreateTenantResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.CreateTenantResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.CreateTenantRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteCollection provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) DeleteCollection(_a0 context.Context, _a1 *coordinatorpb.DeleteCollectionRequest) (*coordinatorpb.DeleteCollectionResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for DeleteCollection") + } + + var r0 *coordinatorpb.DeleteCollectionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteCollectionRequest) (*coordinatorpb.DeleteCollectionResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteCollectionRequest) 
*coordinatorpb.DeleteCollectionResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.DeleteCollectionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.DeleteCollectionRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteSegment provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) DeleteSegment(_a0 context.Context, _a1 *coordinatorpb.DeleteSegmentRequest) (*coordinatorpb.DeleteSegmentResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for DeleteSegment") + } + + var r0 *coordinatorpb.DeleteSegmentResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteSegmentRequest) (*coordinatorpb.DeleteSegmentResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.DeleteSegmentRequest) *coordinatorpb.DeleteSegmentResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.DeleteSegmentResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.DeleteSegmentRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FlushCollectionCompaction provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) FlushCollectionCompaction(_a0 context.Context, _a1 *coordinatorpb.FlushCollectionCompactionRequest) (*coordinatorpb.FlushCollectionCompactionResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for FlushCollectionCompaction") + } + + var r0 *coordinatorpb.FlushCollectionCompactionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.FlushCollectionCompactionRequest) (*coordinatorpb.FlushCollectionCompactionResponse, error)); ok { + return rf(_a0, _a1) + } + if 
rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.FlushCollectionCompactionRequest) *coordinatorpb.FlushCollectionCompactionResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.FlushCollectionCompactionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.FlushCollectionCompactionRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCollections provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) GetCollections(_a0 context.Context, _a1 *coordinatorpb.GetCollectionsRequest) (*coordinatorpb.GetCollectionsResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetCollections") + } + + var r0 *coordinatorpb.GetCollectionsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetCollectionsRequest) (*coordinatorpb.GetCollectionsResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetCollectionsRequest) *coordinatorpb.GetCollectionsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetCollectionsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetCollectionsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetDatabase provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) GetDatabase(_a0 context.Context, _a1 *coordinatorpb.GetDatabaseRequest) (*coordinatorpb.GetDatabaseResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetDatabase") + } + + var r0 *coordinatorpb.GetDatabaseResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetDatabaseRequest) (*coordinatorpb.GetDatabaseResponse, error)); ok { + return 
rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetDatabaseRequest) *coordinatorpb.GetDatabaseResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetDatabaseResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetDatabaseRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetLastCompactionTimeForTenant provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) GetLastCompactionTimeForTenant(_a0 context.Context, _a1 *coordinatorpb.GetLastCompactionTimeForTenantRequest) (*coordinatorpb.GetLastCompactionTimeForTenantResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetLastCompactionTimeForTenant") + } + + var r0 *coordinatorpb.GetLastCompactionTimeForTenantResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetLastCompactionTimeForTenantRequest) (*coordinatorpb.GetLastCompactionTimeForTenantResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetLastCompactionTimeForTenantRequest) *coordinatorpb.GetLastCompactionTimeForTenantResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetLastCompactionTimeForTenantResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetLastCompactionTimeForTenantRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetSegments provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) GetSegments(_a0 context.Context, _a1 *coordinatorpb.GetSegmentsRequest) (*coordinatorpb.GetSegmentsResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetSegments") + } + + var r0 *coordinatorpb.GetSegmentsResponse + 
var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetSegmentsRequest) (*coordinatorpb.GetSegmentsResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetSegmentsRequest) *coordinatorpb.GetSegmentsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetSegmentsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetSegmentsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTenant provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) GetTenant(_a0 context.Context, _a1 *coordinatorpb.GetTenantRequest) (*coordinatorpb.GetTenantResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetTenant") + } + + var r0 *coordinatorpb.GetTenantResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetTenantRequest) (*coordinatorpb.GetTenantResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetTenantRequest) *coordinatorpb.GetTenantResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetTenantResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetTenantRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ResetState provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) ResetState(_a0 context.Context, _a1 *emptypb.Empty) (*coordinatorpb.ResetStateResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for ResetState") + } + + var r0 *coordinatorpb.ResetStateResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *emptypb.Empty) (*coordinatorpb.ResetStateResponse, 
error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *emptypb.Empty) *coordinatorpb.ResetStateResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.ResetStateResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *emptypb.Empty) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// SetLastCompactionTimeForTenant provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) SetLastCompactionTimeForTenant(_a0 context.Context, _a1 *coordinatorpb.SetLastCompactionTimeForTenantRequest) (*emptypb.Empty, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for SetLastCompactionTimeForTenant") + } + + var r0 *emptypb.Empty + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.SetLastCompactionTimeForTenantRequest) (*emptypb.Empty, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.SetLastCompactionTimeForTenantRequest) *emptypb.Empty); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*emptypb.Empty) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.SetLastCompactionTimeForTenantRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateCollection provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) UpdateCollection(_a0 context.Context, _a1 *coordinatorpb.UpdateCollectionRequest) (*coordinatorpb.UpdateCollectionResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for UpdateCollection") + } + + var r0 *coordinatorpb.UpdateCollectionResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateCollectionRequest) (*coordinatorpb.UpdateCollectionResponse, error)); ok { + return rf(_a0, _a1) + 
} + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateCollectionRequest) *coordinatorpb.UpdateCollectionResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.UpdateCollectionResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.UpdateCollectionRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// UpdateSegment provides a mock function with given fields: _a0, _a1 +func (_m *SysDBServer) UpdateSegment(_a0 context.Context, _a1 *coordinatorpb.UpdateSegmentRequest) (*coordinatorpb.UpdateSegmentResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for UpdateSegment") + } + + var r0 *coordinatorpb.UpdateSegmentResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateSegmentRequest) (*coordinatorpb.UpdateSegmentResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.UpdateSegmentRequest) *coordinatorpb.UpdateSegmentResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.UpdateSegmentResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.UpdateSegmentRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mustEmbedUnimplementedSysDBServer provides a mock function with given fields: +func (_m *SysDBServer) mustEmbedUnimplementedSysDBServer() { + _m.Called() +} + +// NewSysDBServer creates a new instance of SysDBServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewSysDBServer(t interface { + mock.TestingT + Cleanup(func()) +}) *SysDBServer { + mock := &SysDBServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/UnsafeLogServiceServer.go b/go/mocks/UnsafeLogServiceServer.go new file mode 100644 index 00000000000..92a15424ae0 --- /dev/null +++ b/go/mocks/UnsafeLogServiceServer.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// UnsafeLogServiceServer is an autogenerated mock type for the UnsafeLogServiceServer type +type UnsafeLogServiceServer struct { + mock.Mock +} + +// mustEmbedUnimplementedLogServiceServer provides a mock function with given fields: +func (_m *UnsafeLogServiceServer) mustEmbedUnimplementedLogServiceServer() { + _m.Called() +} + +// NewUnsafeLogServiceServer creates a new instance of UnsafeLogServiceServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewUnsafeLogServiceServer(t interface { + mock.TestingT + Cleanup(func()) +}) *UnsafeLogServiceServer { + mock := &UnsafeLogServiceServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/UnsafeSysDBServer.go b/go/mocks/UnsafeSysDBServer.go new file mode 100644 index 00000000000..a45b6af2001 --- /dev/null +++ b/go/mocks/UnsafeSysDBServer.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// UnsafeSysDBServer is an autogenerated mock type for the UnsafeSysDBServer type +type UnsafeSysDBServer struct { + mock.Mock +} + +// mustEmbedUnimplementedSysDBServer provides a mock function with given fields: +func (_m *UnsafeSysDBServer) mustEmbedUnimplementedSysDBServer() { + _m.Called() +} + +// NewUnsafeSysDBServer creates a new instance of UnsafeSysDBServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewUnsafeSysDBServer(t interface { + mock.TestingT + Cleanup(func()) +}) *UnsafeSysDBServer { + mock := &UnsafeSysDBServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/UnsafeVectorReaderServer.go b/go/mocks/UnsafeVectorReaderServer.go new file mode 100644 index 00000000000..a55c0a1663c --- /dev/null +++ b/go/mocks/UnsafeVectorReaderServer.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// UnsafeVectorReaderServer is an autogenerated mock type for the UnsafeVectorReaderServer type +type UnsafeVectorReaderServer struct { + mock.Mock +} + +// mustEmbedUnimplementedVectorReaderServer provides a mock function with given fields: +func (_m *UnsafeVectorReaderServer) mustEmbedUnimplementedVectorReaderServer() { + _m.Called() +} + +// NewUnsafeVectorReaderServer creates a new instance of UnsafeVectorReaderServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewUnsafeVectorReaderServer(t interface { + mock.TestingT + Cleanup(func()) +}) *UnsafeVectorReaderServer { + mock := &UnsafeVectorReaderServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/VectorReaderClient.go b/go/mocks/VectorReaderClient.go new file mode 100644 index 00000000000..62c436f4670 --- /dev/null +++ b/go/mocks/VectorReaderClient.go @@ -0,0 +1,105 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + coordinatorpb "github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb" + grpc "google.golang.org/grpc" + + mock "github.com/stretchr/testify/mock" +) + +// VectorReaderClient is an autogenerated mock type for the VectorReaderClient type +type VectorReaderClient struct { + mock.Mock +} + +// GetVectors provides a mock function with given fields: ctx, in, opts +func (_m *VectorReaderClient) GetVectors(ctx context.Context, in *coordinatorpb.GetVectorsRequest, opts ...grpc.CallOption) (*coordinatorpb.GetVectorsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for GetVectors") + } + + var r0 *coordinatorpb.GetVectorsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetVectorsRequest, ...grpc.CallOption) (*coordinatorpb.GetVectorsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetVectorsRequest, ...grpc.CallOption) *coordinatorpb.GetVectorsResponse); ok { + r0 = rf(ctx, in, opts...) 
+ } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetVectorsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetVectorsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// QueryVectors provides a mock function with given fields: ctx, in, opts +func (_m *VectorReaderClient) QueryVectors(ctx context.Context, in *coordinatorpb.QueryVectorsRequest, opts ...grpc.CallOption) (*coordinatorpb.QueryVectorsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for QueryVectors") + } + + var r0 *coordinatorpb.QueryVectorsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.QueryVectorsRequest, ...grpc.CallOption) (*coordinatorpb.QueryVectorsResponse, error)); ok { + return rf(ctx, in, opts...) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.QueryVectorsRequest, ...grpc.CallOption) *coordinatorpb.QueryVectorsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.QueryVectorsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.QueryVectorsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewVectorReaderClient creates a new instance of VectorReaderClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewVectorReaderClient(t interface { + mock.TestingT + Cleanup(func()) +}) *VectorReaderClient { + mock := &VectorReaderClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/VectorReaderServer.go b/go/mocks/VectorReaderServer.go new file mode 100644 index 00000000000..55852d2e43b --- /dev/null +++ b/go/mocks/VectorReaderServer.go @@ -0,0 +1,94 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import ( + context "context" + + coordinatorpb "github.com/chroma-core/chroma/go/pkg/proto/coordinatorpb" + mock "github.com/stretchr/testify/mock" +) + +// VectorReaderServer is an autogenerated mock type for the VectorReaderServer type +type VectorReaderServer struct { + mock.Mock +} + +// GetVectors provides a mock function with given fields: _a0, _a1 +func (_m *VectorReaderServer) GetVectors(_a0 context.Context, _a1 *coordinatorpb.GetVectorsRequest) (*coordinatorpb.GetVectorsResponse, error) { + ret := _m.Called(_a0, _a1) + + if len(ret) == 0 { + panic("no return value specified for GetVectors") + } + + var r0 *coordinatorpb.GetVectorsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetVectorsRequest) (*coordinatorpb.GetVectorsResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.GetVectorsRequest) *coordinatorpb.GetVectorsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.GetVectorsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.GetVectorsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// QueryVectors provides a mock function with given fields: _a0, _a1 +func (_m *VectorReaderServer) QueryVectors(_a0 context.Context, _a1 *coordinatorpb.QueryVectorsRequest) (*coordinatorpb.QueryVectorsResponse, error) { + ret := _m.Called(_a0, _a1) + + 
if len(ret) == 0 { + panic("no return value specified for QueryVectors") + } + + var r0 *coordinatorpb.QueryVectorsResponse + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.QueryVectorsRequest) (*coordinatorpb.QueryVectorsResponse, error)); ok { + return rf(_a0, _a1) + } + if rf, ok := ret.Get(0).(func(context.Context, *coordinatorpb.QueryVectorsRequest) *coordinatorpb.QueryVectorsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*coordinatorpb.QueryVectorsResponse) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *coordinatorpb.QueryVectorsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// mustEmbedUnimplementedVectorReaderServer provides a mock function with given fields: +func (_m *VectorReaderServer) mustEmbedUnimplementedVectorReaderServer() { + _m.Called() +} + +// NewVectorReaderServer creates a new instance of VectorReaderServer. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewVectorReaderServer(t interface { + mock.TestingT + Cleanup(func()) +}) *VectorReaderServer { + mock := &VectorReaderServer{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/isUpdateCollectionRequest_MetadataUpdate.go b/go/mocks/isUpdateCollectionRequest_MetadataUpdate.go new file mode 100644 index 00000000000..204cb9a51a8 --- /dev/null +++ b/go/mocks/isUpdateCollectionRequest_MetadataUpdate.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// isUpdateCollectionRequest_MetadataUpdate is an autogenerated mock type for the isUpdateCollectionRequest_MetadataUpdate type +type isUpdateCollectionRequest_MetadataUpdate struct { + mock.Mock +} + +// isUpdateCollectionRequest_MetadataUpdate provides a mock function with given fields: +func (_m *isUpdateCollectionRequest_MetadataUpdate) isUpdateCollectionRequest_MetadataUpdate() { + _m.Called() +} + +// newIsUpdateCollectionRequest_MetadataUpdate creates a new instance of isUpdateCollectionRequest_MetadataUpdate. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newIsUpdateCollectionRequest_MetadataUpdate(t interface { + mock.TestingT + Cleanup(func()) +}) *isUpdateCollectionRequest_MetadataUpdate { + mock := &isUpdateCollectionRequest_MetadataUpdate{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/isUpdateMetadataValue_Value.go b/go/mocks/isUpdateMetadataValue_Value.go new file mode 100644 index 00000000000..51187097971 --- /dev/null +++ b/go/mocks/isUpdateMetadataValue_Value.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// isUpdateMetadataValue_Value is an autogenerated mock type for the isUpdateMetadataValue_Value type +type isUpdateMetadataValue_Value struct { + mock.Mock +} + +// isUpdateMetadataValue_Value provides a mock function with given fields: +func (_m *isUpdateMetadataValue_Value) isUpdateMetadataValue_Value() { + _m.Called() +} + +// newIsUpdateMetadataValue_Value creates a new instance of isUpdateMetadataValue_Value. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func newIsUpdateMetadataValue_Value(t interface { + mock.TestingT + Cleanup(func()) +}) *isUpdateMetadataValue_Value { + mock := &isUpdateMetadataValue_Value{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/isUpdateSegmentRequest_CollectionUpdate.go b/go/mocks/isUpdateSegmentRequest_CollectionUpdate.go new file mode 100644 index 00000000000..9a1dee41326 --- /dev/null +++ b/go/mocks/isUpdateSegmentRequest_CollectionUpdate.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. + +package mocks + +import mock "github.com/stretchr/testify/mock" + +// isUpdateSegmentRequest_CollectionUpdate is an autogenerated mock type for the isUpdateSegmentRequest_CollectionUpdate type +type isUpdateSegmentRequest_CollectionUpdate struct { + mock.Mock +} + +// isUpdateSegmentRequest_CollectionUpdate provides a mock function with given fields: +func (_m *isUpdateSegmentRequest_CollectionUpdate) isUpdateSegmentRequest_CollectionUpdate() { + _m.Called() +} + +// newIsUpdateSegmentRequest_CollectionUpdate creates a new instance of isUpdateSegmentRequest_CollectionUpdate. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newIsUpdateSegmentRequest_CollectionUpdate(t interface { + mock.TestingT + Cleanup(func()) +}) *isUpdateSegmentRequest_CollectionUpdate { + mock := &isUpdateSegmentRequest_CollectionUpdate{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/mocks/isUpdateSegmentRequest_MetadataUpdate.go b/go/mocks/isUpdateSegmentRequest_MetadataUpdate.go new file mode 100644 index 00000000000..15fffa75bee --- /dev/null +++ b/go/mocks/isUpdateSegmentRequest_MetadataUpdate.go @@ -0,0 +1,29 @@ +// Code generated by mockery v2.42.1. DO NOT EDIT. 
+ +package mocks + +import mock "github.com/stretchr/testify/mock" + +// isUpdateSegmentRequest_MetadataUpdate is an autogenerated mock type for the isUpdateSegmentRequest_MetadataUpdate type +type isUpdateSegmentRequest_MetadataUpdate struct { + mock.Mock +} + +// isUpdateSegmentRequest_MetadataUpdate provides a mock function with given fields: +func (_m *isUpdateSegmentRequest_MetadataUpdate) isUpdateSegmentRequest_MetadataUpdate() { + _m.Called() +} + +// newIsUpdateSegmentRequest_MetadataUpdate creates a new instance of isUpdateSegmentRequest_MetadataUpdate. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func newIsUpdateSegmentRequest_MetadataUpdate(t interface { + mock.TestingT + Cleanup(func()) +}) *isUpdateSegmentRequest_MetadataUpdate { + mock := &isUpdateSegmentRequest_MetadataUpdate{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/go/pkg/logservice/grpc/record_log_service.go b/go/pkg/logservice/grpc/record_log_service.go index fe3db4b87d4..39461110624 100644 --- a/go/pkg/logservice/grpc/record_log_service.go +++ b/go/pkg/logservice/grpc/record_log_service.go @@ -56,8 +56,8 @@ func (s *Server) PullLogs(ctx context.Context, req *logservicepb.PullLogsRequest if err != nil { return nil, err } - records := make([]*logservicepb.RecordLog, 0) - recordLogs, err := s.logService.PullLogs(ctx, collectionID, req.GetStartFromId(), int(req.BatchSize), req.GetEndTimestamp()) + records := make([]*logservicepb.LogRecord, 0) + recordLogs, err := s.logService.PullLogs(ctx, collectionID, req.GetStartFromOffset(), int(req.BatchSize), req.GetEndTimestamp()) if err != nil { log.Error("error pulling logs", zap.Error(err)) return nil, grpcutils.BuildInternalGrpcError(err.Error()) @@ -72,9 +72,9 @@ func (s *Server) PullLogs(ctx context.Context, req *logservicepb.PullLogsRequest } return nil, grpcError } - 
recordLog := &logservicepb.RecordLog{ - LogId: recordLogs[index].ID, - Record: record, + recordLog := &logservicepb.LogRecord{ + LogOffset: recordLogs[index].LogOffset, + Record: record, } records = append(records, recordLog) } @@ -94,9 +94,9 @@ func (s *Server) GetAllCollectionInfoToCompact(ctx context.Context, req *logserv } for _, recordLog := range recordLogs { collectionInfo := &logservicepb.CollectionInfo{ - CollectionId: *recordLog.CollectionID, - FirstLogId: recordLog.ID, - FirstLogIdTs: recordLog.Timestamp, + CollectionId: *recordLog.CollectionID, + FirstLogOffset: recordLog.LogOffset, + FirstLogTs: recordLog.Timestamp, } res.AllCollectionInfo = append(res.AllCollectionInfo, collectionInfo) } diff --git a/go/pkg/logservice/grpc/record_log_service_test.go b/go/pkg/logservice/grpc/record_log_service_test.go index 1aad091ee00..a71d62976e6 100644 --- a/go/pkg/logservice/grpc/record_log_service_test.go +++ b/go/pkg/logservice/grpc/record_log_service_test.go @@ -111,7 +111,7 @@ func (suite *RecordLogServiceTestSuite) TestServer_PushLogs() { suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId)).Find(&recordLogs) suite.Len(recordLogs, 3) for index := range recordLogs { - suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(int64(index+1), recordLogs[index].LogOffset) suite.Equal(suite.collectionId.String(), *recordLogs[index].CollectionID) record := &coordinatorpb.OperationRecord{} if unmarshalErr := proto.Unmarshal(*recordLogs[index].Record, record); err != nil { @@ -143,15 +143,15 @@ func (suite *RecordLogServiceTestSuite) TestServer_PullLogs() { // pull the records pullRequest := logservicepb.PullLogsRequest{ - CollectionId: suite.collectionId.String(), - StartFromId: 0, - BatchSize: 10, + CollectionId: suite.collectionId.String(), + StartFromOffset: 0, + BatchSize: 10, } pullResponse, err := suite.s.PullLogs(context.Background(), &pullRequest) suite.NoError(err) suite.Len(pullResponse.Records, 3) for index := range 
pullResponse.Records { - suite.Equal(int64(index+1), pullResponse.Records[index].LogId) + suite.Equal(int64(index+1), pullResponse.Records[index].LogOffset) suite.Equal(recordsToSubmit_sot[index].Id, pullResponse.Records[index].Record.Id) suite.Equal(recordsToSubmit_sot[index].Operation, pullResponse.Records[index].Record.Operation) suite.Equal(recordsToSubmit_sot[index].Metadata, pullResponse.Records[index].Record.Metadata) @@ -178,9 +178,9 @@ func (suite *RecordLogServiceTestSuite) TestServer_Bad_CollectionId() { // pull the records // pull the records pullRequest := logservicepb.PullLogsRequest{ - CollectionId: "badId", - StartFromId: 0, - BatchSize: 10, + CollectionId: "badId", + StartFromOffset: 0, + BatchSize: 10, } _, err = suite.s.PullLogs(context.Background(), &pullRequest) suite.Error(err) @@ -207,9 +207,9 @@ func (suite *RecordLogServiceTestSuite) TestServer_GetAllCollectionInfoToCompact suite.NoError(err) suite.Len(response.AllCollectionInfo, 1) suite.Equal(suite.collectionId.String(), response.AllCollectionInfo[0].CollectionId) - suite.Equal(int64(1), response.AllCollectionInfo[0].FirstLogId) - suite.True(response.AllCollectionInfo[0].FirstLogIdTs > startTime) - suite.True(response.AllCollectionInfo[0].FirstLogIdTs < time.Now().UnixNano()) + suite.Equal(int64(1), response.AllCollectionInfo[0].FirstLogOffset) + suite.True(response.AllCollectionInfo[0].FirstLogTs > startTime) + suite.True(response.AllCollectionInfo[0].FirstLogTs < time.Now().UnixNano()) // move log position testutils.MoveLogPosition(suite.db, suite.collectionId, 2) @@ -220,9 +220,9 @@ func (suite *RecordLogServiceTestSuite) TestServer_GetAllCollectionInfoToCompact suite.NoError(err) suite.Len(response.AllCollectionInfo, 1) suite.Equal(suite.collectionId.String(), response.AllCollectionInfo[0].CollectionId) - suite.Equal(int64(3), response.AllCollectionInfo[0].FirstLogId) - suite.True(response.AllCollectionInfo[0].FirstLogIdTs > startTime) - 
suite.True(response.AllCollectionInfo[0].FirstLogIdTs < time.Now().UnixNano()) + suite.Equal(int64(3), response.AllCollectionInfo[0].FirstLogOffset) + suite.True(response.AllCollectionInfo[0].FirstLogTs > startTime) + suite.True(response.AllCollectionInfo[0].FirstLogTs < time.Now().UnixNano()) } func TestRecordLogServiceTestSuite(t *testing.T) { diff --git a/go/pkg/metastore/db/dao/record_log.go b/go/pkg/metastore/db/dao/record_log.go index aa0c102929c..c967f5ebf7d 100644 --- a/go/pkg/metastore/db/dao/record_log.go +++ b/go/pkg/metastore/db/dao/record_log.go @@ -28,20 +28,22 @@ func (s *recordLogDb) PushLogs(collectionID types.UniqueID, recordsContent [][]b zap.Int("count", len(recordsContent))) var lastLog *dbmodel.RecordLog - err := tx.Select("id").Where("collection_id = ?", collectionIDStr).Last(&lastLog).Error + err := tx.Select("log_offset").Where("collection_id = ?", collectionIDStr).Order("log_offset DESC").Limit(1).Find(&lastLog).Error if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) { - log.Error("Get last log id error", zap.Error(err)) + log.Error("Get last log offset error", zap.Error(err)) tx.Rollback() return err } - var lastLogId = lastLog.ID - log.Info("PushLogs", zap.Int64("lastLogId", lastLogId)) + // The select will populate the lastLog with the last log in the collection, if + // one does not exist, it will have a default value of 0, so we can safely use it + var lastLogOffset = lastLog.LogOffset + log.Info("PushLogs", zap.Int64("lastLogOffset", lastLogOffset)) var recordLogs []*dbmodel.RecordLog for index := range recordsContent { recordLogs = append(recordLogs, &dbmodel.RecordLog{ CollectionID: collectionIDStr, - ID: lastLogId + int64(index) + 1, + LogOffset: lastLogOffset + int64(index) + 1, Timestamp: timestamp, Record: &recordsContent[index], }) @@ -61,23 +63,23 @@ func (s *recordLogDb) PushLogs(collectionID types.UniqueID, recordsContent [][]b return len(recordsContent), nil } -func (s *recordLogDb) PullLogs(collectionID 
types.UniqueID, id int64, batchSize int, endTimestamp int64) ([]*dbmodel.RecordLog, error) { +func (s *recordLogDb) PullLogs(collectionID types.UniqueID, offset int64, batchSize int, endTimestamp int64) ([]*dbmodel.RecordLog, error) { var collectionIDStr = types.FromUniqueID(collectionID) log.Info("PullLogs", zap.String("collectionID", *collectionIDStr), - zap.Int64("ID", id), + zap.Int64("log_offset", offset), zap.Int("batch_size", batchSize), zap.Int64("endTimestamp", endTimestamp)) var recordLogs []*dbmodel.RecordLog if endTimestamp > 0 { - result := s.db.Where("collection_id = ? AND id >= ? AND timestamp <= ?", collectionIDStr, id, endTimestamp).Order("id").Limit(batchSize).Find(&recordLogs) + result := s.db.Where("collection_id = ? AND log_offset >= ? AND timestamp <= ?", collectionIDStr, offset, endTimestamp).Order("log_offset").Limit(batchSize).Find(&recordLogs) if result.Error != nil && !errors.Is(result.Error, gorm.ErrRecordNotFound) { log.Error("PullLogs error", zap.Error(result.Error)) return nil, result.Error } } else { - result := s.db.Where("collection_id = ? AND id >= ?", collectionIDStr, id).Order("id").Limit(batchSize).Find(&recordLogs) + result := s.db.Where("collection_id = ? 
AND log_offset >= ?", collectionIDStr, offset).Order("log_offset").Limit(batchSize).Find(&recordLogs) if result.Error != nil && !errors.Is(result.Error, gorm.ErrRecordNotFound) { log.Error("PullLogs error", zap.Error(result.Error)) return nil, result.Error @@ -85,7 +87,7 @@ func (s *recordLogDb) PullLogs(collectionID types.UniqueID, id int64, batchSize } log.Info("PullLogs", zap.String("collectionID", *collectionIDStr), - zap.Int64("ID", id), + zap.Int64("log_offset", offset), zap.Int("batch_size", batchSize), zap.Int("count", len(recordLogs))) return recordLogs, nil @@ -96,10 +98,10 @@ func (s *recordLogDb) GetAllCollectionsToCompact() ([]*dbmodel.RecordLog, error) var recordLogs []*dbmodel.RecordLog var rawSql = ` with summary as ( - select r.collection_id, r.id, r.timestamp, row_number() over(partition by r.collection_id order by r.id) as rank + select r.collection_id, r.log_offset, r.timestamp, row_number() over(partition by r.collection_id order by r.log_offset) as rank from record_logs r, collections c where r.collection_id = c.id - and r.id>c.log_position + and r.log_offset>c.log_position ) select * from summary where rank=1 diff --git a/go/pkg/metastore/db/dao/record_log_test.go b/go/pkg/metastore/db/dao/record_log_test.go index b28f904a6d8..49f5019a0ae 100644 --- a/go/pkg/metastore/db/dao/record_log_test.go +++ b/go/pkg/metastore/db/dao/record_log_test.go @@ -65,7 +65,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PushLogs() { suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId1)).Find(&recordLogs) suite.Len(recordLogs, 3) for index := range recordLogs { - suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(int64(index+1), recordLogs[index].LogOffset) suite.Equal(suite.records[index], *recordLogs[index].Record) } @@ -80,7 +80,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PushLogs() { suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId1)).Find(&recordLogs) suite.Len(recordLogs, 5) 
for index := range recordLogs { - suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(int64(index+1), recordLogs[index].LogOffset) suite.Equal(suite.records[index], *recordLogs[index].Record) } @@ -95,7 +95,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PushLogs() { suite.db.Where("collection_id = ?", types.FromUniqueID(suite.collectionId2)).Find(&recordLogs) suite.Len(recordLogs, 5) for index := range recordLogs { - suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(int64(index+1), recordLogs[index].LogOffset) suite.Equal(suite.records[index], *recordLogs[index].Record) } } @@ -121,7 +121,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PullLogsFromID() { suite.NoError(err) suite.Len(recordLogs, 3) for index := range recordLogs { - suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(int64(index+1), recordLogs[index].LogOffset) suite.Equal(suite.records[index], *recordLogs[index].Record) } @@ -131,7 +131,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PullLogsFromID() { suite.Len(recordLogs, 5) for index := range recordLogs { - suite.Equal(int64(index+1), recordLogs[index].ID) + suite.Equal(int64(index+1), recordLogs[index].LogOffset) suite.Equal(suite.records[index], *recordLogs[index].Record) } @@ -140,7 +140,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PullLogsFromID() { suite.NoError(err) suite.Len(recordLogs, 3) for index := range recordLogs { - suite.Equal(int64(index+3), recordLogs[index].ID) + suite.Equal(int64(index+3), recordLogs[index].LogOffset) suite.Equal(suite.records[index+2], *recordLogs[index].Record) } @@ -149,7 +149,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_PullLogsFromID() { suite.NoError(err) suite.Len(recordLogs, 3) for index := range recordLogs { - suite.Equal(int64(index+3), recordLogs[index].ID) + suite.Equal(int64(index+3), recordLogs[index].LogOffset) suite.Equal(suite.records[index+2], *recordLogs[index].Record) } } @@ -165,7 +165,7 @@ func (suite 
*RecordLogDbTestSuite) TestRecordLogDb_GetAllCollectionsToCompact() suite.NoError(err) suite.Len(collectionInfos, 1) suite.Equal(suite.collectionId1.String(), *collectionInfos[0].CollectionID) - suite.Equal(int64(1), collectionInfos[0].ID) + suite.Equal(int64(1), collectionInfos[0].LogOffset) // move log position testutils.MoveLogPosition(suite.db, suite.collectionId1, 2) @@ -175,7 +175,7 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_GetAllCollectionsToCompact() suite.NoError(err) suite.Len(collectionInfos, 1) suite.Equal(suite.collectionId1.String(), *collectionInfos[0].CollectionID) - suite.Equal(int64(3), collectionInfos[0].ID) + suite.Equal(int64(3), collectionInfos[0].LogOffset) // push some logs count, err = suite.Db.PushLogs(suite.collectionId2, suite.records) @@ -187,9 +187,9 @@ func (suite *RecordLogDbTestSuite) TestRecordLogDb_GetAllCollectionsToCompact() suite.NoError(err) suite.Len(collectionInfos, 2) suite.Equal(suite.collectionId1.String(), *collectionInfos[0].CollectionID) - suite.Equal(int64(3), collectionInfos[0].ID) + suite.Equal(int64(3), collectionInfos[0].LogOffset) suite.Equal(suite.collectionId2.String(), *collectionInfos[1].CollectionID) - suite.Equal(int64(1), collectionInfos[1].ID) + suite.Equal(int64(1), collectionInfos[1].LogOffset) } func TestRecordLogDbTestSuite(t *testing.T) { diff --git a/go/pkg/metastore/db/dbcore/core.go b/go/pkg/metastore/db/dbcore/core.go index 83b47338ae7..956df311253 100644 --- a/go/pkg/metastore/db/dbcore/core.go +++ b/go/pkg/metastore/db/dbcore/core.go @@ -3,12 +3,13 @@ package dbcore import ( "context" "fmt" - "github.com/chroma-core/chroma/go/pkg/types" "os" "reflect" "strconv" "time" + "github.com/chroma-core/chroma/go/pkg/types" + "github.com/chroma-core/chroma/go/pkg/common" "github.com/chroma-core/chroma/go/pkg/metastore/db/dbmodel" "github.com/pingcap/log" diff --git a/go/pkg/metastore/db/dbmodel/record_log.go b/go/pkg/metastore/db/dbmodel/record_log.go index 5bd4da0ca0a..221235c0b4d 100644 --- 
a/go/pkg/metastore/db/dbmodel/record_log.go +++ b/go/pkg/metastore/db/dbmodel/record_log.go @@ -6,7 +6,7 @@ import ( type RecordLog struct { CollectionID *string `gorm:"collection_id;primaryKey;autoIncrement:false"` - ID int64 `gorm:"id;primaryKey;autoIncrement:false"` + LogOffset int64 `gorm:"log_offset;primaryKey;autoIncrement:false"` Timestamp int64 `gorm:"timestamp;"` Record *[]byte `gorm:"record;type:bytea"` } diff --git a/go/pkg/proto/coordinatorpb/chroma.pb.go b/go/pkg/proto/coordinatorpb/chroma.pb.go index 201f5b43bd1..0043b24c493 100644 --- a/go/pkg/proto/coordinatorpb/chroma.pb.go +++ b/go/pkg/proto/coordinatorpb/chroma.pb.go @@ -428,7 +428,7 @@ type Collection struct { Dimension *int32 `protobuf:"varint,5,opt,name=dimension,proto3,oneof" json:"dimension,omitempty"` Tenant string `protobuf:"bytes,6,opt,name=tenant,proto3" json:"tenant,omitempty"` Database string `protobuf:"bytes,7,opt,name=database,proto3" json:"database,omitempty"` - LogPosition int64 `protobuf:"varint,8,opt,name=logPosition,proto3" json:"logPosition,omitempty"` + LogPosition int64 `protobuf:"varint,8,opt,name=log_position,json=logPosition,proto3" json:"log_position,omitempty"` Version int32 `protobuf:"varint,9,opt,name=version,proto3" json:"version,omitempty"` } @@ -772,7 +772,7 @@ func (x *UpdateMetadata) GetMetadata() map[string]*UpdateMetadataValue { return nil } -// Represents an operation on the log +// Represents an operation the user submits type OperationRecord struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1293,7 +1293,7 @@ var file_chromadb_proto_chroma_proto_rawDesc = []byte{ 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x5f, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x97, 0x02, 0x0a, 0x0a, 0x43, 0x6f, 
0x6c, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x98, 0x02, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6d, @@ -1305,121 +1305,121 @@ var file_chromadb_proto_chroma_proto_rawDesc = []byte{ 0x73, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x6c, - 0x6f, 0x67, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x0b, 0x6c, 0x6f, 0x67, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, - 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, - 0x6f, 0x6e, 0x22, 0x46, 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x22, 0x1c, 0x0a, 0x06, 0x54, 0x65, - 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x85, 
0x01, 0x0a, 0x13, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, 0x65, - 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, 0x6f, - 0x61, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x22, 0xac, 0x01, 0x0a, 0x0e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x12, 0x40, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x58, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, - 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0xd0, 0x01, 0x0a, 0x0f, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x63, - 
0x6f, 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, - 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x01, 0x52, 0x08, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x09, 0x6f, 0x70, 0x65, - 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x22, 0x66, 0x0a, 0x15, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, - 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x73, - 0x65, 0x71, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x65, 0x71, - 0x49, 0x64, 0x12, 0x26, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x8e, 0x01, 0x0a, 0x11, 0x56, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, - 0x12, 0x15, 0x0a, 0x06, 0x73, 0x65, 0x71, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x05, 0x73, 0x65, 0x71, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x69, 0x73, 0x74, 0x61, - 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x08, 0x64, 0x69, 0x73, 0x74, 0x61, - 0x6e, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, + 0x09, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x6c, + 0x6f, 0x67, 0x5f, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x0b, 0x6c, 0x6f, 0x67, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, + 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x22, 0x46, 0x0a, 0x08, 0x44, 0x61, 0x74, 0x61, 0x62, 0x61, 0x73, 0x65, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x22, 0x1c, 0x0a, 0x06, 0x54, + 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x85, 0x01, 0x0a, 0x13, 0x55, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x12, 0x23, 0x0a, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x73, 
0x74, 0x72, 0x69, 0x6e, + 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x09, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52, 0x08, 0x69, 0x6e, 0x74, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x6c, + 0x6f, 0x61, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x22, 0xac, 0x01, 0x0a, 0x0e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x12, 0x40, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x4d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x58, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x22, 0xd0, 0x01, 0x0a, 0x0f, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, + 0x63, 0x74, 
0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, + 0x01, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x01, 0x52, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x2f, 0x0a, 0x09, 0x6f, 0x70, + 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, + 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x09, 0x0a, 0x07, 0x5f, + 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x22, 0x66, 0x0a, 0x15, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x15, 0x0a, 0x06, + 0x73, 0x65, 0x71, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x65, + 0x71, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, 0x01, - 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x49, 0x0a, 0x12, 0x56, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, - 0x73, 0x12, 0x33, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 
0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, - 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x44, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x69, - 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x1d, 0x0a, - 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x4d, 0x0a, 0x12, - 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x52, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x22, 0xbc, 0x01, 0x0a, 0x13, - 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x0c, 0x0a, - 0x01, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x01, 0x6b, 0x12, 0x1f, 0x0a, 0x0b, 0x61, - 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, - 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x49, 0x64, 0x73, 0x12, 0x2d, 0x0a, 0x12, - 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, - 0x67, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, - 0x65, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 
0x73, - 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x4c, 0x0a, 0x14, 0x51, 0x75, - 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x34, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x52, - 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x2a, 0x38, 0x0a, 0x09, 0x4f, 0x70, 0x65, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x44, 0x44, 0x10, 0x00, 0x12, 0x0a, - 0x0a, 0x06, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x55, 0x50, - 0x53, 0x45, 0x52, 0x54, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x45, 0x4c, 0x45, 0x54, 0x45, - 0x10, 0x03, 0x2a, 0x28, 0x0a, 0x0e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x45, 0x6e, 0x63, 0x6f, - 0x64, 0x69, 0x6e, 0x67, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x33, 0x32, 0x10, - 0x00, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x01, 0x2a, 0x28, 0x0a, 0x0c, - 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x0a, 0x0a, 0x06, - 0x56, 0x45, 0x43, 0x54, 0x4f, 0x52, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x45, 0x54, 0x41, - 0x44, 0x41, 0x54, 0x41, 0x10, 0x01, 0x32, 0xa2, 0x01, 0x0a, 0x0c, 0x56, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x45, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x56, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, - 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x73, 0x52, 
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4b, - 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1b, - 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, 0x68, - 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x3a, 0x5a, 0x38, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, - 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2f, 0x67, 0x6f, 0x2f, - 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x63, 0x6f, 0x6f, 0x72, 0x64, 0x69, - 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x74, 0x6f, 0x72, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x8e, 0x01, 0x0a, 0x11, + 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, + 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x15, 0x0a, 0x06, 0x73, 0x65, 0x71, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x73, 0x65, 0x71, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x64, 0x69, 0x73, 0x74, + 0x61, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x08, 0x64, 0x69, 0x73, 0x74, + 0x61, 0x6e, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x06, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x88, 0x01, + 0x01, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x49, 0x0a, 0x12, + 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 
0x65, 0x73, 0x75, 0x6c, + 0x74, 0x73, 0x12, 0x33, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x22, 0x44, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, + 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x1d, + 0x0a, 0x0a, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x4d, 0x0a, + 0x12, 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x52, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x22, 0xbc, 0x01, 0x0a, + 0x13, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x76, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x0c, + 0x0a, 0x01, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x01, 0x6b, 0x12, 0x1f, 0x0a, 0x0b, + 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x49, 0x64, 0x73, 0x12, 0x2d, 0x0a, + 0x12, 0x69, 
0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, + 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x69, 0x6e, 0x63, 0x6c, 0x75, + 0x64, 0x65, 0x45, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x1d, 0x0a, 0x0a, + 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x73, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x4c, 0x0a, 0x14, 0x51, + 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x51, 0x75, 0x65, 0x72, 0x79, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, + 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73, 0x2a, 0x38, 0x0a, 0x09, 0x4f, 0x70, 0x65, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x44, 0x44, 0x10, 0x00, 0x12, + 0x0a, 0x0a, 0x06, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x55, + 0x50, 0x53, 0x45, 0x52, 0x54, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x45, 0x4c, 0x45, 0x54, + 0x45, 0x10, 0x03, 0x2a, 0x28, 0x0a, 0x0e, 0x53, 0x63, 0x61, 0x6c, 0x61, 0x72, 0x45, 0x6e, 0x63, + 0x6f, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x0b, 0x0a, 0x07, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x33, 0x32, + 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x01, 0x2a, 0x28, 0x0a, + 0x0c, 0x53, 0x65, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x12, 0x0a, 0x0a, + 0x06, 0x56, 0x45, 0x43, 0x54, 0x4f, 0x52, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x45, 0x54, + 0x41, 0x44, 0x41, 0x54, 0x41, 0x10, 0x01, 0x32, 0xa2, 0x01, 0x0a, 0x0c, 0x56, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, 0x45, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x56, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x19, 
0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, + 0x47, 0x65, 0x74, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1a, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x4b, 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, + 0x1b, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x63, + 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x51, 0x75, 0x65, 0x72, 0x79, 0x56, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x3a, 0x5a, 0x38, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2f, 0x67, 0x6f, + 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x63, 0x6f, 0x6f, 0x72, 0x64, + 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/go/pkg/proto/logservicepb/logservice.pb.go b/go/pkg/proto/logservicepb/logservice.pb.go index a69a399975c..434aa10f898 100644 --- a/go/pkg/proto/logservicepb/logservice.pb.go +++ b/go/pkg/proto/logservicepb/logservice.pb.go @@ -128,10 +128,10 @@ type PullLogsRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - CollectionId string `protobuf:"bytes,1,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` - StartFromId int64 `protobuf:"varint,2,opt,name=start_from_id,json=startFromId,proto3" json:"start_from_id,omitempty"` - BatchSize int32 `protobuf:"varint,3,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` - EndTimestamp int64 
`protobuf:"varint,4,opt,name=end_timestamp,json=endTimestamp,proto3" json:"end_timestamp,omitempty"` + CollectionId string `protobuf:"bytes,1,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + StartFromOffset int64 `protobuf:"varint,2,opt,name=start_from_offset,json=startFromOffset,proto3" json:"start_from_offset,omitempty"` + BatchSize int32 `protobuf:"varint,3,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + EndTimestamp int64 `protobuf:"varint,4,opt,name=end_timestamp,json=endTimestamp,proto3" json:"end_timestamp,omitempty"` } func (x *PullLogsRequest) Reset() { @@ -173,9 +173,9 @@ func (x *PullLogsRequest) GetCollectionId() string { return "" } -func (x *PullLogsRequest) GetStartFromId() int64 { +func (x *PullLogsRequest) GetStartFromOffset() int64 { if x != nil { - return x.StartFromId + return x.StartFromOffset } return 0 } @@ -194,17 +194,18 @@ func (x *PullLogsRequest) GetEndTimestamp() int64 { return 0 } -type RecordLog struct { +// Represents an operation from the log +type LogRecord struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - LogId int64 `protobuf:"varint,1,opt,name=log_id,json=logId,proto3" json:"log_id,omitempty"` - Record *coordinatorpb.OperationRecord `protobuf:"bytes,2,opt,name=record,proto3" json:"record,omitempty"` + LogOffset int64 `protobuf:"varint,1,opt,name=log_offset,json=logOffset,proto3" json:"log_offset,omitempty"` + Record *coordinatorpb.OperationRecord `protobuf:"bytes,2,opt,name=record,proto3" json:"record,omitempty"` } -func (x *RecordLog) Reset() { - *x = RecordLog{} +func (x *LogRecord) Reset() { + *x = LogRecord{} if protoimpl.UnsafeEnabled { mi := &file_chromadb_proto_logservice_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -212,13 +213,13 @@ func (x *RecordLog) Reset() { } } -func (x *RecordLog) String() string { +func (x *LogRecord) String() string { return 
protoimpl.X.MessageStringOf(x) } -func (*RecordLog) ProtoMessage() {} +func (*LogRecord) ProtoMessage() {} -func (x *RecordLog) ProtoReflect() protoreflect.Message { +func (x *LogRecord) ProtoReflect() protoreflect.Message { mi := &file_chromadb_proto_logservice_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -230,19 +231,19 @@ func (x *RecordLog) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use RecordLog.ProtoReflect.Descriptor instead. -func (*RecordLog) Descriptor() ([]byte, []int) { +// Deprecated: Use LogRecord.ProtoReflect.Descriptor instead. +func (*LogRecord) Descriptor() ([]byte, []int) { return file_chromadb_proto_logservice_proto_rawDescGZIP(), []int{3} } -func (x *RecordLog) GetLogId() int64 { +func (x *LogRecord) GetLogOffset() int64 { if x != nil { - return x.LogId + return x.LogOffset } return 0 } -func (x *RecordLog) GetRecord() *coordinatorpb.OperationRecord { +func (x *LogRecord) GetRecord() *coordinatorpb.OperationRecord { if x != nil { return x.Record } @@ -254,7 +255,7 @@ type PullLogsResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Records []*RecordLog `protobuf:"bytes,1,rep,name=records,proto3" json:"records,omitempty"` + Records []*LogRecord `protobuf:"bytes,1,rep,name=records,proto3" json:"records,omitempty"` } func (x *PullLogsResponse) Reset() { @@ -289,7 +290,7 @@ func (*PullLogsResponse) Descriptor() ([]byte, []int) { return file_chromadb_proto_logservice_proto_rawDescGZIP(), []int{4} } -func (x *PullLogsResponse) GetRecords() []*RecordLog { +func (x *PullLogsResponse) GetRecords() []*LogRecord { if x != nil { return x.Records } @@ -302,9 +303,10 @@ type CollectionInfo struct { unknownFields protoimpl.UnknownFields CollectionId string `protobuf:"bytes,1,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` - // The first log id of the collection that needs to be 
compacted - FirstLogId int64 `protobuf:"varint,2,opt,name=first_log_id,json=firstLogId,proto3" json:"first_log_id,omitempty"` - FirstLogIdTs int64 `protobuf:"varint,3,opt,name=first_log_id_ts,json=firstLogIdTs,proto3" json:"first_log_id_ts,omitempty"` + // The log offset of the first log entry of the collection that needs to be compacted + FirstLogOffset int64 `protobuf:"varint,2,opt,name=first_log_offset,json=firstLogOffset,proto3" json:"first_log_offset,omitempty"` + // The timestamp of the first log entry of the collection that needs to be compacted + FirstLogTs int64 `protobuf:"varint,3,opt,name=first_log_ts,json=firstLogTs,proto3" json:"first_log_ts,omitempty"` } func (x *CollectionInfo) Reset() { @@ -346,16 +348,16 @@ func (x *CollectionInfo) GetCollectionId() string { return "" } -func (x *CollectionInfo) GetFirstLogId() int64 { +func (x *CollectionInfo) GetFirstLogOffset() int64 { if x != nil { - return x.FirstLogId + return x.FirstLogOffset } return 0 } -func (x *CollectionInfo) GetFirstLogIdTs() int64 { +func (x *CollectionInfo) GetFirstLogTs() int64 { if x != nil { - return x.FirstLogIdTs + return x.FirstLogTs } return 0 } @@ -462,65 +464,66 @@ var file_chromadb_proto_logservice_proto_rawDesc = []byte{ 0x73, 0x22, 0x35, 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x72, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x9e, 0x01, 0x0a, 0x0f, 0x50, 0x75, 0x6c, + 0x6f, 0x72, 0x64, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xa6, 0x01, 0x0a, 0x0f, 0x50, 0x75, 0x6c, 0x6c, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 
0x49, - 0x64, 0x12, 0x22, 0x0a, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, - 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x46, - 0x72, 0x6f, 0x6d, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, - 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, - 0x53, 0x69, 0x7a, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x65, 0x6e, 0x64, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x53, 0x0a, 0x09, 0x52, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x15, 0x0a, 0x06, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x2f, 0x0a, + 0x64, 0x12, 0x2a, 0x0a, 0x11, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, + 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x46, 0x72, 0x6f, 0x6d, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x1d, 0x0a, + 0x0a, 0x62, 0x61, 0x74, 0x63, 0x68, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x05, 0x52, 0x09, 0x62, 0x61, 0x74, 0x63, 0x68, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x23, 0x0a, 0x0d, + 0x65, 0x6e, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x03, 0x52, 0x0c, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, + 0x70, 0x22, 0x5b, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1d, + 0x0a, 0x0a, 0x6c, 0x6f, 0x67, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x09, 0x6c, 0x6f, 0x67, 0x4f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 
0x2e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0x3f, 0x0a, 0x10, 0x50, 0x75, 0x6c, 0x6c, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2b, 0x0a, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x22, - 0x7e, 0x0a, 0x0e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, - 0x6f, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x20, 0x0a, 0x0c, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, - 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x66, 0x69, - 0x72, 0x73, 0x74, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x0f, 0x66, 0x69, 0x72, 0x73, - 0x74, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, 0x5f, 0x74, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x0c, 0x66, 0x69, 0x72, 0x73, 0x74, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x54, 0x73, 0x22, - 0x26, 0x0a, 0x24, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x6f, 0x0a, 0x25, 0x47, 0x65, 0x74, 0x41, 0x6c, - 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, - 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x46, 0x0a, 0x13, 0x61, 0x6c, 0x6c, 0x5f, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 
0x32, 0x16, 0x2e, - 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x11, 0x61, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x32, 0x8e, 0x02, 0x0a, 0x0a, 0x4c, 0x6f, 0x67, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x3f, 0x0a, 0x08, 0x50, 0x75, 0x73, 0x68, 0x4c, - 0x6f, 0x67, 0x73, 0x12, 0x17, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x50, 0x75, 0x73, - 0x68, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x63, - 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3f, 0x0a, 0x08, 0x50, 0x75, 0x6c, 0x6c, - 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x17, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x50, 0x75, - 0x6c, 0x6c, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, - 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x50, 0x75, 0x6c, 0x6c, 0x4c, 0x6f, 0x67, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x7e, 0x0a, 0x1d, 0x47, 0x65, 0x74, - 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, - 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x12, 0x2c, 0x2e, 0x63, 0x68, 0x72, - 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, - 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2d, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, - 0x61, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x39, 0x5a, 0x37, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2d, 0x63, - 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x6b, - 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x6c, 0x6f, 0x67, 0x73, 0x65, 0x72, 0x76, 0x69, - 0x63, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x4c, 0x6f, 0x67, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x07, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x22, + 0x81, 0x01, 0x0a, 0x0e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, + 0x66, 0x6f, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6c, 0x6c, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x28, 0x0a, 0x10, 0x66, 0x69, 0x72, 0x73, 0x74, + 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x0e, 0x66, 0x69, 0x72, 0x73, 0x74, 0x4c, 0x6f, 0x67, 0x4f, 0x66, 0x66, 0x73, 0x65, + 0x74, 0x12, 0x20, 0x0a, 0x0c, 0x66, 0x69, 0x72, 0x73, 0x74, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x74, + 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x66, 0x69, 0x72, 0x73, 0x74, 0x4c, 0x6f, + 0x67, 0x54, 0x73, 0x22, 0x26, 0x0a, 0x24, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, + 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, + 0x70, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x6f, 0x0a, 0x25, 0x47, + 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, + 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x46, 0x0a, 0x13, 0x61, 0x6c, 0x6c, 0x5f, 0x63, 0x6f, 0x6c, 0x6c, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 
0x20, 0x03, 0x28, + 0x0b, 0x32, 0x16, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x11, 0x61, 0x6c, 0x6c, 0x43, 0x6f, + 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x32, 0x8e, 0x02, 0x0a, + 0x0a, 0x4c, 0x6f, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x3f, 0x0a, 0x08, 0x50, + 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x17, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, + 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x18, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, + 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3f, 0x0a, 0x08, + 0x50, 0x75, 0x6c, 0x6c, 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x17, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, + 0x61, 0x2e, 0x50, 0x75, 0x6c, 0x6c, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x18, 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x50, 0x75, 0x6c, 0x6c, 0x4c, + 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x7e, 0x0a, + 0x1d, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, 0x61, 0x63, 0x74, 0x12, 0x2c, + 0x2e, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, + 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, + 0x6d, 0x70, 0x61, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2d, 0x2e, 0x63, + 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x43, 0x6f, 0x6c, 0x6c, + 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x54, 0x6f, 0x43, 0x6f, 0x6d, 0x70, + 0x61, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x39, 0x5a, + 0x37, 0x67, 0x69, 
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x68, 0x72, 0x6f, + 0x6d, 0x61, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x63, 0x68, 0x72, 0x6f, 0x6d, 0x61, 0x2f, 0x67, + 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x6c, 0x6f, 0x67, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -540,7 +543,7 @@ var file_chromadb_proto_logservice_proto_goTypes = []interface{}{ (*PushLogsRequest)(nil), // 0: chroma.PushLogsRequest (*PushLogsResponse)(nil), // 1: chroma.PushLogsResponse (*PullLogsRequest)(nil), // 2: chroma.PullLogsRequest - (*RecordLog)(nil), // 3: chroma.RecordLog + (*LogRecord)(nil), // 3: chroma.LogRecord (*PullLogsResponse)(nil), // 4: chroma.PullLogsResponse (*CollectionInfo)(nil), // 5: chroma.CollectionInfo (*GetAllCollectionInfoToCompactRequest)(nil), // 6: chroma.GetAllCollectionInfoToCompactRequest @@ -549,8 +552,8 @@ var file_chromadb_proto_logservice_proto_goTypes = []interface{}{ } var file_chromadb_proto_logservice_proto_depIdxs = []int32{ 8, // 0: chroma.PushLogsRequest.records:type_name -> chroma.OperationRecord - 8, // 1: chroma.RecordLog.record:type_name -> chroma.OperationRecord - 3, // 2: chroma.PullLogsResponse.records:type_name -> chroma.RecordLog + 8, // 1: chroma.LogRecord.record:type_name -> chroma.OperationRecord + 3, // 2: chroma.PullLogsResponse.records:type_name -> chroma.LogRecord 5, // 3: chroma.GetAllCollectionInfoToCompactResponse.all_collection_info:type_name -> chroma.CollectionInfo 0, // 4: chroma.LogService.PushLogs:input_type -> chroma.PushLogsRequest 2, // 5: chroma.LogService.PullLogs:input_type -> chroma.PullLogsRequest @@ -608,7 +611,7 @@ func file_chromadb_proto_logservice_proto_init() { } } file_chromadb_proto_logservice_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RecordLog); i { + switch v := v.(*LogRecord); i { case 0: return &v.state case 1: diff --git 
a/idl/chromadb/proto/chroma.proto b/idl/chromadb/proto/chroma.proto index 70a684bb7f8..99fd51bbbc8 100644 --- a/idl/chromadb/proto/chroma.proto +++ b/idl/chromadb/proto/chroma.proto @@ -53,7 +53,7 @@ message Collection { optional int32 dimension = 5; string tenant = 6; string database = 7; - int64 logPosition = 8; + int64 log_position = 8; int32 version = 9; } @@ -79,7 +79,7 @@ message UpdateMetadata { map metadata = 1; } -// Represents an operation on the log +// Represents an operation the user submits message OperationRecord { string id = 1; optional Vector vector = 2; diff --git a/idl/chromadb/proto/logservice.proto b/idl/chromadb/proto/logservice.proto index f067af336d0..8c52a3165ce 100644 --- a/idl/chromadb/proto/logservice.proto +++ b/idl/chromadb/proto/logservice.proto @@ -16,25 +16,27 @@ message PushLogsResponse { message PullLogsRequest { string collection_id = 1; - int64 start_from_id = 2; + int64 start_from_offset = 2; int32 batch_size = 3; int64 end_timestamp = 4; } -message RecordLog { - int64 log_id = 1; +// Represents an operation from the log +message LogRecord { + int64 log_offset = 1; OperationRecord record = 2; } message PullLogsResponse { - repeated RecordLog records = 1; + repeated LogRecord records = 1; } message CollectionInfo { string collection_id = 1; - // The first log id of the collection that needs to be compacted - int64 first_log_id = 2; - int64 first_log_id_ts = 3; + // The log offset of the first log entry of the collection that needs to be compacted + int64 first_log_offset = 2; + // The timestamp of the first log entry of the collection that needs to be compacted + int64 first_log_ts = 3; } message GetAllCollectionInfoToCompactRequest { diff --git a/rust/worker/src/compactor/scheduler.rs b/rust/worker/src/compactor/scheduler.rs index 244146b3171..a7bd7f2194f 100644 --- a/rust/worker/src/compactor/scheduler.rs +++ b/rust/worker/src/compactor/scheduler.rs @@ -87,8 +87,8 @@ impl Scheduler { tenant_id: collection[0].tenant.clone(), 
// TODO: get the last compaction time from the sysdb last_compaction_time: 0, - first_record_time: collection_info.first_log_id_ts, - offset: collection_info.first_log_id, + first_record_time: collection_info.first_log_ts, + offset: collection_info.first_log_offset, }); } Err(e) => { @@ -272,8 +272,8 @@ mod tests { collection_id_1.clone(), Box::new(LogRecord { collection_id: collection_id_1.clone(), - log_id: 1, - log_id_ts: 1, + log_offset: 1, + log_ts: 1, record: EmbeddingRecord { id: "embedding_id_1".to_string(), seq_id: BigInt::from(1), @@ -292,8 +292,8 @@ mod tests { collection_id_2.clone(), Box::new(LogRecord { collection_id: collection_id_2.clone(), - log_id: 2, - log_id_ts: 2, + log_offset: 2, + log_ts: 2, record: EmbeddingRecord { id: "embedding_id_2".to_string(), seq_id: BigInt::from(2), diff --git a/rust/worker/src/execution/operators/pull_log.rs b/rust/worker/src/execution/operators/pull_log.rs index 8379667f2f2..90d4d64f151 100644 --- a/rust/worker/src/execution/operators/pull_log.rs +++ b/rust/worker/src/execution/operators/pull_log.rs @@ -160,8 +160,8 @@ mod tests { collection_id_1.clone(), Box::new(LogRecord { collection_id: collection_id_1.clone(), - log_id: 1, - log_id_ts: 1, + log_offset: 1, + log_ts: 1, record: EmbeddingRecord { id: "embedding_id_1".to_string(), seq_id: BigInt::from(1), @@ -177,8 +177,8 @@ mod tests { collection_id_1.clone(), Box::new(LogRecord { collection_id: collection_id_1.clone(), - log_id: 2, - log_id_ts: 2, + log_offset: 2, + log_ts: 2, record: EmbeddingRecord { id: "embedding_id_2".to_string(), seq_id: BigInt::from(2), diff --git a/rust/worker/src/log/log.rs b/rust/worker/src/log/log.rs index 466c02cff86..8aad93264c4 100644 --- a/rust/worker/src/log/log.rs +++ b/rust/worker/src/log/log.rs @@ -13,13 +13,16 @@ use std::fmt::Debug; use thiserror::Error; use uuid::Uuid; -// CollectionInfo is a struct that contains information about a collection for the -// compacting process. 
It contains information about the collection id, the first log id, -// and the first log id timestamp since last compaction. +/// CollectionInfo is a struct that contains information about a collection for the +/// compacting process. +/// Fields: +/// - collection_id: the id of the collection that needs to be compacted +/// - first_log_offset: the offset of the first log entry in the collection that needs to be compacted +/// - first_log_ts: the timestamp of the first log entry in the collection that needs to be compacted pub(crate) struct CollectionInfo { pub(crate) collection_id: String, - pub(crate) first_log_id: i64, - pub(crate) first_log_id_ts: i64, + pub(crate) first_log_offset: i64, + pub(crate) first_log_ts: i64, } #[derive(Clone, Debug)] @@ -129,7 +132,7 @@ impl Log for GrpcLog { }; let request = self.client.pull_logs(chroma_proto::PullLogsRequest { collection_id: collection_id.to_string(), - start_from_id: offset, + start_from_offset: offset, batch_size, end_timestamp, }); @@ -174,8 +177,8 @@ impl Log for GrpcLog { for collection in collections { result.push(CollectionInfo { collection_id: collection.collection_id, - first_log_id: collection.first_log_id, - first_log_id_ts: collection.first_log_id_ts, + first_log_offset: collection.first_log_offset, + first_log_ts: collection.first_log_ts, }); } Ok(result) @@ -226,8 +229,8 @@ impl ChromaError for GetCollectionsWithNewDataError { #[derive(Clone)] pub(crate) struct LogRecord { pub(crate) collection_id: String, - pub(crate) log_id: i64, - pub(crate) log_id_ts: i64, + pub(crate) log_offset: i64, + pub(crate) log_ts: i64, pub(crate) record: EmbeddingRecord, } @@ -235,8 +238,8 @@ impl Debug for LogRecord { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("LogRecord") .field("collection_id", &self.collection_id) - .field("log_id", &self.log_id) - .field("log_id_ts", &self.log_id_ts) + .field("log_offset", &self.log_offset) + .field("log_ts", &self.log_ts) .field("record", 
&self.record) .finish() } @@ -281,7 +284,7 @@ impl Log for InMemoryLog { }; let mut result = Vec::new(); for i in offset..(offset + batch_size as i64) { - if i < logs.len() as i64 && logs[i as usize].log_id_ts <= end_timestamp { + if i < logs.len() as i64 && logs[i as usize].log_ts <= end_timestamp { result.push(logs[i as usize].record.clone()); } } @@ -296,13 +299,13 @@ impl Log for InMemoryLog { if log_record.is_empty() { continue; } - // sort the logs by log_id + // sort the logs by log_offset let mut logs = log_record.clone(); - logs.sort_by(|a, b| a.log_id.cmp(&b.log_id)); + logs.sort_by(|a, b| a.log_offset.cmp(&b.log_offset)); collections.push(CollectionInfo { collection_id: collection_id.clone(), - first_log_id: logs[0].log_id, - first_log_id_ts: logs[0].log_id_ts, + first_log_offset: logs[0].log_offset, + first_log_ts: logs[0].log_ts, }); } Ok(collections) diff --git a/rust/worker/src/types/embedding_record.rs b/rust/worker/src/types/embedding_record.rs index 396be6c22c5..36259508e59 100644 --- a/rust/worker/src/types/embedding_record.rs +++ b/rust/worker/src/types/embedding_record.rs @@ -6,9 +6,8 @@ use crate::{ chroma_proto, errors::{ChromaError, ErrorCodes}, }; - +use chroma_proto::LogRecord; use chroma_proto::OperationRecord; -use chroma_proto::RecordLog; use num_bigint::BigInt; use thiserror::Error; use uuid::Uuid; @@ -98,7 +97,7 @@ impl TryFrom for EmbeddingRecord { } } -type RecordLogWithCollectionId = (RecordLog, Uuid); +type RecordLogWithCollectionId = (LogRecord, Uuid); impl TryFrom for EmbeddingRecord { type Error = EmbeddingRecordConversionError; @@ -112,7 +111,7 @@ impl TryFrom for EmbeddingRecord { ConversionError::DecodeError, ))?; - let seq_id = BigInt::from(record_log.log_id); + let seq_id = BigInt::from(record_log.log_offset); let op = match proto_submit.operation.try_into() { Ok(op) => op, Err(e) => return Err(EmbeddingRecordConversionError::OperationConversionError(e)), @@ -359,8 +358,8 @@ mod tests { metadata: Some(metadata), operation: 
chroma_proto::Operation::Add as i32, }; - let record_log = chroma_proto::RecordLog { - log_id: 42, + let record_log = chroma_proto::LogRecord { + log_offset: 42, record: Some(proto_submit), }; let converted_embedding_record =