From d44bd03ed23e1e8069327a248363ec3c88342ad6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20Egelund-M=C3=BCller?= Date: Tue, 12 Sep 2023 10:23:26 +0200 Subject: [PATCH] Simplify connector source and sink props (#3057) * Simplify connector source and sink props * Fix test * Add extract policy * Review comment * Self review --- proto/gen/rill/runtime/v1/catalog.pb.go | 487 ++++++------------ .../rill/runtime/v1/catalog.pb.validate.go | 139 ----- .../gen/rill/runtime/v1/runtime.swagger.yaml | 48 +- proto/rill/runtime/v1/catalog.proto | 19 - runtime/compilers/rillv1/connectors.go | 29 +- runtime/compilers/rillv1beta/connector.go | 32 +- runtime/connection_cache_test.go | 2 +- runtime/drivers/bigquery/bigquery.go | 6 +- runtime/drivers/bigquery/sql_store.go | 8 +- runtime/drivers/blob/blobdownloader.go | 3 +- runtime/drivers/blob/blobdownloader_test.go | 5 +- runtime/drivers/blob/extract_policy.go | 119 +++++ runtime/drivers/blob/extract_policy_test.go | 80 +++ runtime/drivers/blob/parquetreader.go | 11 +- runtime/drivers/blob/parquetreader_test.go | 5 +- runtime/drivers/blob/planner.go | 21 +- runtime/drivers/blob/rowplanner.go | 7 +- runtime/drivers/blob/textreader.go | 7 +- runtime/drivers/blob/textreader_test.go | 7 +- runtime/drivers/drivers.go | 2 +- runtime/drivers/druid/druid.go | 2 +- runtime/drivers/duckdb/duckdb.go | 2 +- .../duckdb/transporter/duckDB_to_duckDB.go | 17 +- .../duckdb/transporter/filestore_to_duckDB.go | 20 +- .../transporter/motherduck_to_duckDB.go | 29 +- .../transporter/objectStore_to_duckDB.go | 34 +- .../duckdb/transporter/sqlstore_to_duckDB.go | 20 +- .../duckdb/transporter/transporter_test.go | 48 +- runtime/drivers/duckdb/transporter/utils.go | 30 ++ runtime/drivers/file/file.go | 2 +- runtime/drivers/file/file_store.go | 5 +- runtime/drivers/file_store.go | 2 +- runtime/drivers/gcs/gcs.go | 38 +- runtime/drivers/github/github.go | 2 +- runtime/drivers/https/https.go | 9 +- runtime/drivers/object_store.go | 2 +- runtime/drivers/postgres/postgres.go | 4 +- runtime/drivers/s3/s3.go | 49 +- runtime/drivers/sql_store.go | 4 +- runtime/drivers/sqlite/sqlite.go | 4 +- runtime/drivers/transporter.go | 111 +--- runtime/reconcilers/source.go | 67 +-- .../catalog/artifacts/yaml/objects.go | 128 +---- .../catalog/artifacts/yaml/objects_test.go | 82 --- .../catalog/migrator/sources/sources.go | 85 +-- .../proto/gen/rill/runtime/v1/catalog_pb.ts | 102 +--- .../src/proto/gen/rill/ui/v1/dashboard_pb.ts | 2 +- .../src/runtime-client/gen/index.schemas.ts | 42 +- 48 files changed, 601 insertions(+), 1378 deletions(-) create mode 100644 runtime/drivers/blob/extract_policy.go create mode 100644 runtime/drivers/blob/extract_policy_test.go delete mode 100644 runtime/services/catalog/artifacts/yaml/objects_test.go diff --git a/proto/gen/rill/runtime/v1/catalog.pb.go b/proto/gen/rill/runtime/v1/catalog.pb.go index 64f2e7ee96a..b63009d868f 100644 --- a/proto/gen/rill/runtime/v1/catalog.pb.go +++ b/proto/gen/rill/runtime/v1/catalog.pb.go @@ -77,55 +77,6 @@ func (ObjectType) EnumDescriptor() ([]byte, []int) { return file_rill_runtime_v1_catalog_proto_rawDescGZIP(), []int{0} } -type Source_ExtractPolicy_Strategy int32 - -const ( - Source_ExtractPolicy_STRATEGY_UNSPECIFIED Source_ExtractPolicy_Strategy = 0 - Source_ExtractPolicy_STRATEGY_HEAD Source_ExtractPolicy_Strategy = 1 - Source_ExtractPolicy_STRATEGY_TAIL Source_ExtractPolicy_Strategy = 2 -) - -// Enum value maps for Source_ExtractPolicy_Strategy. 
-var ( - Source_ExtractPolicy_Strategy_name = map[int32]string{ - 0: "STRATEGY_UNSPECIFIED", - 1: "STRATEGY_HEAD", - 2: "STRATEGY_TAIL", - } - Source_ExtractPolicy_Strategy_value = map[string]int32{ - "STRATEGY_UNSPECIFIED": 0, - "STRATEGY_HEAD": 1, - "STRATEGY_TAIL": 2, - } -) - -func (x Source_ExtractPolicy_Strategy) Enum() *Source_ExtractPolicy_Strategy { - p := new(Source_ExtractPolicy_Strategy) - *p = x - return p -} - -func (x Source_ExtractPolicy_Strategy) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (Source_ExtractPolicy_Strategy) Descriptor() protoreflect.EnumDescriptor { - return file_rill_runtime_v1_catalog_proto_enumTypes[1].Descriptor() -} - -func (Source_ExtractPolicy_Strategy) Type() protoreflect.EnumType { - return &file_rill_runtime_v1_catalog_proto_enumTypes[1] -} - -func (x Source_ExtractPolicy_Strategy) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use Source_ExtractPolicy_Strategy.Descriptor instead. -func (Source_ExtractPolicy_Strategy) EnumDescriptor() ([]byte, []int) { - return file_rill_runtime_v1_catalog_proto_rawDescGZIP(), []int{1, 0, 0} -} - // Dialects supported for models type Model_Dialect int32 @@ -157,11 +108,11 @@ func (x Model_Dialect) String() string { } func (Model_Dialect) Descriptor() protoreflect.EnumDescriptor { - return file_rill_runtime_v1_catalog_proto_enumTypes[2].Descriptor() + return file_rill_runtime_v1_catalog_proto_enumTypes[1].Descriptor() } func (Model_Dialect) Type() protoreflect.EnumType { - return &file_rill_runtime_v1_catalog_proto_enumTypes[2] + return &file_rill_runtime_v1_catalog_proto_enumTypes[1] } func (x Model_Dialect) Number() protoreflect.EnumNumber { @@ -257,8 +208,6 @@ type Source struct { Properties *structpb.Struct `protobuf:"bytes,3,opt,name=properties,proto3" json:"properties,omitempty"` // Detected schema of the source Schema *StructType `protobuf:"bytes,5,opt,name=schema,proto3" json:"schema,omitempty"` - // extraction policy for the source - Policy *Source_ExtractPolicy `protobuf:"bytes,6,opt,name=policy,proto3" json:"policy,omitempty"` // timeout for source ingestion in seconds TimeoutSeconds int32 `protobuf:"varint,7,opt,name=timeout_seconds,json=timeoutSeconds,proto3" json:"timeout_seconds,omitempty"` } @@ -323,13 +272,6 @@ func (x *Source) GetSchema() *StructType { return nil } -func (x *Source) GetPolicy() *Source_ExtractPolicy { - if x != nil { - return x.Policy - } - return nil -} - func (x *Source) GetTimeoutSeconds() int32 { if x != nil { return x.TimeoutSeconds @@ -561,83 +503,6 @@ func (x *MetricsView) GetSecurity() *MetricsView_Security { return nil } -// Extract policy for glob connectors -type Source_ExtractPolicy struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // strategy for selecting rows in a file - RowsStrategy Source_ExtractPolicy_Strategy `protobuf:"varint,1,opt,name=rows_strategy,json=rowsStrategy,proto3,enum=rill.runtime.v1.Source_ExtractPolicy_Strategy" json:"rows_strategy,omitempty"` - // could in future add: uint64 rows_limit = n; - // limit on data fetched in bytes - RowsLimitBytes uint64 `protobuf:"varint,2,opt,name=rows_limit_bytes,json=rowsLimitBytes,proto3" json:"rows_limit_bytes,omitempty"` - // strategy for selecting files - FilesStrategy Source_ExtractPolicy_Strategy `protobuf:"varint,3,opt,name=files_strategy,json=filesStrategy,proto3,enum=rill.runtime.v1.Source_ExtractPolicy_Strategy" 
json:"files_strategy,omitempty"` - // limit on number of files - FilesLimit uint64 `protobuf:"varint,4,opt,name=files_limit,json=filesLimit,proto3" json:"files_limit,omitempty"` -} - -func (x *Source_ExtractPolicy) Reset() { - *x = Source_ExtractPolicy{} - if protoimpl.UnsafeEnabled { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Source_ExtractPolicy) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Source_ExtractPolicy) ProtoMessage() {} - -func (x *Source_ExtractPolicy) ProtoReflect() protoreflect.Message { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Source_ExtractPolicy.ProtoReflect.Descriptor instead. -func (*Source_ExtractPolicy) Descriptor() ([]byte, []int) { - return file_rill_runtime_v1_catalog_proto_rawDescGZIP(), []int{1, 0} -} - -func (x *Source_ExtractPolicy) GetRowsStrategy() Source_ExtractPolicy_Strategy { - if x != nil { - return x.RowsStrategy - } - return Source_ExtractPolicy_STRATEGY_UNSPECIFIED -} - -func (x *Source_ExtractPolicy) GetRowsLimitBytes() uint64 { - if x != nil { - return x.RowsLimitBytes - } - return 0 -} - -func (x *Source_ExtractPolicy) GetFilesStrategy() Source_ExtractPolicy_Strategy { - if x != nil { - return x.FilesStrategy - } - return Source_ExtractPolicy_STRATEGY_UNSPECIFIED -} - -func (x *Source_ExtractPolicy) GetFilesLimit() uint64 { - if x != nil { - return x.FilesLimit - } - return 0 -} - // Dimensions are columns to filter and group by type MetricsView_Dimension struct { state protoimpl.MessageState @@ -653,7 +518,7 @@ type MetricsView_Dimension struct { func (x *MetricsView_Dimension) Reset() { *x = MetricsView_Dimension{} if protoimpl.UnsafeEnabled { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[5] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -666,7 +531,7 @@ func (x *MetricsView_Dimension) String() string { func (*MetricsView_Dimension) ProtoMessage() {} func (x *MetricsView_Dimension) ProtoReflect() protoreflect.Message { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[5] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -727,7 +592,7 @@ type MetricsView_Measure struct { func (x *MetricsView_Measure) Reset() { *x = MetricsView_Measure{} if protoimpl.UnsafeEnabled { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[6] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -740,7 +605,7 @@ func (x *MetricsView_Measure) String() string { func (*MetricsView_Measure) ProtoMessage() {} func (x *MetricsView_Measure) ProtoReflect() protoreflect.Message { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[6] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -816,7 +681,7 @@ type MetricsView_Security struct { func (x *MetricsView_Security) Reset() { *x = MetricsView_Security{} if protoimpl.UnsafeEnabled { - mi := 
&file_rill_runtime_v1_catalog_proto_msgTypes[7] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -829,7 +694,7 @@ func (x *MetricsView_Security) String() string { func (*MetricsView_Security) ProtoMessage() {} func (x *MetricsView_Security) ProtoReflect() protoreflect.Message { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[7] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -886,7 +751,7 @@ type MetricsView_Security_FieldCondition struct { func (x *MetricsView_Security_FieldCondition) Reset() { *x = MetricsView_Security_FieldCondition{} if protoimpl.UnsafeEnabled { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[8] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -899,7 +764,7 @@ func (x *MetricsView_Security_FieldCondition) String() string { func (*MetricsView_Security_FieldCondition) ProtoMessage() {} func (x *MetricsView_Security_FieldCondition) ProtoReflect() protoreflect.Message { - mi := &file_rill_runtime_v1_catalog_proto_msgTypes[8] + mi := &file_rill_runtime_v1_catalog_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -947,7 +812,7 @@ var file_rill_runtime_v1_catalog_proto_rawDesc = []byte{ 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x07, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x64, 0x22, 0xe5, 0x04, 0x0a, 0x06, 0x53, + 0x08, 0x52, 0x07, 0x6d, 0x61, 0x6e, 0x61, 0x67, 0x65, 0x64, 0x22, 0xd1, 0x01, 0x0a, 0x06, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, @@ -958,142 +823,117 @@ var file_rill_runtime_v1_catalog_proto_rawDesc = []byte{ 0x12, 0x33, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x3d, 0x0a, 0x06, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, - 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, - 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x45, - 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x52, 0x06, 0x70, 0x6f, - 0x6c, 0x69, 0x63, 0x79, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, - 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x74, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x1a, 0xd2, 0x02, - 0x0a, 0x0d, 0x45, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, - 0x53, 0x0a, 0x0d, 0x72, 0x6f, 0x77, 0x73, 0x5f, 0x73, 0x74, 0x72, 0x61, 0x74, 
0x65, 0x67, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2e, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, - 0x45, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x53, 0x74, - 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x52, 0x0c, 0x72, 0x6f, 0x77, 0x73, 0x53, 0x74, 0x72, 0x61, - 0x74, 0x65, 0x67, 0x79, 0x12, 0x28, 0x0a, 0x10, 0x72, 0x6f, 0x77, 0x73, 0x5f, 0x6c, 0x69, 0x6d, - 0x69, 0x74, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, - 0x72, 0x6f, 0x77, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x55, - 0x0a, 0x0e, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x73, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2e, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, - 0x45, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x2e, 0x53, 0x74, - 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x52, 0x0d, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x53, 0x74, 0x72, - 0x61, 0x74, 0x65, 0x67, 0x79, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x6c, - 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, - 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x4a, 0x0a, 0x08, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, - 0x67, 0x79, 0x12, 0x18, 0x0a, 0x14, 0x53, 0x54, 0x52, 0x41, 0x54, 0x45, 0x47, 0x59, 0x5f, 0x55, - 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, - 0x53, 0x54, 0x52, 0x41, 0x54, 0x45, 0x47, 0x59, 0x5f, 0x48, 0x45, 0x41, 0x44, 0x10, 0x01, 0x12, - 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x52, 0x41, 0x54, 0x45, 0x47, 0x59, 0x5f, 0x54, 0x41, 0x49, 0x4c, - 0x10, 0x02, 0x22, 0xf6, 0x01, 0x0a, 0x05, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x10, 0x0a, 0x03, 0x73, 0x71, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, - 0x71, 0x6c, 0x12, 0x38, 0x0a, 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, - 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x2e, 0x44, 0x69, 0x61, 0x6c, - 0x65, 0x63, 0x74, 0x52, 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x12, 0x33, 0x0a, 0x06, - 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x72, - 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, - 0x74, 0x72, 0x75, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x12, 0x20, 0x0a, 0x0b, 0x6d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x6d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, - 0x69, 0x7a, 0x65, 0x22, 0x36, 0x0a, 0x07, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x12, 0x17, - 0x0a, 0x13, 0x44, 0x49, 0x41, 0x4c, 0x45, 0x43, 0x54, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, - 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x44, 0x49, 0x41, 0x4c, 0x45, - 0x43, 0x54, 0x5f, 0x44, 0x55, 0x43, 0x4b, 0x44, 0x42, 0x10, 0x01, 0x22, 0xef, 0x08, 0x0a, 0x0b, - 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x12, 0x12, 0x0a, 0x04, 0x6e, - 0x61, 
0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, - 0x14, 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, 0x69, - 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, - 0x69, 0x6d, 0x65, 0x44, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x46, 0x0a, 0x0a, - 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x26, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, - 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x2e, 0x44, - 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x40, 0x0a, 0x08, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x73, - 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, - 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, - 0x56, 0x69, 0x65, 0x77, 0x2e, 0x4d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x52, 0x08, 0x6d, 0x65, - 0x61, 0x73, 0x75, 0x72, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, - 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x20, 0x0a, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4a, - 0x0a, 0x13, 0x73, 0x6d, 0x61, 0x6c, 0x6c, 0x65, 0x73, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, - 0x67, 0x72, 0x61, 0x69, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x72, 0x69, - 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x47, 0x72, 0x61, 0x69, 0x6e, 0x52, 0x11, 0x73, 0x6d, 0x61, 0x6c, 0x6c, 0x65, 0x73, - 0x74, 0x54, 0x69, 0x6d, 0x65, 0x47, 0x72, 0x61, 0x69, 0x6e, 0x12, 0x2c, 0x0a, 0x12, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, - 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x54, - 0x69, 0x6d, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x61, 0x76, 0x61, 0x69, - 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x7a, 0x6f, 0x6e, 0x65, 0x73, - 0x18, 0x0b, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, - 0x65, 0x54, 0x69, 0x6d, 0x65, 0x5a, 0x6f, 0x6e, 0x65, 0x73, 0x12, 0x41, 0x0a, 0x08, 0x73, 0x65, - 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x72, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, + 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, + 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x22, 0xf6, + 0x01, 0x0a, 0x05, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x10, 0x0a, 0x03, + 0x73, 0x71, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x71, 0x6c, 0x12, 0x38, + 0x0a, 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x1e, 0x2e, 0x72, 0x69, 0x6c, 
0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, + 0x31, 0x2e, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x2e, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x52, + 0x07, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x12, 0x33, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, + 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, + 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x20, 0x0a, + 0x0b, 0x6d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0b, 0x6d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x22, + 0x36, 0x0a, 0x07, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x49, + 0x41, 0x4c, 0x45, 0x43, 0x54, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, + 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x44, 0x49, 0x41, 0x4c, 0x45, 0x43, 0x54, 0x5f, 0x44, + 0x55, 0x43, 0x4b, 0x44, 0x42, 0x10, 0x01, 0x22, 0xef, 0x08, 0x0a, 0x0b, 0x4d, 0x65, 0x74, 0x72, + 0x69, 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6d, + 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x6f, 0x64, 0x65, + 0x6c, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x69, 0x6d, 0x65, 0x44, + 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x46, 0x0a, 0x0a, 0x64, 0x69, 0x6d, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, - 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, - 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x1a, 0x6f, 0x0a, - 0x09, 0x44, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, - 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x1a, 0xc2, - 0x01, 0x0a, 0x07, 0x4d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, - 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, - 0x61, 0x62, 0x65, 0x6c, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 
0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x33, - 0x0a, 0x16, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x5f, - 0x6f, 0x66, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, - 0x76, 0x61, 0x6c, 0x69, 0x64, 0x50, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x4f, 0x66, 0x54, 0x6f, - 0x74, 0x61, 0x6c, 0x1a, 0xa7, 0x02, 0x0a, 0x08, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, - 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x72, 0x6f, 0x77, 0x5f, - 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x6f, - 0x77, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x07, 0x69, 0x6e, 0x63, 0x6c, 0x75, - 0x64, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, + 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x2e, 0x44, 0x69, 0x6d, 0x65, 0x6e, + 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x64, 0x69, 0x6d, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, + 0x12, 0x40, 0x0a, 0x08, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, + 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, + 0x2e, 0x4d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x52, 0x08, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, + 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4a, 0x0a, 0x13, 0x73, 0x6d, + 0x61, 0x6c, 0x6c, 0x65, 0x73, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x67, 0x72, 0x61, 0x69, + 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, + 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x47, 0x72, + 0x61, 0x69, 0x6e, 0x52, 0x11, 0x73, 0x6d, 0x61, 0x6c, 0x6c, 0x65, 0x73, 0x74, 0x54, 0x69, 0x6d, + 0x65, 0x47, 0x72, 0x61, 0x69, 0x6e, 0x12, 0x2c, 0x0a, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, + 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x0a, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x10, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x52, + 0x61, 0x6e, 0x67, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, + 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x7a, 0x6f, 0x6e, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x12, 0x61, 0x76, 0x61, 0x69, 0x6c, 0x61, 0x62, 0x6c, 0x65, 0x54, 0x69, 0x6d, + 0x65, 0x5a, 0x6f, 0x6e, 0x65, 0x73, 0x12, 0x41, 0x0a, 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, + 0x74, 0x79, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2e, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, - 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x12, 0x4e, 0x0a, 0x07, 0x65, 0x78, 
0x63, 0x6c, 0x75, - 0x64, 0x65, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, - 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, - 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2e, - 0x46, 0x69, 0x65, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, - 0x65, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x1a, 0x44, 0x0a, 0x0e, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, - 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, - 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x73, - 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x2a, 0x8d, 0x01, - 0x0a, 0x0a, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, - 0x4f, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, - 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x15, 0x0a, 0x11, 0x4f, 0x42, 0x4a, - 0x45, 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, - 0x12, 0x16, 0x0a, 0x12, 0x4f, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, - 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x10, 0x02, 0x12, 0x15, 0x0a, 0x11, 0x4f, 0x42, 0x4a, 0x45, - 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x4c, 0x10, 0x03, 0x12, - 0x1c, 0x0a, 0x18, 0x4f, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, - 0x45, 0x54, 0x52, 0x49, 0x43, 0x53, 0x5f, 0x56, 0x49, 0x45, 0x57, 0x10, 0x04, 0x42, 0xb5, 0x01, - 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, - 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x0c, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x50, 0x72, - 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x32, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x72, 0x69, 0x6c, 0x6c, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x72, 0x69, 0x6c, 0x6c, 0x2f, - 0x72, 0x69, 0x6c, 0x6c, 0x2f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2f, 0x76, 0x31, 0x3b, - 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x52, 0x52, 0x58, 0xaa, - 0x02, 0x0f, 0x52, 0x69, 0x6c, 0x6c, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x56, - 0x31, 0xca, 0x02, 0x0f, 0x52, 0x69, 0x6c, 0x6c, 0x5c, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, - 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x1b, 0x52, 0x69, 0x6c, 0x6c, 0x5c, 0x52, 0x75, 0x6e, 0x74, 0x69, - 0x6d, 0x65, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0xea, 0x02, 0x11, 0x52, 0x69, 0x6c, 0x6c, 0x3a, 0x3a, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, - 0x65, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x63, 0x73, 0x56, 0x69, 0x65, 0x77, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x52, + 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x1a, 0x6f, 0x0a, 0x09, 0x44, 0x69, 0x6d, + 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x03, 0x20, 0x01, 0x28, 
0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x1a, 0xc2, 0x01, 0x0a, 0x07, 0x4d, + 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x6c, 0x61, + 0x62, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, + 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x78, 0x70, 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x33, 0x0a, 0x16, 0x76, 0x61, + 0x6c, 0x69, 0x64, 0x5f, 0x70, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x5f, 0x6f, 0x66, 0x5f, 0x74, + 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, + 0x64, 0x50, 0x65, 0x72, 0x63, 0x65, 0x6e, 0x74, 0x4f, 0x66, 0x54, 0x6f, 0x74, 0x61, 0x6c, 0x1a, + 0xa7, 0x02, 0x0a, 0x08, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x16, 0x0a, 0x06, + 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x72, 0x6f, 0x77, 0x5f, 0x66, 0x69, 0x6c, 0x74, + 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x6f, 0x77, 0x46, 0x69, 0x6c, + 0x74, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x07, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, + 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, + 0x65, 0x77, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2e, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x69, 0x6e, 0x63, 0x6c, + 0x75, 0x64, 0x65, 0x12, 0x4e, 0x0a, 0x07, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, + 0x69, 0x6d, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x56, 0x69, + 0x65, 0x77, 0x2e, 0x53, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2e, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x65, 0x78, 0x63, 0x6c, + 0x75, 0x64, 0x65, 0x1a, 0x44, 0x0a, 0x0e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x43, 0x6f, 0x6e, 0x64, + 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x05, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x2a, 0x8d, 0x01, 0x0a, 0x0a, 0x4f, 0x62, + 0x6a, 0x65, 0x63, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, 0x4f, 0x42, 0x4a, 0x45, + 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, + 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x15, 0x0a, 
0x11, 0x4f, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, + 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, + 0x4f, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x4f, 0x55, 0x52, + 0x43, 0x45, 0x10, 0x02, 0x12, 0x15, 0x0a, 0x11, 0x4f, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x45, 0x4c, 0x10, 0x03, 0x12, 0x1c, 0x0a, 0x18, 0x4f, + 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x45, 0x54, 0x52, 0x49, + 0x43, 0x53, 0x5f, 0x56, 0x49, 0x45, 0x57, 0x10, 0x04, 0x42, 0xb5, 0x01, 0x0a, 0x13, 0x63, 0x6f, + 0x6d, 0x2e, 0x72, 0x69, 0x6c, 0x6c, 0x2e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x76, + 0x31, 0x42, 0x0c, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x32, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x72, 0x69, + 0x6c, 0x6c, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x72, 0x69, 0x6c, 0x6c, 0x2f, 0x72, 0x69, 0x6c, 0x6c, + 0x2f, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2f, 0x76, 0x31, 0x3b, 0x72, 0x75, 0x6e, 0x74, + 0x69, 0x6d, 0x65, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x52, 0x52, 0x58, 0xaa, 0x02, 0x0f, 0x52, 0x69, + 0x6c, 0x6c, 0x2e, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x0f, + 0x52, 0x69, 0x6c, 0x6c, 0x5c, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5c, 0x56, 0x31, 0xe2, + 0x02, 0x1b, 0x52, 0x69, 0x6c, 0x6c, 0x5c, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x5c, 0x56, + 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x11, + 0x52, 0x69, 0x6c, 0x6c, 0x3a, 0x3a, 0x52, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x3a, 0x3a, 0x56, + 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1108,45 +948,40 @@ func file_rill_runtime_v1_catalog_proto_rawDescGZIP() []byte { return file_rill_runtime_v1_catalog_proto_rawDescData } -var file_rill_runtime_v1_catalog_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_rill_runtime_v1_catalog_proto_msgTypes = make([]protoimpl.MessageInfo, 9) +var file_rill_runtime_v1_catalog_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_rill_runtime_v1_catalog_proto_msgTypes = make([]protoimpl.MessageInfo, 8) var file_rill_runtime_v1_catalog_proto_goTypes = []interface{}{ (ObjectType)(0), // 0: rill.runtime.v1.ObjectType - (Source_ExtractPolicy_Strategy)(0), // 1: rill.runtime.v1.Source.ExtractPolicy.Strategy - (Model_Dialect)(0), // 2: rill.runtime.v1.Model.Dialect - (*Table)(nil), // 3: rill.runtime.v1.Table - (*Source)(nil), // 4: rill.runtime.v1.Source - (*Model)(nil), // 5: rill.runtime.v1.Model - (*MetricsView)(nil), // 6: rill.runtime.v1.MetricsView - (*Source_ExtractPolicy)(nil), // 7: rill.runtime.v1.Source.ExtractPolicy - (*MetricsView_Dimension)(nil), // 8: rill.runtime.v1.MetricsView.Dimension - (*MetricsView_Measure)(nil), // 9: rill.runtime.v1.MetricsView.Measure - (*MetricsView_Security)(nil), // 10: rill.runtime.v1.MetricsView.Security - (*MetricsView_Security_FieldCondition)(nil), // 11: rill.runtime.v1.MetricsView.Security.FieldCondition - (*StructType)(nil), // 12: rill.runtime.v1.StructType - (*structpb.Struct)(nil), // 13: google.protobuf.Struct - (TimeGrain)(0), // 14: rill.runtime.v1.TimeGrain + (Model_Dialect)(0), // 1: rill.runtime.v1.Model.Dialect + (*Table)(nil), // 2: rill.runtime.v1.Table + (*Source)(nil), // 3: rill.runtime.v1.Source + (*Model)(nil), // 4: rill.runtime.v1.Model + (*MetricsView)(nil), // 5: rill.runtime.v1.MetricsView + 
(*MetricsView_Dimension)(nil), // 6: rill.runtime.v1.MetricsView.Dimension + (*MetricsView_Measure)(nil), // 7: rill.runtime.v1.MetricsView.Measure + (*MetricsView_Security)(nil), // 8: rill.runtime.v1.MetricsView.Security + (*MetricsView_Security_FieldCondition)(nil), // 9: rill.runtime.v1.MetricsView.Security.FieldCondition + (*StructType)(nil), // 10: rill.runtime.v1.StructType + (*structpb.Struct)(nil), // 11: google.protobuf.Struct + (TimeGrain)(0), // 12: rill.runtime.v1.TimeGrain } var file_rill_runtime_v1_catalog_proto_depIdxs = []int32{ - 12, // 0: rill.runtime.v1.Table.schema:type_name -> rill.runtime.v1.StructType - 13, // 1: rill.runtime.v1.Source.properties:type_name -> google.protobuf.Struct - 12, // 2: rill.runtime.v1.Source.schema:type_name -> rill.runtime.v1.StructType - 7, // 3: rill.runtime.v1.Source.policy:type_name -> rill.runtime.v1.Source.ExtractPolicy - 2, // 4: rill.runtime.v1.Model.dialect:type_name -> rill.runtime.v1.Model.Dialect - 12, // 5: rill.runtime.v1.Model.schema:type_name -> rill.runtime.v1.StructType - 8, // 6: rill.runtime.v1.MetricsView.dimensions:type_name -> rill.runtime.v1.MetricsView.Dimension - 9, // 7: rill.runtime.v1.MetricsView.measures:type_name -> rill.runtime.v1.MetricsView.Measure - 14, // 8: rill.runtime.v1.MetricsView.smallest_time_grain:type_name -> rill.runtime.v1.TimeGrain - 10, // 9: rill.runtime.v1.MetricsView.security:type_name -> rill.runtime.v1.MetricsView.Security - 1, // 10: rill.runtime.v1.Source.ExtractPolicy.rows_strategy:type_name -> rill.runtime.v1.Source.ExtractPolicy.Strategy - 1, // 11: rill.runtime.v1.Source.ExtractPolicy.files_strategy:type_name -> rill.runtime.v1.Source.ExtractPolicy.Strategy - 11, // 12: rill.runtime.v1.MetricsView.Security.include:type_name -> rill.runtime.v1.MetricsView.Security.FieldCondition - 11, // 13: rill.runtime.v1.MetricsView.Security.exclude:type_name -> rill.runtime.v1.MetricsView.Security.FieldCondition - 14, // [14:14] is the sub-list for method output_type - 14, // [14:14] is the sub-list for method input_type - 14, // [14:14] is the sub-list for extension type_name - 14, // [14:14] is the sub-list for extension extendee - 0, // [0:14] is the sub-list for field type_name + 10, // 0: rill.runtime.v1.Table.schema:type_name -> rill.runtime.v1.StructType + 11, // 1: rill.runtime.v1.Source.properties:type_name -> google.protobuf.Struct + 10, // 2: rill.runtime.v1.Source.schema:type_name -> rill.runtime.v1.StructType + 1, // 3: rill.runtime.v1.Model.dialect:type_name -> rill.runtime.v1.Model.Dialect + 10, // 4: rill.runtime.v1.Model.schema:type_name -> rill.runtime.v1.StructType + 6, // 5: rill.runtime.v1.MetricsView.dimensions:type_name -> rill.runtime.v1.MetricsView.Dimension + 7, // 6: rill.runtime.v1.MetricsView.measures:type_name -> rill.runtime.v1.MetricsView.Measure + 12, // 7: rill.runtime.v1.MetricsView.smallest_time_grain:type_name -> rill.runtime.v1.TimeGrain + 8, // 8: rill.runtime.v1.MetricsView.security:type_name -> rill.runtime.v1.MetricsView.Security + 9, // 9: rill.runtime.v1.MetricsView.Security.include:type_name -> rill.runtime.v1.MetricsView.Security.FieldCondition + 9, // 10: rill.runtime.v1.MetricsView.Security.exclude:type_name -> rill.runtime.v1.MetricsView.Security.FieldCondition + 11, // [11:11] is the sub-list for method output_type + 11, // [11:11] is the sub-list for method input_type + 11, // [11:11] is the sub-list for extension type_name + 11, // [11:11] is the sub-list for extension extendee + 0, // [0:11] is the sub-list for field type_name } func init() { 
file_rill_runtime_v1_catalog_proto_init() } @@ -1206,18 +1041,6 @@ func file_rill_runtime_v1_catalog_proto_init() { } } file_rill_runtime_v1_catalog_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Source_ExtractPolicy); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_rill_runtime_v1_catalog_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MetricsView_Dimension); i { case 0: return &v.state @@ -1229,7 +1052,7 @@ func file_rill_runtime_v1_catalog_proto_init() { return nil } } - file_rill_runtime_v1_catalog_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + file_rill_runtime_v1_catalog_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MetricsView_Measure); i { case 0: return &v.state @@ -1241,7 +1064,7 @@ func file_rill_runtime_v1_catalog_proto_init() { return nil } } - file_rill_runtime_v1_catalog_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_rill_runtime_v1_catalog_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MetricsView_Security); i { case 0: return &v.state @@ -1253,7 +1076,7 @@ func file_rill_runtime_v1_catalog_proto_init() { return nil } } - file_rill_runtime_v1_catalog_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + file_rill_runtime_v1_catalog_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MetricsView_Security_FieldCondition); i { case 0: return &v.state @@ -1271,8 +1094,8 @@ func file_rill_runtime_v1_catalog_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_rill_runtime_v1_catalog_proto_rawDesc, - NumEnums: 3, - NumMessages: 9, + NumEnums: 2, + NumMessages: 8, NumExtensions: 0, NumServices: 0, }, diff --git a/proto/gen/rill/runtime/v1/catalog.pb.validate.go b/proto/gen/rill/runtime/v1/catalog.pb.validate.go index fb0b9b0d9cd..88ede591f61 100644 --- a/proto/gen/rill/runtime/v1/catalog.pb.validate.go +++ b/proto/gen/rill/runtime/v1/catalog.pb.validate.go @@ -249,35 +249,6 @@ func (m *Source) validate(all bool) error { } } - if all { - switch v := interface{}(m.GetPolicy()).(type) { - case interface{ ValidateAll() error }: - if err := v.ValidateAll(); err != nil { - errors = append(errors, SourceValidationError{ - field: "Policy", - reason: "embedded message failed validation", - cause: err, - }) - } - case interface{ Validate() error }: - if err := v.Validate(); err != nil { - errors = append(errors, SourceValidationError{ - field: "Policy", - reason: "embedded message failed validation", - cause: err, - }) - } - } - } else if v, ok := interface{}(m.GetPolicy()).(interface{ Validate() error }); ok { - if err := v.Validate(); err != nil { - return SourceValidationError{ - field: "Policy", - reason: "embedded message failed validation", - cause: err, - } - } - } - // no validation rules for TimeoutSeconds if len(errors) > 0 { @@ -702,116 +673,6 @@ var _ interface { ErrorName() string } = MetricsViewValidationError{} -// Validate checks the field values on Source_ExtractPolicy with the rules -// defined in the proto definition for this message. If any rules are -// violated, the first error encountered is returned, or nil if there are no violations. 
-func (m *Source_ExtractPolicy) Validate() error { - return m.validate(false) -} - -// ValidateAll checks the field values on Source_ExtractPolicy with the rules -// defined in the proto definition for this message. If any rules are -// violated, the result is a list of violation errors wrapped in -// Source_ExtractPolicyMultiError, or nil if none found. -func (m *Source_ExtractPolicy) ValidateAll() error { - return m.validate(true) -} - -func (m *Source_ExtractPolicy) validate(all bool) error { - if m == nil { - return nil - } - - var errors []error - - // no validation rules for RowsStrategy - - // no validation rules for RowsLimitBytes - - // no validation rules for FilesStrategy - - // no validation rules for FilesLimit - - if len(errors) > 0 { - return Source_ExtractPolicyMultiError(errors) - } - - return nil -} - -// Source_ExtractPolicyMultiError is an error wrapping multiple validation -// errors returned by Source_ExtractPolicy.ValidateAll() if the designated -// constraints aren't met. -type Source_ExtractPolicyMultiError []error - -// Error returns a concatenation of all the error messages it wraps. -func (m Source_ExtractPolicyMultiError) Error() string { - var msgs []string - for _, err := range m { - msgs = append(msgs, err.Error()) - } - return strings.Join(msgs, "; ") -} - -// AllErrors returns a list of validation violation errors. -func (m Source_ExtractPolicyMultiError) AllErrors() []error { return m } - -// Source_ExtractPolicyValidationError is the validation error returned by -// Source_ExtractPolicy.Validate if the designated constraints aren't met. -type Source_ExtractPolicyValidationError struct { - field string - reason string - cause error - key bool -} - -// Field function returns field value. -func (e Source_ExtractPolicyValidationError) Field() string { return e.field } - -// Reason function returns reason value. -func (e Source_ExtractPolicyValidationError) Reason() string { return e.reason } - -// Cause function returns cause value. -func (e Source_ExtractPolicyValidationError) Cause() error { return e.cause } - -// Key function returns key value. -func (e Source_ExtractPolicyValidationError) Key() bool { return e.key } - -// ErrorName returns error name. -func (e Source_ExtractPolicyValidationError) ErrorName() string { - return "Source_ExtractPolicyValidationError" -} - -// Error satisfies the builtin error interface -func (e Source_ExtractPolicyValidationError) Error() string { - cause := "" - if e.cause != nil { - cause = fmt.Sprintf(" | caused by: %v", e.cause) - } - - key := "" - if e.key { - key = "key for " - } - - return fmt.Sprintf( - "invalid %sSource_ExtractPolicy.%s: %s%s", - key, - e.field, - e.reason, - cause) -} - -var _ error = Source_ExtractPolicyValidationError{} - -var _ interface { - Field() string - Reason() string - Key() bool - Cause() error - ErrorName() string -} = Source_ExtractPolicyValidationError{} - // Validate checks the field values on MetricsView_Dimension with the rules // defined in the proto definition for this message. If any rules are // violated, the first error encountered is returned, or nil if there are no violations. 
diff --git a/proto/gen/rill/runtime/v1/runtime.swagger.yaml b/proto/gen/rill/runtime/v1/runtime.swagger.yaml index d80151d6ddc..e1b4e1baf19 100644 --- a/proto/gen/rill/runtime/v1/runtime.swagger.yaml +++ b/proto/gen/rill/runtime/v1/runtime.swagger.yaml @@ -2266,6 +2266,13 @@ paths: tags: - ConnectorService definitions: + BucketExtractPolicyStrategy: + type: string + enum: + - STRATEGY_UNSPECIFIED + - STRATEGY_HEAD + - STRATEGY_TAIL + default: STRATEGY_UNSPECIFIED ColumnTimeSeriesRequestBasicMeasure: type: object properties: @@ -2487,33 +2494,6 @@ definitions: items: type: string title: Dimension/measure level access condition - SourceExtractPolicy: - type: object - properties: - rowsStrategy: - $ref: '#/definitions/SourceExtractPolicyStrategy' - title: strategy for selecting rows in a file - rowsLimitBytes: - type: string - format: uint64 - title: |- - could in future add: uint64 rows_limit = n; - limit on data fetched in bytes - filesStrategy: - $ref: '#/definitions/SourceExtractPolicyStrategy' - title: strategy for selecting files - filesLimit: - type: string - format: uint64 - title: limit on number of files - title: Extract policy for glob connectors - SourceExtractPolicyStrategy: - type: string - enum: - - STRATEGY_UNSPECIFIED - - STRATEGY_HEAD - - STRATEGY_TAIL - default: STRATEGY_UNSPECIFIED StructTypeField: type: object properties: @@ -2618,22 +2598,15 @@ definitions: type: object properties: rowsStrategy: - $ref: '#/definitions/v1BucketExtractPolicyStrategy' + $ref: '#/definitions/BucketExtractPolicyStrategy' rowsLimitBytes: type: string format: uint64 filesStrategy: - $ref: '#/definitions/v1BucketExtractPolicyStrategy' + $ref: '#/definitions/BucketExtractPolicyStrategy' filesLimit: type: string format: uint64 - v1BucketExtractPolicyStrategy: - type: string - enum: - - STRATEGY_UNSPECIFIED - - STRATEGY_HEAD - - STRATEGY_TAIL - default: STRATEGY_UNSPECIFIED v1BucketPlanner: type: object properties: @@ -4526,9 +4499,6 @@ definitions: schema: $ref: '#/definitions/v1StructType' title: Detected schema of the source - policy: - $ref: '#/definitions/SourceExtractPolicy' - title: extraction policy for the source timeoutSeconds: type: integer format: int32 diff --git a/proto/rill/runtime/v1/catalog.proto b/proto/rill/runtime/v1/catalog.proto index edc71d64944..963240dd067 100644 --- a/proto/rill/runtime/v1/catalog.proto +++ b/proto/rill/runtime/v1/catalog.proto @@ -37,25 +37,6 @@ message Source { google.protobuf.Struct properties = 3; // Detected schema of the source StructType schema = 5; - // Extract policy for glob connectors - message ExtractPolicy { - enum Strategy { - STRATEGY_UNSPECIFIED = 0; - STRATEGY_HEAD = 1; - STRATEGY_TAIL = 2; - } - // strategy for selecting rows in a file - Strategy rows_strategy = 1; - // could in future add: uint64 rows_limit = n; - // limit on data fetched in bytes - uint64 rows_limit_bytes = 2; - // strategy for selecting files - Strategy files_strategy = 3; - // limit on number of files - uint64 files_limit = 4; - } - // extraction policy for the source - ExtractPolicy policy = 6; // timeout for source ingestion in seconds int32 timeout_seconds = 7; } diff --git a/runtime/compilers/rillv1/connectors.go b/runtime/compilers/rillv1/connectors.go index f72cbb514bd..9229022003b 100644 --- a/runtime/compilers/rillv1/connectors.go +++ b/runtime/compilers/rillv1/connectors.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/drivers" "go.uber.org/zap" 
"golang.org/x/exp/slices" @@ -64,7 +63,7 @@ func (p *Parser) AnalyzeConnectors(ctx context.Context) ([]*Connector, error) { break } // Poll for anon access - res, _ := connector.HasAnonymousSourceAccess(ctx, driverSourceForAnonAccessCheck(driver, r.SourceSpec), zap.NewNop()) + res, _ := connector.HasAnonymousSourceAccess(ctx, r.SourceSpec.Properties.AsMap(), zap.NewNop()) if !res { anonAccess = false break @@ -106,29 +105,3 @@ func (p *Parser) connectorForName(name string) (string, drivers.Driver, error) { } return driver, connector, nil } - -func driverSourceForAnonAccessCheck(connector string, src *runtimev1.SourceSpec) drivers.Source { - props := src.Properties.AsMap() - switch connector { - case "s3": - return &drivers.BucketSource{ - Properties: props, - } - case "gcs": - return &drivers.BucketSource{ - Properties: props, - } - case "https": - return &drivers.FileSource{ - Properties: props, - } - case "local_file": - return &drivers.FileSource{ - Properties: props, - } - case "motherduck": - return &drivers.DatabaseSource{} - default: - return nil - } -} diff --git a/runtime/compilers/rillv1beta/connector.go b/runtime/compilers/rillv1beta/connector.go index eeec8f9608f..32b9d1b1dad 100644 --- a/runtime/compilers/rillv1beta/connector.go +++ b/runtime/compilers/rillv1beta/connector.go @@ -71,7 +71,7 @@ func ExtractConnectors(ctx context.Context, projectPath string) ([]*Connector, e } // ignoring error since failure to resolve this should not break the deployment flow // this can fail under cases such as full or host/bucket of URI is a variable - access, _ := connector.HasAnonymousSourceAccess(ctx, source(src.Connector, src), zap.NewNop()) + access, _ := connector.HasAnonymousSourceAccess(ctx, src.Properties.AsMap(), zap.NewNop()) c := key{Name: src.Connector, Type: src.Connector, AnonymousAccess: access} srcs, ok := connectorMap[c] if !ok { @@ -158,33 +158,3 @@ type key struct { Type string AnonymousAccess bool } - -func source(connector string, src *runtimev1.Source) drivers.Source { - props := src.Properties.AsMap() - switch connector { - case "s3": - return &drivers.BucketSource{ - Properties: props, - } - case "gcs": - return &drivers.BucketSource{ - Properties: props, - } - case "https": - return &drivers.FileSource{ - Properties: props, - } - case "local_file": - return &drivers.FileSource{ - Properties: props, - } - case "motherduck": - return &drivers.DatabaseSource{} - case "bigquery": - return &drivers.DatabaseSource{ - Props: props, - } - default: - return nil - } -} diff --git a/runtime/connection_cache_test.go b/runtime/connection_cache_test.go index a6f06d6be1a..c3e3a3f4867 100644 --- a/runtime/connection_cache_test.go +++ b/runtime/connection_cache_test.go @@ -276,7 +276,7 @@ func (*mockDriver) Drop(config map[string]any, logger *zap.Logger) error { } // HasAnonymousSourceAccess implements drivers.Driver. 
-func (*mockDriver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (*mockDriver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { panic("unimplemented") } diff --git a/runtime/drivers/bigquery/bigquery.go b/runtime/drivers/bigquery/bigquery.go index 9f7a89f1fbc..68bd9ef3119 100644 --- a/runtime/drivers/bigquery/bigquery.go +++ b/runtime/drivers/bigquery/bigquery.go @@ -122,7 +122,7 @@ func (d driver) Spec() drivers.Spec { return spec } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { // gcp provides public access to the data via a project return false, nil } @@ -204,6 +204,7 @@ func (c *Connection) AsFileStore() (drivers.FileStore, bool) { type sourceProperties struct { ProjectID string `mapstructure:"project_id"` + SQL string `mapstructure:"sql"` } func parseSourceProperties(props map[string]any) (*sourceProperties, error) { @@ -212,6 +213,9 @@ func parseSourceProperties(props map[string]any) (*sourceProperties, error) { if err != nil { return nil, err } + if conf.SQL == "" { + return nil, fmt.Errorf("property 'sql' is mandatory for connector \"bigquery\"") + } if conf.ProjectID == "" { conf.ProjectID = bigquery.DetectProjectID } diff --git a/runtime/drivers/bigquery/sql_store.go b/runtime/drivers/bigquery/sql_store.go index 93922a5a291..69733e4627e 100644 --- a/runtime/drivers/bigquery/sql_store.go +++ b/runtime/drivers/bigquery/sql_store.go @@ -26,12 +26,12 @@ const rowGroupBufferSize = int64(datasize.MB) * 512 const _jsonDownloadLimitBytes = 100 * int64(datasize.MB) // Query implements drivers.SQLStore -func (c *Connection) Query(ctx context.Context, props map[string]any, sql string) (drivers.RowIterator, error) { +func (c *Connection) Query(ctx context.Context, props map[string]any) (drivers.RowIterator, error) { return nil, fmt.Errorf("not implemented") } // QueryAsFiles implements drivers.SQLStore -func (c *Connection) QueryAsFiles(ctx context.Context, props map[string]any, sql string, opt *drivers.QueryOption, p drivers.Progress) (drivers.FileIterator, error) { +func (c *Connection) QueryAsFiles(ctx context.Context, props map[string]any, opt *drivers.QueryOption, p drivers.Progress) (drivers.FileIterator, error) { srcProps, err := parseSourceProperties(props) if err != nil { return nil, err @@ -56,7 +56,7 @@ func (c *Connection) QueryAsFiles(ctx context.Context, props map[string]any, sql } now := time.Now() - q := client.Query(sql) + q := client.Query(srcProps.SQL) it, err := q.Read(ctx) if err != nil && !strings.Contains(err.Error(), "Syntax error") { // close the read storage API client @@ -70,7 +70,7 @@ func (c *Connection) QueryAsFiles(ctx context.Context, props map[string]any, sql return nil, fmt.Errorf("failed to create bigquery client: %w", err) } - q := client.Query(sql) + q := client.Query(srcProps.SQL) it, err = q.Read(ctx) } if err != nil { diff --git a/runtime/drivers/blob/blobdownloader.go b/runtime/drivers/blob/blobdownloader.go index f345c179ed6..07e667ea8e7 100644 --- a/runtime/drivers/blob/blobdownloader.go +++ b/runtime/drivers/blob/blobdownloader.go @@ -10,7 +10,6 @@ import ( "cloud.google.com/go/storage" "github.com/bmatcuk/doublestar/v4" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/drivers" 
"github.com/rilldata/rill/runtime/pkg/fileutil" "github.com/rilldata/rill/runtime/pkg/observability" @@ -59,7 +58,7 @@ type Options struct { GlobMaxObjectsMatched int GlobMaxObjectsListed int64 GlobPageSize int - ExtractPolicy *runtimev1.Source_ExtractPolicy + ExtractPolicy *ExtractPolicy GlobPattern string // Although at this point GlobMaxTotalSize and StorageLimitInBytes have same impl but // this is total size the source should consume on disk and is calculated upstream basis how much data one instance has already consumed diff --git a/runtime/drivers/blob/blobdownloader_test.go b/runtime/drivers/blob/blobdownloader_test.go index fa2b88a81c6..1cfed5d0a95 100644 --- a/runtime/drivers/blob/blobdownloader_test.go +++ b/runtime/drivers/blob/blobdownloader_test.go @@ -5,7 +5,6 @@ import ( "os" "testing" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/pkg/fileutil" "github.com/stretchr/testify/require" "go.uber.org/zap" @@ -127,7 +126,7 @@ func TestFetchFileNamesWithParitionLimits(t *testing.T) { name: "listing head limits", args: args{context.Background(), prepareBucket(t), - Options{ExtractPolicy: &runtimev1.Source_ExtractPolicy{FilesStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_HEAD, FilesLimit: 2}, GlobPattern: "2020/**", StorageLimitInBytes: TenGB}, + Options{ExtractPolicy: &ExtractPolicy{FilesStrategy: ExtractPolicyStrategyHead, FilesLimit: 2}, GlobPattern: "2020/**", StorageLimitInBytes: TenGB}, }, want: map[string]struct{}{"hello": {}, "world": {}}, wantErr: false, @@ -137,7 +136,7 @@ func TestFetchFileNamesWithParitionLimits(t *testing.T) { args: args{ context.Background(), prepareBucket(t), - Options{ExtractPolicy: &runtimev1.Source_ExtractPolicy{FilesStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, FilesLimit: 2}, GlobPattern: "2020/**", StorageLimitInBytes: TenGB}, + Options{ExtractPolicy: &ExtractPolicy{FilesStrategy: ExtractPolicyStrategyTail, FilesLimit: 2}, GlobPattern: "2020/**", StorageLimitInBytes: TenGB}, }, want: map[string]struct{}{"test": {}, "writing": {}}, wantErr: false, diff --git a/runtime/drivers/blob/extract_policy.go b/runtime/drivers/blob/extract_policy.go new file mode 100644 index 00000000000..9e9944eb8da --- /dev/null +++ b/runtime/drivers/blob/extract_policy.go @@ -0,0 +1,119 @@ +package blob + +import ( + "fmt" + "strconv" + "strings" + + "github.com/c2h5oh/datasize" + "github.com/mitchellh/mapstructure" +) + +type ExtractPolicy struct { + RowsStrategy ExtractPolicyStrategy + RowsLimitBytes uint64 + FilesStrategy ExtractPolicyStrategy + FilesLimit uint64 +} + +type ExtractPolicyStrategy int + +const ( + ExtractPolicyStrategyUnspecified ExtractPolicyStrategy = 0 + ExtractPolicyStrategyHead ExtractPolicyStrategy = 1 + ExtractPolicyStrategyTail ExtractPolicyStrategy = 2 +) + +func (s ExtractPolicyStrategy) String() string { + switch s { + case ExtractPolicyStrategyHead: + return "head" + case ExtractPolicyStrategyTail: + return "tail" + default: + return "unspecified" + } +} + +type rawExtractPolicy struct { + Rows *struct { + Strategy string `mapstructure:"strategy"` + Size string `mapstructure:"size"` + } `mapstructure:"rows"` + Files *struct { + Strategy string `mapstructure:"strategy"` + Size string `mapstructure:"size"` + } `mapstructure:"files"` +} + +func ParseExtractPolicy(cfg map[string]any) (*ExtractPolicy, error) { + if len(cfg) == 0 { + return nil, nil + } + + raw := &rawExtractPolicy{} + if err := mapstructure.WeakDecode(cfg, raw); err != nil { + return nil, err + } + + res := 
&ExtractPolicy{}
+
+	// Parse files
+	if raw.Files != nil {
+		strategy, err := parseStrategy(raw.Files.Strategy)
+		if err != nil {
+			return nil, err
+		}
+		res.FilesStrategy = strategy
+
+		size, err := strconv.ParseUint(raw.Files.Size, 10, 64)
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse size: %w", err)
+		}
+		if size == 0 {
+			return nil, fmt.Errorf("invalid size %d", size)
+		}
+		res.FilesLimit = size
+	}
+
+	// Parse rows
+	if raw.Rows != nil {
+		strategy, err := parseStrategy(raw.Rows.Strategy)
+		if err != nil {
+			return nil, err
+		}
+		res.RowsStrategy = strategy
+
+		// TODO: Add support for number of rows
+		size, err := parseBytes(raw.Rows.Size)
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse size: %w", err)
+		}
+		if size == 0 {
+			return nil, fmt.Errorf("invalid size %d", size)
+		}
+		res.RowsLimitBytes = size
+	}
+
+	return res, nil
+}
+
+func parseStrategy(s string) (ExtractPolicyStrategy, error) {
+	switch strings.ToLower(s) {
+	case "head":
+		return ExtractPolicyStrategyHead, nil
+	case "tail":
+		return ExtractPolicyStrategyTail, nil
+	default:
+		return ExtractPolicyStrategyUnspecified, fmt.Errorf("invalid extract strategy %q", s)
+	}
+}
+
+func parseBytes(str string) (uint64, error) {
+	var s datasize.ByteSize
+	if err := s.UnmarshalText([]byte(str)); err != nil {
+		return 0, err
+	}
+
+	return s.Bytes(), nil
+}
diff --git a/runtime/drivers/blob/extract_policy_test.go b/runtime/drivers/blob/extract_policy_test.go
new file mode 100644
index 00000000000..1e98e33f963
--- /dev/null
+++ b/runtime/drivers/blob/extract_policy_test.go
@@ -0,0 +1,80 @@
+package blob
+
+import (
+	"reflect"
+	"testing"
+)
+
+func Test_fromExtractArtifact(t *testing.T) {
+	tests := []struct {
+		name    string
+		input   map[string]any
+		want    *ExtractPolicy
+		wantErr bool
+	}{
+		{
+			name:    "nil input",
+			input:   nil,
+			want:    nil,
+			wantErr: false,
+		},
+		{
+			name:  "parse row",
+			input: map[string]any{"rows": map[string]any{"strategy": "tail", "size": "23 KB"}},
+			want: &ExtractPolicy{
+				RowsStrategy:   ExtractPolicyStrategyTail,
+				RowsLimitBytes: 23552,
+			},
+			wantErr: false,
+		},
+		{
+			name:  "parse files",
+			input: map[string]any{"files": map[string]any{"strategy": "head", "size": "23"}},
+			want: &ExtractPolicy{
+				FilesStrategy: ExtractPolicyStrategyHead,
+				FilesLimit:    23,
+			},
+			wantErr: false,
+		},
+		{
+			name:  "parse both",
+			input: map[string]any{"files": map[string]any{"strategy": "tail", "size": "23"}, "rows": map[string]any{"strategy": "tail", "size": "512 B"}},
+			want: &ExtractPolicy{
+				FilesStrategy:  ExtractPolicyStrategyTail,
+				FilesLimit:     23,
+				RowsStrategy:   ExtractPolicyStrategyTail,
+				RowsLimitBytes: 512,
+			},
+			wantErr: false,
+		},
+		{
+			name:  "more examples",
+			input: map[string]any{"files": map[string]any{"strategy": "tail", "size": "23"}, "rows": map[string]any{"strategy": "tail", "size": "23 gb"}},
+			want: &ExtractPolicy{
+				FilesStrategy:  ExtractPolicyStrategyTail,
+				FilesLimit:     23,
+				RowsStrategy:   ExtractPolicyStrategyTail,
+				RowsLimitBytes: 23 * 1024 * 1024 * 1024,
+			},
+			wantErr: false,
+		},
+		{
+			name:    "invalid",
+			input:   map[string]any{"files": map[string]any{"strategy": "tail", "size": "23"}, "rows": map[string]any{"strategy": "tail", "size": "23%"}},
+			want:    nil,
+			wantErr: true,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := ParseExtractPolicy(tt.input)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("fromExtractArtifact() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if 
!reflect.DeepEqual(got, tt.want) { + t.Errorf("fromExtractArtifact() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/runtime/drivers/blob/parquetreader.go b/runtime/drivers/blob/parquetreader.go index 60f7a39a1e7..7efc6fed627 100644 --- a/runtime/drivers/blob/parquetreader.go +++ b/runtime/drivers/blob/parquetreader.go @@ -12,7 +12,6 @@ import ( "github.com/apache/arrow/go/v13/parquet/compress" "github.com/apache/arrow/go/v13/parquet/file" "github.com/apache/arrow/go/v13/parquet/pqarrow" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/pkg/arrayutil" "github.com/rilldata/rill/runtime/pkg/container" "gocloud.dev/blob" @@ -82,7 +81,7 @@ func downloadParquet(ctx context.Context, bucket *blob.Bucket, obj *blob.ListObj // estimateRecords estimates the number of rows to fetch based on extract policy // each arrow.Record will hold batchSize number of rows func estimateRecords(ctx context.Context, reader *file.Reader, pqToArrowReader *pqarrow.FileReader, config *extractOption) ([]arrow.Record, error) { - rowIndexes := arrayutil.RangeInt(0, reader.NumRowGroups(), config.strategy == runtimev1.Source_ExtractPolicy_STRATEGY_TAIL) + rowIndexes := arrayutil.RangeInt(0, reader.NumRowGroups(), config.strategy == ExtractPolicyStrategyTail) var ( // row group indices that we need @@ -133,12 +132,12 @@ func estimateRecords(ctx context.Context, reader *file.Reader, pqToArrowReader * return c.Items(), nil } -func containerForRecordLimiting(strategy runtimev1.Source_ExtractPolicy_Strategy, limit int) (container.Container[arrow.Record], error) { +func containerForRecordLimiting(strategy ExtractPolicyStrategy, limit int) (container.Container[arrow.Record], error) { switch strategy { - case runtimev1.Source_ExtractPolicy_STRATEGY_TAIL: - return container.NewFIFO(limit, func(rec arrow.Record) { rec.Release() }) - case runtimev1.Source_ExtractPolicy_STRATEGY_HEAD: + case ExtractPolicyStrategyHead: return container.NewBounded[arrow.Record](limit) + case ExtractPolicyStrategyTail: + return container.NewFIFO(limit, func(rec arrow.Record) { rec.Release() }) default: // No option selected - this should not be used for partial downloads though // in case of no extract policy we should be directly downloading the entire file diff --git a/runtime/drivers/blob/parquetreader_test.go b/runtime/drivers/blob/parquetreader_test.go index e8f76490483..ec263fb0e17 100644 --- a/runtime/drivers/blob/parquetreader_test.go +++ b/runtime/drivers/blob/parquetreader_test.go @@ -10,7 +10,6 @@ import ( "github.com/apache/arrow/go/v13/arrow/memory" "github.com/apache/arrow/go/v13/parquet/file" "github.com/apache/arrow/go/v13/parquet/pqarrow" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/pkg/fileutil" "github.com/stretchr/testify/require" "gocloud.dev/blob" @@ -34,13 +33,13 @@ func TestDownload(t *testing.T) { }{ { name: "download partial head", - args: args{ctx: context.Background(), bucket: bucket, obj: object, option: &extractOption{strategy: runtimev1.Source_ExtractPolicy_STRATEGY_HEAD, limitInBytes: 1}}, + args: args{ctx: context.Background(), bucket: bucket, obj: object, option: &extractOption{strategy: ExtractPolicyStrategyHead, limitInBytes: 1}}, wantErr: false, want: getInt32Array(1024, false), }, { name: "download partial tail", - args: args{ctx: context.Background(), bucket: bucket, obj: object, option: &extractOption{strategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, limitInBytes: 1}}, + args: args{ctx: 
context.Background(), bucket: bucket, obj: object, option: &extractOption{strategy: ExtractPolicyStrategyTail, limitInBytes: 1}}, wantErr: false, want: getInt32Array(2000, false)[1024:], }, diff --git a/runtime/drivers/blob/planner.go b/runtime/drivers/blob/planner.go index 8ba9f6c44d5..2b3727604d5 100644 --- a/runtime/drivers/blob/planner.go +++ b/runtime/drivers/blob/planner.go @@ -1,7 +1,6 @@ package blob import ( - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/pkg/container" "gocloud.dev/blob" ) @@ -10,7 +9,7 @@ import ( // it adds objects in the container which stops consuming files once it reaches file extract policy limits // every objects has details about what is the download strategy for that object type planner struct { - policy *runtimev1.Source_ExtractPolicy + policy *ExtractPolicy // rowPlanner adds support for row extract policy rowPlanner rowPlanner // keeps collection of objects to be downloaded @@ -18,7 +17,7 @@ type planner struct { container container.Container[*objectWithPlan] } -func newPlanner(policy *runtimev1.Source_ExtractPolicy) (*planner, error) { +func newPlanner(policy *ExtractPolicy) (*planner, error) { c, err := containerForFileStrategy(policy) if err != nil { return nil, err @@ -49,8 +48,8 @@ func (p *planner) items() []*objectWithPlan { return p.container.Items() } -func containerForFileStrategy(policy *runtimev1.Source_ExtractPolicy) (container.Container[*objectWithPlan], error) { - strategy := runtimev1.Source_ExtractPolicy_STRATEGY_UNSPECIFIED +func containerForFileStrategy(policy *ExtractPolicy) (container.Container[*objectWithPlan], error) { + strategy := ExtractPolicyStrategyUnspecified limit := 0 if policy != nil { strategy = policy.FilesStrategy @@ -58,23 +57,23 @@ func containerForFileStrategy(policy *runtimev1.Source_ExtractPolicy) (container } switch strategy { - case runtimev1.Source_ExtractPolicy_STRATEGY_TAIL: - return container.NewFIFO[*objectWithPlan](limit, nil) - case runtimev1.Source_ExtractPolicy_STRATEGY_HEAD: + case ExtractPolicyStrategyHead: return container.NewBounded[*objectWithPlan](limit) + case ExtractPolicyStrategyTail: + return container.NewFIFO[*objectWithPlan](limit, nil) default: // No option selected return container.NewUnbounded[*objectWithPlan]() } } -func rowPlannerForRowStrategy(policy *runtimev1.Source_ExtractPolicy) rowPlanner { +func rowPlannerForRowStrategy(policy *ExtractPolicy) rowPlanner { if policy == nil { return &plannerWithoutLimits{} } - if policy.RowsStrategy != runtimev1.Source_ExtractPolicy_STRATEGY_UNSPECIFIED { - if policy.FilesStrategy != runtimev1.Source_ExtractPolicy_STRATEGY_UNSPECIFIED { + if policy.RowsStrategy != ExtractPolicyStrategyUnspecified { + if policy.FilesStrategy != ExtractPolicyStrategyUnspecified { // file strategy specified row limits are per file return &plannerWithPerFileLimits{strategy: policy.RowsStrategy, limitInBytes: policy.RowsLimitBytes} } diff --git a/runtime/drivers/blob/rowplanner.go b/runtime/drivers/blob/rowplanner.go index f18d4cb2550..5dc98a2947c 100644 --- a/runtime/drivers/blob/rowplanner.go +++ b/runtime/drivers/blob/rowplanner.go @@ -1,7 +1,6 @@ package blob import ( - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "gocloud.dev/blob" ) @@ -19,7 +18,7 @@ type objectWithPlan struct { type extractOption struct { limitInBytes uint64 - strategy runtimev1.Source_ExtractPolicy_Strategy + strategy ExtractPolicyStrategy } // rowPlanner is an interface that creates download plan of a cloud object @@ -34,7 
+33,7 @@ type rowPlanner interface { // the limitInBytes is a combined limit on all files type plannerWithGlobalLimits struct { cumsizeInBytes uint64 - strategy runtimev1.Source_ExtractPolicy_Strategy + strategy ExtractPolicyStrategy limitInBytes uint64 full bool } @@ -58,7 +57,7 @@ func (r *plannerWithGlobalLimits) done() bool { // plannerWithPerFileLimits implements rowPlanner interface // limitInBytes is on individual file type plannerWithPerFileLimits struct { - strategy runtimev1.Source_ExtractPolicy_Strategy + strategy ExtractPolicyStrategy limitInBytes uint64 } diff --git a/runtime/drivers/blob/textreader.go b/runtime/drivers/blob/textreader.go index b50263782fc..959a0ab7602 100644 --- a/runtime/drivers/blob/textreader.go +++ b/runtime/drivers/blob/textreader.go @@ -8,7 +8,6 @@ import ( "io" "os" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "gocloud.dev/blob" ) @@ -35,12 +34,12 @@ func downloadText(ctx context.Context, bucket *blob.Bucket, obj *blob.ListObject func rows(reader *ObjectReader, option *textExtractOption) ([]byte, error) { switch option.extractOption.strategy { - case runtimev1.Source_ExtractPolicy_STRATEGY_HEAD: + case ExtractPolicyStrategyHead: return rowsHead(reader, option.extractOption) - case runtimev1.Source_ExtractPolicy_STRATEGY_TAIL: + case ExtractPolicyStrategyTail: return rowsTail(reader, option) default: - panic(fmt.Sprintf("unsupported strategy %s", option.extractOption.strategy)) + panic(fmt.Sprintf("unsupported strategy %s", option.extractOption.strategy.String())) } } diff --git a/runtime/drivers/blob/textreader_test.go b/runtime/drivers/blob/textreader_test.go index 33fdc5ab3b7..7a788d9dae7 100644 --- a/runtime/drivers/blob/textreader_test.go +++ b/runtime/drivers/blob/textreader_test.go @@ -7,7 +7,6 @@ import ( "os" "testing" - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" "github.com/rilldata/rill/runtime/pkg/fileutil" "github.com/stretchr/testify/require" "gocloud.dev/blob" @@ -38,7 +37,7 @@ func TestDownloadCSV(t *testing.T) { ctx: context.Background(), bucket: bucket, obj: object, - option: &textExtractOption{extractOption: &extractOption{strategy: runtimev1.Source_ExtractPolicy_STRATEGY_HEAD, limitInBytes: uint64(object.Size - 5)}, hasCSVHeader: true}, + option: &textExtractOption{extractOption: &extractOption{strategy: ExtractPolicyStrategyHead, limitInBytes: uint64(object.Size - 5)}, hasCSVHeader: true}, fw: getTempFile(t, object.Key), }, want: testData[:len(testData)-1], @@ -49,7 +48,7 @@ func TestDownloadCSV(t *testing.T) { ctx: context.Background(), bucket: bucket, obj: object, - option: &textExtractOption{extractOption: &extractOption{strategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, limitInBytes: uint64(object.Size - 5)}, hasCSVHeader: true}, + option: &textExtractOption{extractOption: &extractOption{strategy: ExtractPolicyStrategyTail, limitInBytes: uint64(object.Size - 5)}, hasCSVHeader: true}, fw: getTempFile(t, object.Key), }, want: resultTail, @@ -88,7 +87,7 @@ func TestDownloadCSVSingleLineHead(t *testing.T) { object, err := bucket.List(&blob.ListOptions{Prefix: file}).Next(ctx) require.NoError(t, err) - extractOption := &extractOption{strategy: runtimev1.Source_ExtractPolicy_STRATEGY_HEAD, limitInBytes: uint64(object.Size)} + extractOption := &extractOption{strategy: ExtractPolicyStrategyHead, limitInBytes: uint64(object.Size)} fw := getTempFile(t, "temp.csv") err = downloadText(ctx, bucket, object, &textExtractOption{extractOption: extractOption, hasCSVHeader: true}, fw) require.NoError(t, 
err) diff --git a/runtime/drivers/drivers.go b/runtime/drivers/drivers.go index fd1b5b0d32f..68b8c1d386c 100644 --- a/runtime/drivers/drivers.go +++ b/runtime/drivers/drivers.go @@ -78,7 +78,7 @@ type Driver interface { Drop(config map[string]any, logger *zap.Logger) error // HasAnonymousSourceAccess returns true if external system can be accessed without credentials - HasAnonymousSourceAccess(ctx context.Context, src Source, logger *zap.Logger) (bool, error) + HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) } // Handle represents a connection to an underlying DB. diff --git a/runtime/drivers/druid/druid.go b/runtime/drivers/druid/druid.go index 9939bf77625..1c9a5cc9720 100644 --- a/runtime/drivers/druid/druid.go +++ b/runtime/drivers/druid/druid.go @@ -52,7 +52,7 @@ func (d driver) Spec() drivers.Spec { return drivers.Spec{} } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { return false, fmt.Errorf("not implemented") } diff --git a/runtime/drivers/duckdb/duckdb.go b/runtime/drivers/duckdb/duckdb.go index 6583566ecbb..1f4158f2efc 100644 --- a/runtime/drivers/duckdb/duckdb.go +++ b/runtime/drivers/duckdb/duckdb.go @@ -130,7 +130,7 @@ func (d Driver) Spec() drivers.Spec { return spec } -func (d Driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (d Driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { return false, nil } diff --git a/runtime/drivers/duckdb/transporter/duckDB_to_duckDB.go b/runtime/drivers/duckdb/transporter/duckDB_to_duckDB.go index 8720f2bc179..4a0c4386064 100644 --- a/runtime/drivers/duckdb/transporter/duckDB_to_duckDB.go +++ b/runtime/drivers/duckdb/transporter/duckDB_to_duckDB.go @@ -22,16 +22,17 @@ func NewDuckDBToDuckDB(to drivers.OLAPStore, logger *zap.Logger) drivers.Transpo var _ drivers.Transporter = &duckDBToDuckDB{} -func (t *duckDBToDuckDB) Transfer(ctx context.Context, source drivers.Source, sink drivers.Sink, opts *drivers.TransferOpts, p drivers.Progress) error { - src, ok := source.DatabaseSource() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSource`") +func (t *duckDBToDuckDB) Transfer(ctx context.Context, srcProps, sinkProps map[string]any, opts *drivers.TransferOpts, p drivers.Progress) error { + srcCfg, err := parseSourceProperties(srcProps) + if err != nil { + return err } - fSink, ok := sink.DatabaseSink() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSink`") + + sinkCfg, err := parseSinkProperties(sinkProps) + if err != nil { + return err } - qry := fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (%s)", safeName(fSink.Table), src.SQL) + qry := fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (%s)", safeName(sinkCfg.Table), srcCfg.SQL) return t.to.Exec(ctx, &drivers.Statement{Query: qry, Priority: 1}) } diff --git a/runtime/drivers/duckdb/transporter/filestore_to_duckDB.go b/runtime/drivers/duckdb/transporter/filestore_to_duckDB.go index ea5bac17ed5..fce9a54b237 100644 --- a/runtime/drivers/duckdb/transporter/filestore_to_duckDB.go +++ b/runtime/drivers/duckdb/transporter/filestore_to_duckDB.go @@ -25,17 +25,13 @@ func NewFileStoreToDuckDB(from drivers.FileStore, to drivers.OLAPStore, logger * var _ drivers.Transporter = &fileStoreToDuckDB{} -func (t 
*fileStoreToDuckDB) Transfer(ctx context.Context, source drivers.Source, sink drivers.Sink, opts *drivers.TransferOpts, p drivers.Progress) error { - src, ok := source.FileSource() - if !ok { - return fmt.Errorf("type of source should `drivers.FilesSource`") - } - fSink, ok := sink.DatabaseSink() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSink`") +func (t *fileStoreToDuckDB) Transfer(ctx context.Context, srcProps, sinkProps map[string]any, opts *drivers.TransferOpts, p drivers.Progress) error { + sinkCfg, err := parseSinkProperties(sinkProps) + if err != nil { + return err } - localPaths, err := t.from.FilePaths(ctx, src) + localPaths, err := t.from.FilePaths(ctx, srcProps) if err != nil { return err } @@ -51,14 +47,14 @@ func (t *fileStoreToDuckDB) Transfer(ctx context.Context, source drivers.Source, p.Target(size, drivers.ProgressUnitByte) var format string - if val, ok := src.Properties["format"].(string); ok { + if val, ok := srcProps["format"].(string); ok { format = fmt.Sprintf(".%s", val) } else { format = fileutil.FullExt(localPaths[0]) } var ingestionProps map[string]any - if duckDBProps, ok := src.Properties["duckdb"].(map[string]any); ok { + if duckDBProps, ok := srcProps["duckdb"].(map[string]any); ok { ingestionProps = duckDBProps } else { ingestionProps = map[string]any{} @@ -70,7 +66,7 @@ func (t *fileStoreToDuckDB) Transfer(ctx context.Context, source drivers.Source, return err } - qry := fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (SELECT * FROM %s)", safeName(fSink.Table), from) + qry := fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (SELECT * FROM %s)", safeName(sinkCfg.Table), from) err = t.to.Exec(ctx, &drivers.Statement{Query: qry, Priority: 1}) if err != nil { return err diff --git a/runtime/drivers/duckdb/transporter/motherduck_to_duckDB.go b/runtime/drivers/duckdb/transporter/motherduck_to_duckDB.go index ff9317da8d0..1e6961d7aa2 100644 --- a/runtime/drivers/duckdb/transporter/motherduck_to_duckDB.go +++ b/runtime/drivers/duckdb/transporter/motherduck_to_duckDB.go @@ -27,19 +27,20 @@ func NewMotherduckToDuckDB(from drivers.Handle, to drivers.OLAPStore, logger *za } } -// TODO :: should it run count from user_query to set target in progress ? -func (t *motherduckToDuckDB) Transfer(ctx context.Context, source drivers.Source, sink drivers.Sink, opts *drivers.TransferOpts, p drivers.Progress) error { - src, ok := source.DatabaseSource() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSource`") +// TODO: should it run count from user_query to set target in progress ? 
+func (t *motherduckToDuckDB) Transfer(ctx context.Context, srcProps, sinkProps map[string]any, opts *drivers.TransferOpts, p drivers.Progress) error { + srcCfg, err := parseSourceProperties(srcProps) + if err != nil { + return err } - fSink, ok := sink.DatabaseSink() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSink`") + + sinkCfg, err := parseSinkProperties(sinkProps) + if err != nil { + return err } config := t.from.Config() - err := t.to.WithConnection(ctx, 1, func(ctx, ensuredCtx context.Context, _ *sql.Conn) error { + err = t.to.WithConnection(ctx, 1, func(ctx, ensuredCtx context.Context, _ *sql.Conn) error { res, err := t.to.Execute(ctx, &drivers.Statement{Query: "SELECT current_database();"}) if err != nil { return err @@ -76,7 +77,7 @@ func (t *motherduckToDuckDB) Transfer(ctx context.Context, source drivers.Source var names []string - db := src.Database + db := srcCfg.Database if db == "" { // get list of all motherduck databases res, err = t.to.Execute(ctx, &drivers.Statement{Query: "SELECT name FROM md_databases();"}) @@ -112,13 +113,13 @@ func (t *motherduckToDuckDB) Transfer(ctx context.Context, source drivers.Source }(ensuredCtx) } - if src.SQL == "" { - return fmt.Errorf("property \"query\" is mandatory for connector \"motherduck\"") + if srcCfg.SQL == "" { + return fmt.Errorf("property \"sql\" is mandatory for connector \"motherduck\"") } - userQuery := strings.TrimSpace(src.SQL) + userQuery := strings.TrimSpace(srcCfg.SQL) userQuery, _ = strings.CutSuffix(userQuery, ";") // trim trailing semi colon - query := fmt.Sprintf("CREATE OR REPLACE TABLE %s.%s AS (%s);", safeName(localDB), safeName(fSink.Table), userQuery) + query := fmt.Sprintf("CREATE OR REPLACE TABLE %s.%s AS (%s);", safeName(localDB), safeName(sinkCfg.Table), userQuery) return t.to.Exec(ctx, &drivers.Statement{Query: query}) }) return err diff --git a/runtime/drivers/duckdb/transporter/objectStore_to_duckDB.go b/runtime/drivers/duckdb/transporter/objectStore_to_duckDB.go index b663021aa5e..c94c50ecd1d 100644 --- a/runtime/drivers/duckdb/transporter/objectStore_to_duckDB.go +++ b/runtime/drivers/duckdb/transporter/objectStore_to_duckDB.go @@ -30,17 +30,13 @@ func NewObjectStoreToDuckDB(from drivers.ObjectStore, to drivers.OLAPStore, logg } } -func (t *objectStoreToDuckDB) Transfer(ctx context.Context, source drivers.Source, sink drivers.Sink, opts *drivers.TransferOpts, p drivers.Progress) error { - src, ok := source.BucketSource() - if !ok { - return fmt.Errorf("type of source should `drivers.BucketSource`") - } - dbSink, ok := sink.DatabaseSink() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSink`") +func (t *objectStoreToDuckDB) Transfer(ctx context.Context, srcProps, sinkProps map[string]any, opts *drivers.TransferOpts, p drivers.Progress) error { + sinkCfg, err := parseSinkProperties(sinkProps) + if err != nil { + return err } - iterator, err := t.from.DownloadFiles(ctx, src) + iterator, err := t.from.DownloadFiles(ctx, srcProps) if err != nil { return err } @@ -51,27 +47,27 @@ func (t *objectStoreToDuckDB) Transfer(ctx context.Context, source drivers.Sourc return drivers.ErrIngestionLimitExceeded } - sql, hasSQL := src.Properties["sql"].(string) // if sql is specified use ast rewrite to fill in the downloaded files + sql, hasSQL := srcProps["sql"].(string) if hasSQL { - return t.ingestDuckDBSQL(ctx, sql, iterator, dbSink, opts, p) + return t.ingestDuckDBSQL(ctx, sql, iterator, sinkCfg, opts, p) } p.Target(size, drivers.ProgressUnitByte) appendToTable := 
false var format string - val, formatDefined := src.Properties["format"].(string) + val, formatDefined := srcProps["format"].(string) if formatDefined { format = fmt.Sprintf(".%s", val) } - allowSchemaRelaxation, err := schemaRelaxationProperty(src.Properties) + allowSchemaRelaxation, err := schemaRelaxationProperty(srcProps) if err != nil { return err } var ingestionProps map[string]any - if duckDBProps, ok := src.Properties["duckdb"].(map[string]any); ok { + if duckDBProps, ok := srcProps["duckdb"].(map[string]any); ok { ingestionProps = duckDBProps } else { ingestionProps = map[string]any{} @@ -81,7 +77,7 @@ func (t *objectStoreToDuckDB) Transfer(ctx context.Context, source drivers.Sourc ingestionProps["union_by_name"] = true } - a := newAppender(t.to, dbSink, ingestionProps, allowSchemaRelaxation, t.logger) + a := newAppender(t.to, sinkCfg, ingestionProps, allowSchemaRelaxation, t.logger) for iterator.HasNext() { files, err := iterator.NextBatch(opts.IteratorBatch) @@ -106,7 +102,7 @@ func (t *objectStoreToDuckDB) Transfer(ctx context.Context, source drivers.Sourc return err } - query := fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (SELECT * FROM %s);", safeName(dbSink.Table), from) + query := fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (SELECT * FROM %s);", safeName(sinkCfg.Table), from) if err := t.to.Exec(ctx, &drivers.Statement{Query: query, Priority: 1}); err != nil { return err } @@ -122,14 +118,14 @@ func (t *objectStoreToDuckDB) Transfer(ctx context.Context, source drivers.Sourc type appender struct { to drivers.OLAPStore - sink *drivers.DatabaseSink + sink *sinkProperties ingestionProps map[string]any allowSchemaRelaxation bool tableSchema map[string]string logger *zap.Logger } -func newAppender(to drivers.OLAPStore, sink *drivers.DatabaseSink, ingestionProps map[string]any, +func newAppender(to drivers.OLAPStore, sink *sinkProperties, ingestionProps map[string]any, allowSchemaRelaxation bool, logger *zap.Logger, ) *appender { return &appender{ @@ -273,7 +269,7 @@ func (t *objectStoreToDuckDB) ingestDuckDBSQL( ctx context.Context, originalSQL string, iterator drivers.FileIterator, - dbSink *drivers.DatabaseSink, + dbSink *sinkProperties, opts *drivers.TransferOpts, p drivers.Progress, ) error { diff --git a/runtime/drivers/duckdb/transporter/sqlstore_to_duckDB.go b/runtime/drivers/duckdb/transporter/sqlstore_to_duckDB.go index 0f295d1bfef..8e0780f98a9 100644 --- a/runtime/drivers/duckdb/transporter/sqlstore_to_duckDB.go +++ b/runtime/drivers/duckdb/transporter/sqlstore_to_duckDB.go @@ -27,24 +27,20 @@ func NewSQLStoreToDuckDB(from drivers.SQLStore, to drivers.OLAPStore, logger *za } } -func (s *sqlStoreToDuckDB) Transfer(ctx context.Context, source drivers.Source, sink drivers.Sink, opts *drivers.TransferOpts, p drivers.Progress) (transferErr error) { - src, ok := source.DatabaseSource() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSource`") - } - dbSink, ok := sink.DatabaseSink() - if !ok { - return fmt.Errorf("type of source should `drivers.DatabaseSink`") +func (s *sqlStoreToDuckDB) Transfer(ctx context.Context, srcProps, sinkProps map[string]any, opts *drivers.TransferOpts, p drivers.Progress) (transferErr error) { + sinkCfg, err := parseSinkProperties(sinkProps) + if err != nil { + return err } - iter, err := s.from.QueryAsFiles(ctx, src.Props, src.SQL, &drivers.QueryOption{TotalLimitInBytes: opts.LimitInBytes}, p) + iter, err := s.from.QueryAsFiles(ctx, srcProps, &drivers.QueryOption{TotalLimitInBytes: opts.LimitInBytes}, p) if err != nil { return 
err } defer iter.Close() start := time.Now() - s.logger.Info("started transfer from local file to duckdb", zap.String("sink_table", dbSink.Table), observability.ZapCtx(ctx)) + s.logger.Info("started transfer from local file to duckdb", zap.String("sink_table", sinkCfg.Table), observability.ZapCtx(ctx)) defer func() { s.logger.Info("transfer finished", zap.Duration("duration", time.Since(start)), @@ -69,10 +65,10 @@ func (s *sqlStoreToDuckDB) Transfer(ctx context.Context, source drivers.Source, var query string if create { - query = fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (SELECT * FROM %s);", safeName(dbSink.Table), from) + query = fmt.Sprintf("CREATE OR REPLACE TABLE %s AS (SELECT * FROM %s);", safeName(sinkCfg.Table), from) create = false } else { - query = fmt.Sprintf("INSERT INTO %s (SELECT * FROM %s);", safeName(dbSink.Table), from) + query = fmt.Sprintf("INSERT INTO %s (SELECT * FROM %s);", safeName(sinkCfg.Table), from) } if err := s.to.Exec(ctx, &drivers.Statement{Query: query, Priority: 1}); err != nil { diff --git a/runtime/drivers/duckdb/transporter/transporter_test.go b/runtime/drivers/duckdb/transporter/transporter_test.go index fa6930e37d0..ee8b43887bd 100644 --- a/runtime/drivers/duckdb/transporter/transporter_test.go +++ b/runtime/drivers/duckdb/transporter/transporter_test.go @@ -19,7 +19,7 @@ type mockObjectStore struct { mockIterator drivers.FileIterator } -func (m *mockObjectStore) DownloadFiles(ctx context.Context, src *drivers.BucketSource) (drivers.FileIterator, error) { +func (m *mockObjectStore) DownloadFiles(ctx context.Context, srcProps map[string]any) (drivers.FileIterator, error) { return m.mockIterator, nil } @@ -159,18 +159,14 @@ mum,8.2`) ctx := context.Background() tr := transporter.NewObjectStoreToDuckDB(mockConnector, olap, zap.NewNop()) - var src *drivers.BucketSource + var src map[string]any if test.query { - src = &drivers.BucketSource{ - Properties: map[string]any{"sql": "select * from read_csv_auto('path',union_by_name=true,sample_size=200000)"}, - } + src = map[string]any{"sql": "select * from read_csv_auto('path',union_by_name=true,sample_size=200000)"} } else { - src = &drivers.BucketSource{ - Properties: map[string]any{"allow_schema_relaxation": true}, - } + src = map[string]any{"allow_schema_relaxation": true} } - err = tr.Transfer(ctx, src, &drivers.DatabaseSink{Table: test.name}, drivers.NewTransferOpts(), + err = tr.Transfer(ctx, src, map[string]any{"table": test.name}, drivers.NewTransferOpts(), drivers.NoOpProgress{}) require.NoError(t, err, "no err expected test %s", test.name) @@ -309,16 +305,14 @@ mum,8.2`) ctx := context.Background() tr := transporter.NewObjectStoreToDuckDB(mockConnector, olap, zap.NewNop()) - var src *drivers.BucketSource + var src map[string]any if test.query { - src = &drivers.BucketSource{ - Properties: map[string]any{"sql": "select * from read_csv_auto('path')"}, - } + src = map[string]any{"sql": "select * from read_csv_auto('path')"} } else { - src = &drivers.BucketSource{} + src = map[string]any{} } - err = tr.Transfer(ctx, src, &drivers.DatabaseSink{Table: test.name}, + err = tr.Transfer(ctx, src, map[string]any{"table": test.name}, drivers.NewTransferOpts(), drivers.NoOpProgress{}) if test.hasError { require.Error(t, err, fmt.Errorf("error expected for %s got nil", test.name)) @@ -411,18 +405,14 @@ func TestIterativeParquetIngestionWithVariableSchema(t *testing.T) { ctx := context.Background() tr := transporter.NewObjectStoreToDuckDB(mockConnector, olap, zap.NewNop()) - var src *drivers.BucketSource + var 
src map[string]any if test.query { - src = &drivers.BucketSource{ - Properties: map[string]any{"sql": "select * from read_parquet('path',union_by_name=true,hive_partitioning=true)"}, - } + src = map[string]any{"sql": "select * from read_parquet('path',union_by_name=true,hive_partitioning=true)"} } else { - src = &drivers.BucketSource{ - Properties: map[string]any{"allow_schema_relaxation": true}, - } + src = map[string]any{"allow_schema_relaxation": true} } - err := tr.Transfer(ctx, src, &drivers.DatabaseSink{Table: test.name}, + err := tr.Transfer(ctx, src, map[string]any{"table": test.name}, drivers.NewTransferOpts(), drivers.NoOpProgress{}) require.NoError(t, err) @@ -557,18 +547,14 @@ func TestIterativeJSONIngestionWithVariableSchema(t *testing.T) { ctx := context.Background() tr := transporter.NewObjectStoreToDuckDB(mockConnector, olap, zap.NewNop()) - var src *drivers.BucketSource + var src map[string]any if test.query { - src = &drivers.BucketSource{ - Properties: map[string]any{"sql": "select * from read_json('path',format='auto',union_by_name=true,auto_detect=true,sample_size=200000)"}, - } + src = map[string]any{"sql": "select * from read_json('path',format='auto',union_by_name=true,auto_detect=true,sample_size=200000)"} } else { - src = &drivers.BucketSource{ - Properties: map[string]any{"allow_schema_relaxation": true}, - } + src = map[string]any{"allow_schema_relaxation": true} } - err := tr.Transfer(ctx, src, &drivers.DatabaseSink{Table: test.name}, + err := tr.Transfer(ctx, src, map[string]any{"table": test.name}, drivers.NewTransferOpts(), drivers.NoOpProgress{}) require.NoError(t, err, "no err expected test %s", test.name) diff --git a/runtime/drivers/duckdb/transporter/utils.go b/runtime/drivers/duckdb/transporter/utils.go index 22657049f6b..5a816390e24 100644 --- a/runtime/drivers/duckdb/transporter/utils.go +++ b/runtime/drivers/duckdb/transporter/utils.go @@ -5,8 +5,38 @@ import ( "os" "path/filepath" "strings" + + "github.com/mitchellh/mapstructure" ) +type sourceProperties struct { + Database string `mapstructure:"db"` + SQL string `mapstructure:"sql"` +} + +func parseSourceProperties(props map[string]any) (*sourceProperties, error) { + cfg := &sourceProperties{} + if err := mapstructure.Decode(props, cfg); err != nil { + return nil, fmt.Errorf("failed to parse source properties: %w", err) + } + if cfg.SQL == "" { + return nil, fmt.Errorf("property 'sql' is mandatory") + } + return cfg, nil +} + +type sinkProperties struct { + Table string `mapstructure:"table"` +} + +func parseSinkProperties(props map[string]any) (*sinkProperties, error) { + cfg := &sinkProperties{} + if err := mapstructure.Decode(props, cfg); err != nil { + return nil, fmt.Errorf("failed to parse sink properties: %w", err) + } + return cfg, nil +} + func sourceReader(paths []string, format string, ingestionProps map[string]any) (string, error) { // Generate a "read" statement if containsAny(format, []string{".csv", ".tsv", ".txt"}) { diff --git a/runtime/drivers/file/file.go b/runtime/drivers/file/file.go index e8575fd554b..8346c01e2d6 100644 --- a/runtime/drivers/file/file.go +++ b/runtime/drivers/file/file.go @@ -86,7 +86,7 @@ func (d driver) Spec() drivers.Spec { return spec } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { return true, nil } diff --git a/runtime/drivers/file/file_store.go 
b/runtime/drivers/file/file_store.go index a41e8a232b2..cd937d8facb 100644 --- a/runtime/drivers/file/file_store.go +++ b/runtime/drivers/file/file_store.go @@ -5,13 +5,12 @@ import ( "fmt" "github.com/bmatcuk/doublestar/v4" - "github.com/rilldata/rill/runtime/drivers" "github.com/rilldata/rill/runtime/pkg/fileutil" ) // FilePaths implements drivers.FileStore -func (c *connection) FilePaths(ctx context.Context, src *drivers.FileSource) ([]string, error) { - conf, err := parseSourceProperties(src.Properties) +func (c *connection) FilePaths(ctx context.Context, src map[string]any) ([]string, error) { + conf, err := parseSourceProperties(src) if err != nil { return nil, err } diff --git a/runtime/drivers/file_store.go b/runtime/drivers/file_store.go index 0143f3e8d56..b1b346e191c 100644 --- a/runtime/drivers/file_store.go +++ b/runtime/drivers/file_store.go @@ -4,5 +4,5 @@ import "context" type FileStore interface { // FilePaths returns local absolute paths where files are stored - FilePaths(ctx context.Context, src *FileSource) ([]string, error) + FilePaths(ctx context.Context, src map[string]any) ([]string, error) } diff --git a/runtime/drivers/gcs/gcs.go b/runtime/drivers/gcs/gcs.go index 5c8a1207071..c3029e1dae1 100644 --- a/runtime/drivers/gcs/gcs.go +++ b/runtime/drivers/gcs/gcs.go @@ -122,13 +122,8 @@ func (d driver) Spec() drivers.Spec { return spec } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { - b, ok := src.BucketSource() - if !ok { - return false, fmt.Errorf("require bucket source") - } - - conf, err := parseSourceProperties(b.Properties) +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { + conf, err := parseSourceProperties(src) if err != nil { return false, fmt.Errorf("failed to parse config: %w", err) } @@ -143,35 +138,44 @@ func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source } type sourceProperties struct { - Path string `key:"path"` - GlobMaxTotalSize int64 `mapstructure:"glob.max_total_size"` - GlobMaxObjectsMatched int `mapstructure:"glob.max_objects_matched"` - GlobMaxObjectsListed int64 `mapstructure:"glob.max_objects_listed"` - GlobPageSize int `mapstructure:"glob.page_size"` + Path string `key:"path"` + Extract map[string]any `mapstructure:"extract"` + GlobMaxTotalSize int64 `mapstructure:"glob.max_total_size"` + GlobMaxObjectsMatched int `mapstructure:"glob.max_objects_matched"` + GlobMaxObjectsListed int64 `mapstructure:"glob.max_objects_listed"` + GlobPageSize int `mapstructure:"glob.page_size"` url *globutil.URL + extractPolicy *rillblob.ExtractPolicy } func parseSourceProperties(props map[string]any) (*sourceProperties, error) { conf := &sourceProperties{} - err := mapstructure.Decode(props, conf) + err := mapstructure.WeakDecode(props, conf) if err != nil { return nil, err } + if !doublestar.ValidatePattern(conf.Path) { // ideally this should be validated at much earlier stage // keeping it here to have gcs specific validations return nil, fmt.Errorf("glob pattern %s is invalid", conf.Path) } + url, err := globutil.ParseBucketURL(conf.Path) if err != nil { return nil, fmt.Errorf("failed to parse path %q, %w", conf.Path, err) } + conf.url = url if url.Scheme != "gs" { return nil, fmt.Errorf("invalid gcs path %q, should start with gs://", conf.Path) } - conf.url = url + conf.extractPolicy, err = rillblob.ParseExtractPolicy(conf.Extract) + if err != nil { + return nil, fmt.Errorf("failed to parse 
extract config: %w", err) + } + return conf, nil } @@ -251,8 +255,8 @@ func (c *Connection) AsSQLStore() (drivers.SQLStore, bool) { // DownloadFiles returns a file iterator over objects stored in gcs. // The credential json is read from config google_application_credentials. // Additionally in case `allow_host_credentials` is true it looks for "Application Default Credentials" as well -func (c *Connection) DownloadFiles(ctx context.Context, source *drivers.BucketSource) (drivers.FileIterator, error) { - conf, err := parseSourceProperties(source.Properties) +func (c *Connection) DownloadFiles(ctx context.Context, props map[string]any) (drivers.FileIterator, error) { + conf, err := parseSourceProperties(props) if err != nil { return nil, fmt.Errorf("failed to parse config: %w", err) } @@ -274,7 +278,7 @@ func (c *Connection) DownloadFiles(ctx context.Context, source *drivers.BucketSo GlobMaxObjectsListed: conf.GlobMaxObjectsListed, GlobPageSize: conf.GlobPageSize, GlobPattern: conf.url.Path, - ExtractPolicy: source.ExtractPolicy, + ExtractPolicy: conf.extractPolicy, } iter, err := rillblob.NewIterator(ctx, bucketObj, opts, c.logger) diff --git a/runtime/drivers/github/github.go b/runtime/drivers/github/github.go index f79968f442e..a02c9f73464 100644 --- a/runtime/drivers/github/github.go +++ b/runtime/drivers/github/github.go @@ -96,7 +96,7 @@ func (d driver) Spec() drivers.Spec { return drivers.Spec{} } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { return false, fmt.Errorf("not implemented") } diff --git a/runtime/drivers/https/https.go b/runtime/drivers/https/https.go index c56a5b3f1eb..d03a470337e 100644 --- a/runtime/drivers/https/https.go +++ b/runtime/drivers/https/https.go @@ -55,7 +55,7 @@ func (d driver) Spec() drivers.Spec { return spec } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { return true, nil } @@ -145,8 +145,8 @@ func (c *connection) AsSQLStore() (drivers.SQLStore, bool) { } // FilePaths implements drivers.FileStore -func (c *connection) FilePaths(ctx context.Context, src *drivers.FileSource) ([]string, error) { - conf, err := parseSourceProperties(src.Properties) +func (c *connection) FilePaths(ctx context.Context, src map[string]any) ([]string, error) { + conf, err := parseSourceProperties(src) if err != nil { return nil, fmt.Errorf("failed to parse config: %w", err) } @@ -176,8 +176,7 @@ func (c *connection) FilePaths(ctx context.Context, src *drivers.FileSource) ([] return nil, fmt.Errorf("failed to fetch url %s: %s", conf.Path, resp.Status) } - // TODO :: I don't like src.Name - file, size, err := fileutil.CopyToTempFile(resp.Body, src.Name, extension) + file, size, err := fileutil.CopyToTempFile(resp.Body, "", extension) if err != nil { return nil, err } diff --git a/runtime/drivers/object_store.go b/runtime/drivers/object_store.go index 3a19262173e..0d298fb4e75 100644 --- a/runtime/drivers/object_store.go +++ b/runtime/drivers/object_store.go @@ -4,7 +4,7 @@ import "context" type ObjectStore interface { // DownloadFiles provides an iterator for downloading and consuming files - DownloadFiles(ctx context.Context, src *BucketSource) (FileIterator, error) + DownloadFiles(ctx context.Context, 
src map[string]any) (FileIterator, error) } // FileIterator provides ways to iteratively download files from external sources diff --git a/runtime/drivers/postgres/postgres.go b/runtime/drivers/postgres/postgres.go index 6320555b82d..883b9cb4a80 100644 --- a/runtime/drivers/postgres/postgres.go +++ b/runtime/drivers/postgres/postgres.go @@ -43,8 +43,8 @@ func (d driver) Spec() drivers.Spec { return drivers.Spec{} } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { - return false, fmt.Errorf("not implemented") +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { + return false, nil } type connection struct { diff --git a/runtime/drivers/s3/s3.go b/runtime/drivers/s3/s3.go index 909edfd7dc4..075495b0b1d 100644 --- a/runtime/drivers/s3/s3.go +++ b/runtime/drivers/s3/s3.go @@ -86,18 +86,19 @@ type configProperties struct { } // Open implements drivers.Driver -func (d driver) Open(config map[string]any, shared bool, client activity.Client, logger *zap.Logger) (drivers.Handle, error) { +func (d driver) Open(cfgMap map[string]any, shared bool, client activity.Client, logger *zap.Logger) (drivers.Handle, error) { if shared { return nil, fmt.Errorf("s3 driver can't be shared") } - conf := &configProperties{} - err := mapstructure.Decode(config, conf) + + cfg := &configProperties{} + err := mapstructure.Decode(cfgMap, cfg) if err != nil { return nil, err } conn := &Connection{ - config: conf, + config: cfg, logger: logger, } return conn, nil @@ -112,12 +113,8 @@ func (d driver) Spec() drivers.Spec { return spec } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { - b, ok := src.BucketSource() - if !ok { - return false, fmt.Errorf("require bucket source") - } - conf, err := parseSourceProperties(b.Properties) +func (d driver) HasAnonymousSourceAccess(ctx context.Context, props map[string]any, logger *zap.Logger) (bool, error) { + conf, err := parseSourceProperties(props) if err != nil { return false, fmt.Errorf("failed to parse config: %w", err) } @@ -213,19 +210,21 @@ func (c *Connection) AsSQLStore() (drivers.SQLStore, bool) { } type sourceProperties struct { - Path string `mapstructure:"path"` - AWSRegion string `mapstructure:"region"` - GlobMaxTotalSize int64 `mapstructure:"glob.max_total_size"` - GlobMaxObjectsMatched int `mapstructure:"glob.max_objects_matched"` - GlobMaxObjectsListed int64 `mapstructure:"glob.max_objects_listed"` - GlobPageSize int `mapstructure:"glob.page_size"` - S3Endpoint string `mapstructure:"endpoint"` + Path string `mapstructure:"path"` + AWSRegion string `mapstructure:"region"` + GlobMaxTotalSize int64 `mapstructure:"glob.max_total_size"` + GlobMaxObjectsMatched int `mapstructure:"glob.max_objects_matched"` + GlobMaxObjectsListed int64 `mapstructure:"glob.max_objects_listed"` + GlobPageSize int `mapstructure:"glob.page_size"` + S3Endpoint string `mapstructure:"endpoint"` + Extract map[string]any `mapstructure:"extract"` url *globutil.URL + extractPolicy *rillblob.ExtractPolicy } func parseSourceProperties(props map[string]any) (*sourceProperties, error) { conf := &sourceProperties{} - err := mapstructure.Decode(props, conf) + err := mapstructure.WeakDecode(props, conf) if err != nil { return nil, err } @@ -238,11 +237,17 @@ func parseSourceProperties(props map[string]any) (*sourceProperties, error) { if err != nil { return nil, fmt.Errorf("failed to parse path %q, %w", 
conf.Path, err) } + conf.url = url if url.Scheme != "s3" { return nil, fmt.Errorf("invalid s3 path %q, should start with s3://", conf.Path) } - conf.url = url + + conf.extractPolicy, err = rillblob.ParseExtractPolicy(conf.Extract) + if err != nil { + return nil, fmt.Errorf("failed to parse extract config: %w", err) + } + return conf, nil } @@ -254,8 +259,8 @@ func parseSourceProperties(props map[string]any) (*sourceProperties, error) { // - aws_session_token // // Additionally in case allow_host_credentials is true it looks for credentials stored on host machine as well -func (c *Connection) DownloadFiles(ctx context.Context, src *drivers.BucketSource) (drivers.FileIterator, error) { - conf, err := parseSourceProperties(src.Properties) +func (c *Connection) DownloadFiles(ctx context.Context, src map[string]any) (drivers.FileIterator, error) { + conf, err := parseSourceProperties(src) if err != nil { return nil, fmt.Errorf("failed to parse config: %w", err) } @@ -277,7 +282,7 @@ func (c *Connection) DownloadFiles(ctx context.Context, src *drivers.BucketSourc GlobMaxObjectsListed: conf.GlobMaxObjectsListed, GlobPageSize: conf.GlobPageSize, GlobPattern: conf.url.Path, - ExtractPolicy: src.ExtractPolicy, + ExtractPolicy: conf.extractPolicy, } it, err := rillblob.NewIterator(ctx, bucketObj, opts, c.logger) diff --git a/runtime/drivers/sql_store.go b/runtime/drivers/sql_store.go index 1b06c5a1c32..2ecd9739f17 100644 --- a/runtime/drivers/sql_store.go +++ b/runtime/drivers/sql_store.go @@ -14,9 +14,9 @@ var ErrIteratorDone = errors.New("empty iterator") // May be call it DataWarehouse to differentiate from OLAP or postgres? type SQLStore interface { // Query returns driver.RowIterator to iterate over results row by row - Query(ctx context.Context, props map[string]any, sql string) (RowIterator, error) + Query(ctx context.Context, props map[string]any) (RowIterator, error) // QueryAsFiles downloads results into files and returns an iterator to iterate over them - QueryAsFiles(ctx context.Context, props map[string]any, sql string, opt *QueryOption, p Progress) (FileIterator, error) + QueryAsFiles(ctx context.Context, props map[string]any, opt *QueryOption, p Progress) (FileIterator, error) } type QueryOption struct { diff --git a/runtime/drivers/sqlite/sqlite.go b/runtime/drivers/sqlite/sqlite.go index c1961b4f91f..b34b94e6554 100644 --- a/runtime/drivers/sqlite/sqlite.go +++ b/runtime/drivers/sqlite/sqlite.go @@ -58,8 +58,8 @@ func (d driver) Spec() drivers.Spec { return drivers.Spec{} } -func (d driver) HasAnonymousSourceAccess(ctx context.Context, src drivers.Source, logger *zap.Logger) (bool, error) { - return false, fmt.Errorf("not implemented") +func (d driver) HasAnonymousSourceAccess(ctx context.Context, src map[string]any, logger *zap.Logger) (bool, error) { + return false, nil } type connection struct { diff --git a/runtime/drivers/transporter.go b/runtime/drivers/transporter.go index 3c68843ddc6..8863da010b2 100644 --- a/runtime/drivers/transporter.go +++ b/runtime/drivers/transporter.go @@ -3,14 +3,12 @@ package drivers import ( "context" "math" - - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" ) // Transporter implements logic for moving data between two connectors // (the actual connector objects are provided in AsTransporter) type Transporter interface { - Transfer(ctx context.Context, source Source, sink Sink, t *TransferOpts, p Progress) error + Transfer(ctx context.Context, source map[string]any, sink map[string]any, t *TransferOpts, p Progress) error } type 
TransferOpts struct { @@ -44,113 +42,6 @@ func WithLimitInBytes(limit int64) TransferOption { } } -// A Source is expected to only return ok=true for one of the source types. -// The caller will know which type based on the connector type. -type Source interface { - BucketSource() (*BucketSource, bool) - DatabaseSource() (*DatabaseSource, bool) - FileSource() (*FileSource, bool) -} - -// A Sink is expected to only return ok=true for one of the sink types. -// The caller will know which type based on the connector type. -type Sink interface { - BucketSink() (*BucketSink, bool) - DatabaseSink() (*DatabaseSink, bool) -} - -type BucketSource struct { - ExtractPolicy *runtimev1.Source_ExtractPolicy - Properties map[string]any -} - -var _ Source = &BucketSource{} - -func (b *BucketSource) BucketSource() (*BucketSource, bool) { - return b, true -} - -func (b *BucketSource) DatabaseSource() (*DatabaseSource, bool) { - return nil, false -} - -func (b *BucketSource) FileSource() (*FileSource, bool) { - return nil, false -} - -type BucketSink struct { - Path string - // Format FileFormat - // NOTE: In future, may add file name and output partitioning config here -} - -var _ Sink = &BucketSink{} - -func (b *BucketSink) BucketSink() (*BucketSink, bool) { - return b, true -} - -func (b *BucketSink) DatabaseSink() (*DatabaseSink, bool) { - return nil, false -} - -type DatabaseSource struct { - // Pass only SQL OR Table - SQL string - Table string - Database string - Limit int - Props map[string]any -} - -var _ Source = &DatabaseSource{} - -func (d *DatabaseSource) BucketSource() (*BucketSource, bool) { - return nil, false -} - -func (d *DatabaseSource) DatabaseSource() (*DatabaseSource, bool) { - return d, true -} - -func (d *DatabaseSource) FileSource() (*FileSource, bool) { - return nil, false -} - -type DatabaseSink struct { - Table string - Append bool -} - -var _ Sink = &DatabaseSink{} - -func (d *DatabaseSink) BucketSink() (*BucketSink, bool) { - return nil, false -} - -func (d *DatabaseSink) DatabaseSink() (*DatabaseSink, bool) { - return d, true -} - -type FileSource struct { - Name string - Properties map[string]any -} - -var _ Source = &FileSource{} - -func (f *FileSource) BucketSource() (*BucketSource, bool) { - return nil, false -} - -func (f *FileSource) DatabaseSource() (*DatabaseSource, bool) { - return nil, false -} - -func (f *FileSource) FileSource() (*FileSource, bool) { - return f, true -} - // Progress is an interface for communicating progress info type Progress interface { Target(val int64, unit ProgressUnit) diff --git a/runtime/reconcilers/source.go b/runtime/reconcilers/source.go index 2c8f9632720..017aff3e3fa 100644 --- a/runtime/reconcilers/source.go +++ b/runtime/reconcilers/source.go @@ -363,70 +363,11 @@ func (r *SourceReconciler) ingestSource(ctx context.Context, src *runtimev1.Sour return err } -func driversSource(conn drivers.Handle, propsPB *structpb.Struct) (drivers.Source, error) { +func driversSource(conn drivers.Handle, propsPB *structpb.Struct) (map[string]any, error) { props := propsPB.AsMap() - switch conn.Driver() { - case "s3": - return &drivers.BucketSource{ - // ExtractPolicy: src.Policy, // TODO: Add - Properties: props, - }, nil - case "gcs": - return &drivers.BucketSource{ - // ExtractPolicy: src.Policy, // TODO: Add - Properties: props, - }, nil - case "https": - return &drivers.FileSource{ - Properties: props, - }, nil - case "local_file": - return &drivers.FileSource{ - Properties: props, - }, nil - case "motherduck": - query, ok := 
props["sql"].(string) - if !ok { - return nil, fmt.Errorf("property \"sql\" is mandatory for connector \"motherduck\"") - } - var db string - if val, ok := props["db"].(string); ok { - db = val - } - - return &drivers.DatabaseSource{ - SQL: query, - Database: db, - }, nil - case "duckdb": - query, ok := props["sql"].(string) - if !ok { - return nil, fmt.Errorf("property \"sql\" is mandatory for connector \"duckdb\"") - } - return &drivers.DatabaseSource{ - SQL: query, - }, nil - case "bigquery": - query, ok := props["sql"].(string) - if !ok { - return nil, fmt.Errorf("property \"sql\" is mandatory for connector \"bigquery\"") - } - return &drivers.DatabaseSource{ - SQL: query, - Props: props, - }, nil - default: - return nil, fmt.Errorf("source connector %q not supported", conn.Driver()) - } + return props, nil } -func driversSink(conn drivers.Handle, tableName string) (drivers.Sink, error) { - switch conn.Driver() { - case "duckdb": - return &drivers.DatabaseSink{ - Table: tableName, - }, nil - default: - return nil, fmt.Errorf("sink connector %q not supported", conn.Driver()) - } +func driversSink(conn drivers.Handle, tableName string) (map[string]any, error) { + return map[string]any{"table": tableName}, nil } diff --git a/runtime/services/catalog/artifacts/yaml/objects.go b/runtime/services/catalog/artifacts/yaml/objects.go index a67775089da..1dc63af35fa 100644 --- a/runtime/services/catalog/artifacts/yaml/objects.go +++ b/runtime/services/catalog/artifacts/yaml/objects.go @@ -3,11 +3,9 @@ package yaml import ( "errors" "fmt" - "strconv" "strings" "time" - "github.com/c2h5oh/datasize" "github.com/jinzhu/copier" "github.com/mitchellh/mapstructure" runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" @@ -37,8 +35,8 @@ type Source struct { GlobPageSize int `yaml:"glob.page_size,omitempty" mapstructure:"glob.page_size,omitempty"` HivePartition *bool `yaml:"hive_partitioning,omitempty" mapstructure:"hive_partitioning,omitempty"` Timeout int32 `yaml:"timeout,omitempty"` - ExtractPolicy *ExtractPolicy `yaml:"extract,omitempty"` Format string `yaml:"format,omitempty" mapstructure:"format,omitempty"` + Extract map[string]any `yaml:"extract,omitempty" mapstructure:"extract,omitempty"` DuckDBProps map[string]any `yaml:"duckdb,omitempty" mapstructure:"duckdb,omitempty"` Headers map[string]any `yaml:"headers,omitempty" mapstructure:"headers,omitempty"` AllowSchemaRelaxation *bool `yaml:"ingest.allow_schema_relaxation,omitempty" mapstructure:"allow_schema_relaxation,omitempty"` @@ -47,16 +45,6 @@ type Source struct { ProjectID string `yaml:"project_id,omitempty" mapstructure:"project_id,omitempty"` } -type ExtractPolicy struct { - Row *ExtractConfig `yaml:"rows,omitempty" mapstructure:"rows,omitempty"` - File *ExtractConfig `yaml:"files,omitempty" mapstructure:"files,omitempty"` -} - -type ExtractConfig struct { - Strategy string `yaml:"strategy,omitempty" mapstructure:"strategy,omitempty"` - Size string `yaml:"size,omitempty" mapstructure:"size,omitempty"` -} - type MetricsView struct { Label string `yaml:"title"` DisplayName string `yaml:"display_name,omitempty"` // for backwards compatibility @@ -119,39 +107,9 @@ func toSourceArtifact(catalog *drivers.CatalogEntry) (*Source, error) { source.Path = "" } - extract, err := toExtractArtifact(catalog.GetSource().GetPolicy()) - if err != nil { - return nil, err - } - - source.ExtractPolicy = extract return source, nil } -func toExtractArtifact(extract *runtimev1.Source_ExtractPolicy) (*ExtractPolicy, error) { - if extract == nil { - return 
nil, nil - } - - sourceExtract := &ExtractPolicy{} - // set file - if extract.FilesStrategy != runtimev1.Source_ExtractPolicy_STRATEGY_UNSPECIFIED { - sourceExtract.File = &ExtractConfig{} - sourceExtract.File.Strategy = extract.FilesStrategy.String() - sourceExtract.File.Size = fmt.Sprintf("%v", extract.FilesLimit) - } - - // set row - if extract.RowsStrategy != runtimev1.Source_ExtractPolicy_STRATEGY_UNSPECIFIED { - sourceExtract.Row = &ExtractConfig{} - sourceExtract.Row.Strategy = extract.RowsStrategy.String() - bytes := datasize.ByteSize(extract.RowsLimitBytes) - sourceExtract.Row.Size = bytes.HumanReadable() - } - - return sourceExtract, nil -} - func toMetricsViewArtifact(catalog *drivers.CatalogEntry) (*MetricsView, error) { metricsArtifact := &MetricsView{} err := copier.Copy(metricsArtifact, catalog.Object) @@ -175,6 +133,10 @@ func fromSourceArtifact(source *Source, path string) (*drivers.CatalogEntry, err props["region"] = source.Region } + if source.Extract != nil { + props["extract"] = source.Extract + } + if source.DuckDBProps != nil { props["duckdb"] = source.DuckDBProps } @@ -244,11 +206,6 @@ func fromSourceArtifact(source *Source, path string) (*drivers.CatalogEntry, err return nil, err } - extract, err := fromExtractArtifact(source.ExtractPolicy) - if err != nil { - return nil, err - } - name := fileutil.Stem(path) return &drivers.CatalogEntry{ Name: name, @@ -258,86 +215,11 @@ func fromSourceArtifact(source *Source, path string) (*drivers.CatalogEntry, err Name: name, Connector: source.Type, Properties: propsPB, - Policy: extract, TimeoutSeconds: source.Timeout, }, }, nil } -func fromExtractArtifact(policy *ExtractPolicy) (*runtimev1.Source_ExtractPolicy, error) { - if policy == nil { - return nil, nil - } - - extractPolicy := &runtimev1.Source_ExtractPolicy{} - - // parse file - if policy.File != nil { - // parse strategy - strategy, err := parseStrategy(policy.File.Strategy) - if err != nil { - return nil, err - } - - extractPolicy.FilesStrategy = strategy - - // parse size - size, err := strconv.ParseUint(policy.File.Size, 10, 64) - if err != nil { - return nil, fmt.Errorf("invalid size, parse failed with error %w", err) - } - if size <= 0 { - return nil, fmt.Errorf("invalid size %q", size) - } - - extractPolicy.FilesLimit = size - } - - // parse rows - if policy.Row != nil { - // parse strategy - strategy, err := parseStrategy(policy.Row.Strategy) - if err != nil { - return nil, err - } - - extractPolicy.RowsStrategy = strategy - - // parse size - // todo :: add support for number of rows - size, err := getBytes(policy.Row.Size) - if err != nil { - return nil, fmt.Errorf("invalid size, parse failed with error %w", err) - } - if size <= 0 { - return nil, fmt.Errorf("invalid size %q", size) - } - - extractPolicy.RowsLimitBytes = size - } - return extractPolicy, nil -} - -func parseStrategy(s string) (runtimev1.Source_ExtractPolicy_Strategy, error) { - switch strings.ToLower(s) { - case "tail": - return runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, nil - case "head": - return runtimev1.Source_ExtractPolicy_STRATEGY_HEAD, nil - default: - return runtimev1.Source_ExtractPolicy_STRATEGY_UNSPECIFIED, fmt.Errorf("invalid extract strategy %q", s) - } -} - -func getBytes(size string) (uint64, error) { - var s datasize.ByteSize - if err := s.UnmarshalText([]byte(size)); err != nil { - return 0, err - } - - return s.Bytes(), nil -} - func fromMetricsViewArtifact(metrics *MetricsView, path string) (*drivers.CatalogEntry, error) { if metrics.DisplayName != "" && metrics.Label == 
"" { // backwards compatibility diff --git a/runtime/services/catalog/artifacts/yaml/objects_test.go b/runtime/services/catalog/artifacts/yaml/objects_test.go deleted file mode 100644 index 96003b422a3..00000000000 --- a/runtime/services/catalog/artifacts/yaml/objects_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package yaml - -import ( - "reflect" - "testing" - - runtimev1 "github.com/rilldata/rill/proto/gen/rill/runtime/v1" -) - -func Test_fromExtractArtifact(t *testing.T) { - tests := []struct { - name string - input *ExtractPolicy - want *runtimev1.Source_ExtractPolicy - wantErr bool - }{ - { - name: "nil input", - input: nil, - want: nil, - wantErr: false, - }, - { - name: "parse row", - input: &ExtractPolicy{Row: &ExtractConfig{Strategy: "tail", Size: "23 KB"}}, - want: &runtimev1.Source_ExtractPolicy{ - RowsStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, - RowsLimitBytes: 23552, - }, - wantErr: false, - }, - { - name: "parse files", - input: &ExtractPolicy{File: &ExtractConfig{Strategy: "head", Size: "23"}}, - want: &runtimev1.Source_ExtractPolicy{ - FilesStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_HEAD, - FilesLimit: 23, - }, - wantErr: false, - }, - { - name: "parse both", - input: &ExtractPolicy{File: &ExtractConfig{Strategy: "tail", Size: "23"}, Row: &ExtractConfig{Strategy: "tail", Size: "512 B"}}, - want: &runtimev1.Source_ExtractPolicy{ - FilesStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, - FilesLimit: 23, - RowsStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, - RowsLimitBytes: 512, - }, - wantErr: false, - }, - { - name: "more examples", - input: &ExtractPolicy{File: &ExtractConfig{Strategy: "tail", Size: "23"}, Row: &ExtractConfig{Strategy: "tail", Size: "23 gb"}}, - want: &runtimev1.Source_ExtractPolicy{ - FilesStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, - FilesLimit: 23, - RowsStrategy: runtimev1.Source_ExtractPolicy_STRATEGY_TAIL, - RowsLimitBytes: 23 * 1024 * 1024 * 1024, - }, - wantErr: false, - }, - { - name: "invalid", - input: &ExtractPolicy{File: &ExtractConfig{Strategy: "tail", Size: "23"}, Row: &ExtractConfig{Strategy: "tail", Size: "23%"}}, - want: nil, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, err := fromExtractArtifact(tt.input) - if (err != nil) != tt.wantErr { - t.Errorf("fromExtractArtifact() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("fromExtractArtifact() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/runtime/services/catalog/migrator/sources/sources.go b/runtime/services/catalog/migrator/sources/sources.go index 3677bac5170..b4b55d85b16 100644 --- a/runtime/services/catalog/migrator/sources/sources.go +++ b/runtime/services/catalog/migrator/sources/sources.go @@ -123,9 +123,6 @@ func (m *sourceMigrator) IsEqual(ctx context.Context, cat1, cat2 *drivers.Catalo if !isSQLSource && cat1.GetSource().Connector != cat2.GetSource().Connector { return false } - if !comparePolicy(cat1.GetSource().GetPolicy(), cat2.GetSource().GetPolicy()) { - return false - } map2 := cat2.GetSource().Properties.AsMap() if isSQLSource { @@ -136,21 +133,6 @@ func (m *sourceMigrator) IsEqual(ctx context.Context, cat1, cat2 *drivers.Catalo return equal(cat1.GetSource().Properties.AsMap(), map2) } -func comparePolicy(p1, p2 *runtimev1.Source_ExtractPolicy) bool { - if (p1 != nil) == (p2 != nil) { - if p1 != nil { - // both non nil - return p1.FilesStrategy == p2.FilesStrategy && - p1.FilesLimit == p2.FilesLimit && - 
p1.RowsStrategy == p2.RowsStrategy && - p1.RowsLimitBytes == p2.RowsLimitBytes - } - // both nil - return true - } - return false -} - func (m *sourceMigrator) ExistsInOlap(ctx context.Context, olap drivers.OLAPStore, catalog *drivers.CatalogEntry) (bool, error) { _, err := olap.InformationSchema().Lookup(ctx, catalog.Name) if errors.Is(err, drivers.ErrNotFound) { @@ -351,72 +333,13 @@ func (p *progress) Observe(val int64, unit drivers.ProgressUnit) { } } -func source(connector string, src *runtimev1.Source) (drivers.Source, error) { +func source(connector string, src *runtimev1.Source) (map[string]any, error) { props := src.Properties.AsMap() - switch connector { - case "s3": - return &drivers.BucketSource{ - ExtractPolicy: src.Policy, - Properties: props, - }, nil - case "gcs": - return &drivers.BucketSource{ - ExtractPolicy: src.Policy, - Properties: props, - }, nil - case "https": - return &drivers.FileSource{ - Properties: props, - }, nil - case "local_file": - return &drivers.FileSource{ - Properties: props, - }, nil - case "motherduck": - query, ok := props["sql"].(string) - if !ok { - return nil, fmt.Errorf("property \"sql\" is mandatory for connector \"motherduck\"") - } - var db string - if val, ok := props["db"].(string); ok { - db = val - } - - return &drivers.DatabaseSource{ - SQL: query, - Database: db, - }, nil - case "duckdb": - query, ok := props["sql"].(string) - if !ok { - return nil, fmt.Errorf("property \"sql\" is mandatory for connector \"duckdb\"") - } - return &drivers.DatabaseSource{ - SQL: query, - }, nil - case "bigquery": - query, ok := props["sql"].(string) - if !ok { - return nil, fmt.Errorf("property \"sql\" is mandatory for connector \"bigquery\"") - } - return &drivers.DatabaseSource{ - SQL: query, - Props: props, - }, nil - default: - return nil, fmt.Errorf("connector %v not supported", connector) - } + return props, nil } -func sink(connector, tableName string) drivers.Sink { - switch connector { - case "duckdb": - return &drivers.DatabaseSink{ - Table: tableName, - } - default: - return nil - } +func sink(connector, tableName string) map[string]any { + return map[string]any{"table": tableName} } func connectorVariables(src *runtimev1.Source, env map[string]string, repoRoot string) map[string]any { diff --git a/web-common/src/proto/gen/rill/runtime/v1/catalog_pb.ts b/web-common/src/proto/gen/rill/runtime/v1/catalog_pb.ts index 5d54c2ef48b..d4d5727e775 100644 --- a/web-common/src/proto/gen/rill/runtime/v1/catalog_pb.ts +++ b/web-common/src/proto/gen/rill/runtime/v1/catalog_pb.ts @@ -4,7 +4,7 @@ // @ts-nocheck import type { BinaryReadOptions, FieldList, JsonReadOptions, JsonValue, PartialMessage, PlainMessage } from "@bufbuild/protobuf"; -import { Message, proto3, protoInt64, Struct } from "@bufbuild/protobuf"; +import { Message, proto3, Struct } from "@bufbuild/protobuf"; import { StructType } from "./schema_pb.js"; import { TimeGrain } from "./time_grain_pb.js"; @@ -142,13 +142,6 @@ export class Source extends Message { */ schema?: StructType; - /** - * extraction policy for the source - * - * @generated from field: rill.runtime.v1.Source.ExtractPolicy policy = 6; - */ - policy?: Source_ExtractPolicy; - /** * timeout for source ingestion in seconds * @@ -168,7 +161,6 @@ export class Source extends Message { { no: 2, name: "connector", kind: "scalar", T: 9 /* ScalarType.STRING */ }, { no: 3, name: "properties", kind: "message", T: Struct }, { no: 5, name: "schema", kind: "message", T: StructType }, - { no: 6, name: "policy", kind: "message", T: 
Source_ExtractPolicy }, { no: 7, name: "timeout_seconds", kind: "scalar", T: 5 /* ScalarType.INT32 */ }, ]); @@ -189,98 +181,6 @@ export class Source extends Message { } } -/** - * Extract policy for glob connectors - * - * @generated from message rill.runtime.v1.Source.ExtractPolicy - */ -export class Source_ExtractPolicy extends Message { - /** - * strategy for selecting rows in a file - * - * @generated from field: rill.runtime.v1.Source.ExtractPolicy.Strategy rows_strategy = 1; - */ - rowsStrategy = Source_ExtractPolicy_Strategy.UNSPECIFIED; - - /** - * could in future add: uint64 rows_limit = n; - * limit on data fetched in bytes - * - * @generated from field: uint64 rows_limit_bytes = 2; - */ - rowsLimitBytes = protoInt64.zero; - - /** - * strategy for selecting files - * - * @generated from field: rill.runtime.v1.Source.ExtractPolicy.Strategy files_strategy = 3; - */ - filesStrategy = Source_ExtractPolicy_Strategy.UNSPECIFIED; - - /** - * limit on number of files - * - * @generated from field: uint64 files_limit = 4; - */ - filesLimit = protoInt64.zero; - - constructor(data?: PartialMessage) { - super(); - proto3.util.initPartial(data, this); - } - - static readonly runtime: typeof proto3 = proto3; - static readonly typeName = "rill.runtime.v1.Source.ExtractPolicy"; - static readonly fields: FieldList = proto3.util.newFieldList(() => [ - { no: 1, name: "rows_strategy", kind: "enum", T: proto3.getEnumType(Source_ExtractPolicy_Strategy) }, - { no: 2, name: "rows_limit_bytes", kind: "scalar", T: 4 /* ScalarType.UINT64 */ }, - { no: 3, name: "files_strategy", kind: "enum", T: proto3.getEnumType(Source_ExtractPolicy_Strategy) }, - { no: 4, name: "files_limit", kind: "scalar", T: 4 /* ScalarType.UINT64 */ }, - ]); - - static fromBinary(bytes: Uint8Array, options?: Partial): Source_ExtractPolicy { - return new Source_ExtractPolicy().fromBinary(bytes, options); - } - - static fromJson(jsonValue: JsonValue, options?: Partial): Source_ExtractPolicy { - return new Source_ExtractPolicy().fromJson(jsonValue, options); - } - - static fromJsonString(jsonString: string, options?: Partial): Source_ExtractPolicy { - return new Source_ExtractPolicy().fromJsonString(jsonString, options); - } - - static equals(a: Source_ExtractPolicy | PlainMessage | undefined, b: Source_ExtractPolicy | PlainMessage | undefined): boolean { - return proto3.util.equals(Source_ExtractPolicy, a, b); - } -} - -/** - * @generated from enum rill.runtime.v1.Source.ExtractPolicy.Strategy - */ -export enum Source_ExtractPolicy_Strategy { - /** - * @generated from enum value: STRATEGY_UNSPECIFIED = 0; - */ - UNSPECIFIED = 0, - - /** - * @generated from enum value: STRATEGY_HEAD = 1; - */ - HEAD = 1, - - /** - * @generated from enum value: STRATEGY_TAIL = 2; - */ - TAIL = 2, -} -// Retrieve enum metadata with: proto3.getEnumType(Source_ExtractPolicy_Strategy) -proto3.util.setEnumType(Source_ExtractPolicy_Strategy, "rill.runtime.v1.Source.ExtractPolicy.Strategy", [ - { no: 0, name: "STRATEGY_UNSPECIFIED" }, - { no: 1, name: "STRATEGY_HEAD" }, - { no: 2, name: "STRATEGY_TAIL" }, -]); - /** * Model is the internal representation of a model definition * diff --git a/web-common/src/proto/gen/rill/ui/v1/dashboard_pb.ts b/web-common/src/proto/gen/rill/ui/v1/dashboard_pb.ts index 41c85f0c84d..308231a41cf 100644 --- a/web-common/src/proto/gen/rill/ui/v1/dashboard_pb.ts +++ b/web-common/src/proto/gen/rill/ui/v1/dashboard_pb.ts @@ -218,7 +218,7 @@ proto3.util.setEnumType(DashboardState_LeaderboardSortDirection, "rill.ui.v1.Das ]); /** - * + * 
* * SortType is used to determine how to sort the leaderboard * and dimension detail table, as well as where to place the * sort arrow. diff --git a/web-common/src/runtime-client/gen/index.schemas.ts b/web-common/src/runtime-client/gen/index.schemas.ts index c9544c86f64..81094afd5b5 100644 --- a/web-common/src/runtime-client/gen/index.schemas.ts +++ b/web-common/src/runtime-client/gen/index.schemas.ts @@ -606,7 +606,6 @@ export interface V1Source { connector?: string; properties?: V1SourceProperties; schema?: V1StructType; - policy?: SourceExtractPolicy; timeoutSeconds?: number; } @@ -1702,20 +1701,10 @@ export interface V1BucketPlanner { state?: V1BucketPlannerState; } -export type V1BucketExtractPolicyStrategy = - (typeof V1BucketExtractPolicyStrategy)[keyof typeof V1BucketExtractPolicyStrategy]; - -// eslint-disable-next-line @typescript-eslint/no-redeclare -export const V1BucketExtractPolicyStrategy = { - STRATEGY_UNSPECIFIED: "STRATEGY_UNSPECIFIED", - STRATEGY_HEAD: "STRATEGY_HEAD", - STRATEGY_TAIL: "STRATEGY_TAIL", -} as const; - export interface V1BucketExtractPolicy { - rowsStrategy?: V1BucketExtractPolicyStrategy; + rowsStrategy?: BucketExtractPolicyStrategy; rowsLimitBytes?: string; - filesStrategy?: V1BucketExtractPolicyStrategy; + filesStrategy?: BucketExtractPolicyStrategy; filesLimit?: string; } @@ -1784,23 +1773,6 @@ export interface StructTypeField { type?: Runtimev1Type; } -export type SourceExtractPolicyStrategy = - (typeof SourceExtractPolicyStrategy)[keyof typeof SourceExtractPolicyStrategy]; - -// eslint-disable-next-line @typescript-eslint/no-redeclare -export const SourceExtractPolicyStrategy = { - STRATEGY_UNSPECIFIED: "STRATEGY_UNSPECIFIED", - STRATEGY_HEAD: "STRATEGY_HEAD", - STRATEGY_TAIL: "STRATEGY_TAIL", -} as const; - -export interface SourceExtractPolicy { - rowsStrategy?: SourceExtractPolicyStrategy; - rowsLimitBytes?: string; - filesStrategy?: SourceExtractPolicyStrategy; - filesLimit?: string; -} - export interface SecurityV2FieldConditionV2 { condition?: string; names?: string[]; @@ -1915,3 +1887,13 @@ export interface ColumnTimeSeriesRequestBasicMeasure { expression?: string; sqlName?: string; } + +export type BucketExtractPolicyStrategy = + (typeof BucketExtractPolicyStrategy)[keyof typeof BucketExtractPolicyStrategy]; + +// eslint-disable-next-line @typescript-eslint/no-redeclare +export const BucketExtractPolicyStrategy = { + STRATEGY_UNSPECIFIED: "STRATEGY_UNSPECIFIED", + STRATEGY_HEAD: "STRATEGY_HEAD", + STRATEGY_TAIL: "STRATEGY_TAIL", +} as const;
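
With the typed Source/Sink interfaces removed from runtime/drivers/transporter.go, driversSource and driversSink in the reconciler (and their counterparts in the catalog migrator) now hand drivers a plain map[string]any. Below is a minimal sketch, not part of the patch, of how a consumer of these maps might pull out mandatory keys such as "sql" and "table"; the requireString helper, the example property values, and the main function are illustrative assumptions, not the actual driver code.

package main

import "fmt"

// requireString extracts a mandatory string property from a generic props map,
// mirroring the per-connector checks that previously lived in driversSource.
func requireString(props map[string]any, key, connector string) (string, error) {
	val, ok := props[key].(string)
	if !ok {
		return "", fmt.Errorf("property %q is mandatory for connector %q", key, connector)
	}
	return val, nil
}

func main() {
	// Source props as the reconciler now passes them: the raw property map,
	// with connector-specific keys (such as "db") riding along untouched.
	srcProps := map[string]any{
		"sql": "SELECT * FROM read_parquet('data/*.parquet')",
		"db":  "my_db",
	}
	// Sink props in the shape built by driversSink: just the target table.
	sinkProps := map[string]any{"table": "my_table"}

	sql, err := requireString(srcProps, "sql", "motherduck")
	if err != nil {
		panic(err)
	}
	table, _ := sinkProps["table"].(string)
	fmt.Printf("would ingest %q into table %q\n", sql, table)
}

The net effect is that per-connector validation moves out of the reconciler and migrator and into the individual drivers, which receive the unmodified property map.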
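The extract policy follows the same pattern: the dedicated ExtractPolicy/ExtractConfig YAML structs and the fromExtractArtifact, parseStrategy, and getBytes helpers are gone from objects.go, and the raw "extract:" block now travels inside the source properties as props["extract"]. The sketch below shows how such a generic map could be parsed on the driver side, assuming the same "rows"/"files" keys with "strategy" and "size" fields as the removed YAML structs; the extractConfig type and parseExtract function are hypothetical names, not the actual implementation.

package main

import (
	"fmt"
	"strings"

	"github.com/c2h5oh/datasize"
)

// extractConfig holds a parsed row policy: a head/tail strategy plus a byte limit.
type extractConfig struct {
	Strategy   string // "head" or "tail"
	LimitBytes uint64
}

// parseExtract reads the "rows" entry of a generic extract map, as it would
// appear under props["extract"] after fromSourceArtifact copies it through.
func parseExtract(raw map[string]any) (*extractConfig, error) {
	rows, ok := raw["rows"].(map[string]any)
	if !ok {
		return nil, nil // no row policy configured
	}
	cfg := &extractConfig{}
	if s, ok := rows["strategy"].(string); ok {
		switch strings.ToLower(s) {
		case "head", "tail":
			cfg.Strategy = strings.ToLower(s)
		default:
			return nil, fmt.Errorf("invalid extract strategy %q", s)
		}
	}
	if sizeStr, ok := rows["size"].(string); ok {
		var size datasize.ByteSize
		if err := size.UnmarshalText([]byte(sizeStr)); err != nil {
			return nil, fmt.Errorf("invalid size %q: %w", sizeStr, err)
		}
		cfg.LimitBytes = size.Bytes()
	}
	return cfg, nil
}

func main() {
	cfg, err := parseExtract(map[string]any{
		"rows": map[string]any{"strategy": "tail", "size": "23 KB"},
	})
	if err != nil {
		panic(err)
	}
	// With datasize, "23 KB" parses to 23552 bytes, matching the expectations
	// in the deleted objects_test.go.
	fmt.Printf("strategy=%s limit=%d bytes\n", cfg.Strategy, cfg.LimitBytes)
}

The head/tail strategies and byte-size limits keep the semantics of the removed code; only the transport changes, from a dedicated proto message to a map nested in the source properties.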