feat: add bulk insert support for Functions (#36715)
issue: #35853 and #35856

Signed-off-by: Buqian Zheng <[email protected]>
zhengbuqian authored Oct 12, 2024
1 parent d45e267 commit 82c5cf2
Showing 16 changed files with 235 additions and 51 deletions.
2 changes: 1 addition & 1 deletion internal/datanode/importv2/hash.go
@@ -33,7 +33,7 @@ func newHashedData(schema *schemapb.CollectionSchema, channelNum, partitionNum i
for i := 0; i < channelNum; i++ {
res[i] = make([]*storage.InsertData, partitionNum)
for j := 0; j < partitionNum; j++ {
-res[i][j], err = storage.NewInsertData(schema)
+res[i][j], err = storage.NewInsertDataWithFunctionOutputField(schema)
if err != nil {
return nil, err
}
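
The new constructor (defined in internal/storage/insert_data.go below) makes the per-channel, per-partition import buffers allocate Function output columns as well, so the computed embeddings have somewhere to land.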
17 changes: 16 additions & 1 deletion internal/datanode/importv2/task_import.go
@@ -195,6 +195,12 @@ func (t *ImportTask) importFile(reader importutilv2.Reader) error {
if err != nil {
return err
}
+if !importutilv2.IsBackup(t.req.GetOptions()) {
+err = RunEmbeddingFunction(t, data)
+if err != nil {
+return err
+}
+}
hashedData, err := HashData(t, data)
if err != nil {
return err
@@ -236,8 +242,17 @@ func (t *ImportTask) sync(hashedData HashedData) ([]*conc.Future[struct{}], []sy
if err != nil {
return nil, nil, err
}
+bm25Stats := make(map[int64]*storage.BM25Stats)
+for _, fn := range t.req.GetSchema().GetFunctions() {
+if fn.GetType() == schemapb.FunctionType_BM25 {
+// BM25 function guarantees single output field
+outputSparseFieldId := fn.GetOutputFieldIds()[0]
+bm25Stats[outputSparseFieldId] = storage.NewBM25Stats()
+bm25Stats[outputSparseFieldId].AppendFieldData(data.Data[outputSparseFieldId].(*storage.SparseFloatVectorFieldData))
+}
+}
syncTask, err := NewSyncTask(t.ctx, t.allocator, t.metaCaches, t.req.GetTs(),
-segmentID, partitionID, t.GetCollectionID(), channel, data, nil)
+segmentID, partitionID, t.GetCollectionID(), channel, data, nil, bm25Stats)
if err != nil {
return nil, nil, err
}
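
Read together, the two hunks above give a non-backup import this shape. The following is a condensed sketch assembled only from identifiers visible in this diff; error handling and the segment/partition plumbing are elided:

	// Fresh imports compute Function outputs (e.g. BM25 sparse vectors)
	// server-side; backup restores already carry them in their binlogs.
	if !importutilv2.IsBackup(t.req.GetOptions()) {
		if err := RunEmbeddingFunction(t, data); err != nil {
			return err
		}
	}
	// Per BM25 function, accumulate corpus statistics over the generated
	// sparse vectors, keyed by the function's single output field ID, and
	// hand them to the sync task as its new trailing argument.
	bm25Stats := make(map[int64]*storage.BM25Stats)
	for _, fn := range t.req.GetSchema().GetFunctions() {
		if fn.GetType() == schemapb.FunctionType_BM25 {
			id := fn.GetOutputFieldIds()[0]
			bm25Stats[id] = storage.NewBM25Stats()
			bm25Stats[id].AppendFieldData(data.Data[id].(*storage.SparseFloatVectorFieldData))
		}
	}
	syncTask, err := NewSyncTask(t.ctx, t.allocator, t.metaCaches, t.req.GetTs(),
		segmentID, partitionID, t.GetCollectionID(), channel, data, nil, bm25Stats)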
2 changes: 1 addition & 1 deletion internal/datanode/importv2/task_l0_import.go
@@ -227,7 +227,7 @@ func (t *L0ImportTask) syncDelete(delData []*storage.DeleteData) ([]*conc.Future
return nil, nil, err
}
syncTask, err := NewSyncTask(t.ctx, t.allocator, t.metaCaches, t.req.GetTs(),
-segmentID, partitionID, t.GetCollectionID(), channel, nil, data)
+segmentID, partitionID, t.GetCollectionID(), channel, nil, data, nil)
if err != nil {
return nil, nil, err
}
48 changes: 48 additions & 0 deletions internal/datanode/importv2/util.go
@@ -34,6 +34,7 @@ import (
"github.com/milvus-io/milvus/internal/flushcommon/syncmgr"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/util/function"
"github.com/milvus-io/milvus/pkg/common"
"github.com/milvus-io/milvus/pkg/log"
"github.com/milvus-io/milvus/pkg/metrics"
@@ -52,6 +53,7 @@ func NewSyncTask(ctx context.Context,
segmentID, partitionID, collectionID int64, vchannel string,
insertData *storage.InsertData,
deleteData *storage.DeleteData,
+bm25Stats map[int64]*storage.BM25Stats,
) (syncmgr.Task, error) {
metaCache := metaCaches[vchannel]
if _, ok := metaCache.GetSegmentByID(segmentID); !ok {
@@ -94,6 +96,9 @@
WithLevel(segmentLevel).
WithDataSource(metrics.BulkinsertDataSourceLabel).
WithBatchSize(int64(insertData.GetRowNum()))
+if bm25Stats != nil {
+syncPack.WithBM25Stats(bm25Stats)
+}

return serializer.EncodeBuffer(ctx, syncPack)
}
@@ -202,6 +207,49 @@ func AppendSystemFieldsData(task *ImportTask, data *storage.InsertData) error {
return nil
}

+func RunEmbeddingFunction(task *ImportTask, data *storage.InsertData) error {
+fns := task.GetSchema().GetFunctions()
+for _, fn := range fns {
+runner, err := function.NewFunctionRunner(task.GetSchema(), fn)
+if err != nil {
+return err
+}
+inputDatas := make([]any, 0, len(fn.InputFieldIds))
+for _, inputFieldID := range fn.InputFieldIds {
+inputDatas = append(inputDatas, data.Data[inputFieldID].GetDataRows())
+}
+outputFieldData, err := runner.BatchRun(inputDatas...)
+if err != nil {
+return err
+}
+for i, outputFieldID := range fn.OutputFieldIds {
+outputField := typeutil.GetField(task.GetSchema(), outputFieldID)
+// TODO: only vector output fields are supported; scalar output fields are not supported yet
+switch outputField.GetDataType() {
+case schemapb.DataType_FloatVector:
+data.Data[outputFieldID] = outputFieldData[i].(*storage.FloatVectorFieldData)
+case schemapb.DataType_BFloat16Vector:
+data.Data[outputFieldID] = outputFieldData[i].(*storage.BFloat16VectorFieldData)
+case schemapb.DataType_Float16Vector:
+data.Data[outputFieldID] = outputFieldData[i].(*storage.Float16VectorFieldData)
+case schemapb.DataType_BinaryVector:
+data.Data[outputFieldID] = outputFieldData[i].(*storage.BinaryVectorFieldData)
+case schemapb.DataType_SparseFloatVector:
+sparseArray := outputFieldData[i].(*schemapb.SparseFloatArray)
+data.Data[outputFieldID] = &storage.SparseFloatVectorFieldData{
+SparseFloatArray: schemapb.SparseFloatArray{
+Dim: sparseArray.GetDim(),
+Contents: sparseArray.GetContents(),
+},
+}
+default:
+return fmt.Errorf("unsupported output data type for embedding function: %s", outputField.GetDataType().String())
+}
+}
+}
+return nil
+}
+
func GetInsertDataRowCount(data *storage.InsertData, schema *schemapb.CollectionSchema) int {
fields := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) int64 {
return field.GetFieldID()
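
RunEmbeddingFunction leans on a small runner contract from internal/util/function. The sketch below only restates the usage observed above; the concrete FunctionRunner interface may carry more methods, and the input/output arity rule is inferred from the loops in the hunk:

	// One runner per schema-declared Function.
	runner, err := function.NewFunctionRunner(task.GetSchema(), fn)
	if err != nil {
		return err
	}
	// BatchRun takes one argument per entry in fn.InputFieldIds (the raw
	// rows of that input field) and returns one output per entry in
	// fn.OutputFieldIds; the caller type-asserts each output to a concrete
	// vector field data type, as the switch above does.
	outputs, err := runner.BatchRun(inputDatas...)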
16 changes: 14 additions & 2 deletions internal/storage/insert_data.go
@@ -50,10 +50,14 @@ type InsertData struct {
}

func NewInsertData(schema *schemapb.CollectionSchema) (*InsertData, error) {
-return NewInsertDataWithCap(schema, 0)
+return NewInsertDataWithCap(schema, 0, false)
}

-func NewInsertDataWithCap(schema *schemapb.CollectionSchema, cap int) (*InsertData, error) {
+func NewInsertDataWithFunctionOutputField(schema *schemapb.CollectionSchema) (*InsertData, error) {
+return NewInsertDataWithCap(schema, 0, true)
+}
+
+func NewInsertDataWithCap(schema *schemapb.CollectionSchema, cap int, withFunctionOutput bool) (*InsertData, error) {
if schema == nil {
return nil, merr.WrapErrParameterMissing("collection schema")
}
@@ -69,6 +73,14 @@ func NewInsertDataWithCap(schema *schemapb.CollectionSchema, cap int) (*InsertData, error) {
if field.IsPartitionKey && field.GetNullable() {
return nil, merr.WrapErrParameterInvalidMsg("partition key field not support nullable")
}
+if field.IsFunctionOutput {
+if field.IsPrimaryKey || field.IsPartitionKey {
+return nil, merr.WrapErrParameterInvalidMsg("function output field should not be primary key or partition key")
+}
+if !withFunctionOutput {
+continue
+}
+}
fieldData, err := NewFieldData(field.DataType, field, cap)
if err != nil {
return nil, err
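
The net effect is two entry points. A sketch of the intended call sites, under the assumption (consistent with hash.go above) that regular inserts receive Function output columns separately while bulk import must materialize buffers for them:

	buf, err := storage.NewInsertData(schema)                        // skips IsFunctionOutput fields
	buf, err = storage.NewInsertDataWithFunctionOutputField(schema)  // allocates them as well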
1 change: 1 addition & 0 deletions internal/util/importutilv2/binlog/util.go
@@ -114,5 +114,6 @@ func verify(schema *schemapb.CollectionSchema, insertLogs map[int64][]string) error {
fieldID, len(logs), common.RowIDField, len(insertLogs[common.RowIDField])))
}
}
+// for Function output fields, we do not re-run the Function when restoring from a backup.
return nil
}
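
This pairs with the IsBackup guard added in task_import.go above: a backup's insert binlogs already contain the Function output columns, so a restore verifies and ingests them as-is instead of recomputing them.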
18 changes: 18 additions & 0 deletions internal/util/importutilv2/csv/reader_test.go
@@ -77,6 +77,24 @@ func (suite *ReaderSuite) run(dataType schemapb.DataType, elemType schemapb.Data
},
},
}
+if dataType == schemapb.DataType_VarChar {
+// Add a BM25 function if data type is VarChar
+schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+FieldID: 103,
+Name: "sparse",
+DataType: schemapb.DataType_SparseFloatVector,
+IsFunctionOutput: true,
+})
+schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+Id: 1000,
+Name: "bm25",
+Type: schemapb.FunctionType_BM25,
+InputFieldIds: []int64{102},
+InputFieldNames: []string{dataType.String()},
+OutputFieldIds: []int64{103},
+OutputFieldNames: []string{"sparse"},
+})
+}

// config
// csv separator
32 changes: 14 additions & 18 deletions internal/util/importutilv2/csv/row_parser.go
@@ -26,10 +26,20 @@ type rowParser struct {
}

func NewRowParser(schema *schemapb.CollectionSchema, header []string, nullkey string) (RowParser, error) {
-name2Field := lo.KeyBy(schema.GetFields(),
-func(field *schemapb.FieldSchema) string {
-return field.GetName()
-})
+pkField, err := typeutil.GetPrimaryFieldSchema(schema)
+if err != nil {
+return nil, err
+}
+dynamicField := typeutil.GetDynamicField(schema)
+
+name2Field := lo.SliceToMap(
+lo.Filter(schema.GetFields(), func(field *schemapb.FieldSchema, _ int) bool {
+return !field.GetIsFunctionOutput() && !typeutil.IsAutoPKField(field) && field.GetName() != dynamicField.GetName()
+}),
+func(field *schemapb.FieldSchema) (string, *schemapb.FieldSchema) {
+return field.GetName(), field
+},
+)

name2Dim := make(map[string]int)
for name, field := range name2Field {
@@ -42,20 +52,6 @@ func NewRowParser(schema *schemapb.CollectionSchema, header []string, nullkey st
}
}

-pkField, err := typeutil.GetPrimaryFieldSchema(schema)
-if err != nil {
-return nil, err
-}
-
-if pkField.GetAutoID() {
-delete(name2Field, pkField.GetName())
-}
-
-dynamicField := typeutil.GetDynamicField(schema)
-if dynamicField != nil {
-delete(name2Field, dynamicField.GetName())
-}
-
// check if csv header provides the primary key while it should be auto-generated
if pkField.GetAutoID() && lo.Contains(header, pkField.GetName()) {
return nil, merr.WrapErrImportFailed(
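
Concretely, the CSV header may now only name plain fields. A sketch of the resulting contract (the header values and the "\N" null key are illustrative assumptions):

	// No longer present in name2Field, so never looked up from the header:
	//   - the auto-ID primary key (generated server-side; additionally
	//     rejected if the header provides it, per the check below)
	//   - Function output fields such as the BM25 sparse vector
	//   - the dynamic field
	header := []string{"text", "vec"} // hypothetical plain fields only
	parser, err := NewRowParser(schema, header, "\\N")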
19 changes: 19 additions & 0 deletions internal/util/importutilv2/json/reader_test.go
@@ -101,6 +101,25 @@ func (suite *ReaderSuite) run(dataType schemapb.DataType, elemType schemapb.Data
},
}

+if dataType == schemapb.DataType_VarChar {
+// Add a BM25 function if data type is VarChar
+schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+FieldID: 103,
+Name: "sparse",
+DataType: schemapb.DataType_SparseFloatVector,
+IsFunctionOutput: true,
+})
+schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+Id: 1000,
+Name: "bm25",
+Type: schemapb.FunctionType_BM25,
+InputFieldIds: []int64{102},
+InputFieldNames: []string{dataType.String()},
+OutputFieldIds: []int64{103},
+OutputFieldNames: []string{"sparse"},
+})
+}
+
insertData, err := testutil.CreateInsertData(schema, suite.numRows)
suite.NoError(err)

17 changes: 7 additions & 10 deletions internal/util/importutilv2/json/row_parser.go
@@ -62,20 +62,17 @@ func NewRowParser(schema *schemapb.CollectionSchema) (RowParser, error) {
if err != nil {
return nil, err
}
+dynamicField := typeutil.GetDynamicField(schema)

-name2FieldID := lo.SliceToMap(schema.GetFields(),
+name2FieldID := lo.SliceToMap(
+lo.Filter(schema.GetFields(), func(field *schemapb.FieldSchema, _ int) bool {
+return !field.GetIsFunctionOutput() && !typeutil.IsAutoPKField(field) && field.GetName() != dynamicField.GetName()
+}),
func(field *schemapb.FieldSchema) (string, int64) {
return field.GetName(), field.GetFieldID()
-})
-
-if pkField.GetAutoID() {
-delete(name2FieldID, pkField.GetName())
-}
+},
+)

-dynamicField := typeutil.GetDynamicField(schema)
-if dynamicField != nil {
-delete(name2FieldID, dynamicField.GetName())
-}
return &rowParser{
id2Dim: id2Dim,
id2Field: id2Field,
16 changes: 12 additions & 4 deletions internal/util/importutilv2/numpy/reader.go
@@ -132,24 +132,32 @@ func CreateReaders(ctx context.Context, cm storage.ChunkManager, schema *schemap
return name, path
})
for _, field := range schema.GetFields() {
+path, hasPath := nameToPath[field.GetName()]
if field.GetIsPrimaryKey() && field.GetAutoID() {
-if _, ok := nameToPath[field.GetName()]; ok {
+if hasPath {
return nil, merr.WrapErrImportFailed(
fmt.Sprintf("the primary key '%s' is auto-generated, no need to provide", field.GetName()))
}
continue
}
-if _, ok := nameToPath[field.GetName()]; !ok {
+if field.GetIsFunctionOutput() {
+if hasPath {
+return nil, merr.WrapErrImportFailed(
+fmt.Sprintf("field %s is Function output, should not be provided. Provided files: %v", field.GetName(), lo.Values(nameToPath)))
+}
+continue
+}
+if !hasPath {
if field.GetIsDynamic() {
continue
}
return nil, merr.WrapErrImportFailed(
fmt.Sprintf("no file for field: %s, files: %v", field.GetName(), lo.Values(nameToPath)))
}
-reader, err := cm.Reader(ctx, nameToPath[field.GetName()])
+reader, err := cm.Reader(ctx, path)
if err != nil {
return nil, merr.WrapErrImportFailed(
fmt.Sprintf("failed to read the file '%s', error: %s", nameToPath[field.GetName()], err.Error()))
fmt.Sprintf("failed to read the file '%s', error: %s", path, err.Error()))
}
readers[field.GetFieldID()] = reader
}
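
The per-field branching above amounts to this decision table (a restatement of the code, no new behavior):

	// For each schema field, with hasPath = "a .npy file was provided":
	//   auto-ID primary key : hasPath -> error (must not be provided); else skip
	//   Function output     : hasPath -> error (must not be provided); else skip
	//   dynamic field       : !hasPath -> skip
	//   any other field     : !hasPath -> error (file required); else open a reader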
38 changes: 38 additions & 0 deletions internal/util/importutilv2/numpy/reader_test.go
@@ -111,13 +111,36 @@ func (suite *ReaderSuite) run(dt schemapb.DataType) {
},
},
}

+if dt == schemapb.DataType_VarChar {
+// Add a BM25 function if data type is VarChar
+schema.Fields = append(schema.Fields, &schemapb.FieldSchema{
+FieldID: 103,
+Name: "sparse",
+DataType: schemapb.DataType_SparseFloatVector,
+IsFunctionOutput: true,
+})
+schema.Functions = append(schema.Functions, &schemapb.FunctionSchema{
+Id: 1000,
+Name: "bm25",
+Type: schemapb.FunctionType_BM25,
+InputFieldIds: []int64{102},
+InputFieldNames: []string{dt.String()},
+OutputFieldIds: []int64{103},
+OutputFieldNames: []string{"sparse"},
+})
+}
+
insertData, err := testutil.CreateInsertData(schema, suite.numRows)
suite.NoError(err)
fieldIDToField := lo.KeyBy(schema.GetFields(), func(field *schemapb.FieldSchema) int64 {
return field.GetFieldID()
})
files := make(map[int64]string)
for _, field := range schema.GetFields() {
+if field.GetIsFunctionOutput() {
+continue
+}
files[field.GetFieldID()] = fmt.Sprintf("%s.npy", field.GetName())
}

@@ -452,4 +475,19 @@ func TestCreateReaders(t *testing.T) {
}
_, err = CreateReaders(ctx, cm, schema, []string{"pk", "vec"})
assert.NoError(t, err)

+// auto id and Function
+schema = &schemapb.CollectionSchema{
+Fields: []*schemapb.FieldSchema{
+{Name: "pk", DataType: schemapb.DataType_Int64, IsPrimaryKey: true, AutoID: true},
+{Name: "vec", DataType: schemapb.DataType_FloatVector},
+{Name: "text", DataType: schemapb.DataType_VarChar},
+{Name: "sparse", DataType: schemapb.DataType_SparseFloatVector, IsFunctionOutput: true},
+},
+Functions: []*schemapb.FunctionSchema{
+{Name: "bm25", InputFieldNames: []string{"text"}, OutputFieldNames: []string{"sparse"}},
+},
+}
+_, err = CreateReaders(ctx, cm, schema, []string{"vec", "text"})
+assert.NoError(t, err)
}