From f49604bcf8904c8e6382ff6238c6bb0f23620b5c Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Tue, 3 Dec 2024 11:39:31 +0530 Subject: [PATCH 01/17] ELiminating redundant checks --- internal/eval/bytearray.go | 2 +- internal/eval/eval_test.go | 4 ++-- internal/eval/store_eval.go | 33 +++++++++----------------- internal/eval/type_string.go | 46 ++++++++++++++++-------------------- internal/object/deep_copy.go | 2 +- internal/object/object.go | 4 ---- 6 files changed, 36 insertions(+), 55 deletions(-) diff --git a/internal/eval/bytearray.go b/internal/eval/bytearray.go index 45900aa21..dd0d1813d 100644 --- a/internal/eval/bytearray.go +++ b/internal/eval/bytearray.go @@ -36,7 +36,7 @@ func NewByteArrayFromObj(obj *object.Obj) (*ByteArray, error) { } func getValueAsByteSlice(obj *object.Obj) ([]byte, error) { - oType := object.ExtractType(obj) + oType := obj.Type switch oType { case object.ObjTypeInt: return []byte(strconv.FormatInt(obj.Value.(int64), 10)), nil diff --git a/internal/eval/eval_test.go b/internal/eval/eval_test.go index 61ae149fe..1ecab654c 100644 --- a/internal/eval/eval_test.go +++ b/internal/eval/eval_test.go @@ -6719,7 +6719,7 @@ func testEvalAPPEND(t *testing.T, store *dstore.Store) { migratedOutput: EvalResponse{Result: 3, Error: nil}, validator: func(output []byte) { obj := store.Get("key") - oType := object.ExtractType(obj) + oType := obj.Type if oType != object.ObjTypeInt { t.Errorf("unexpected encoding") } @@ -6774,7 +6774,7 @@ func testEvalAPPEND(t *testing.T, store *dstore.Store) { migratedOutput: EvalResponse{Result: 2, Error: nil}, validator: func(output []byte) { obj := store.Get("key") - oType := object.ExtractType(obj) + oType := obj.Type if oType != object.ObjTypeString { t.Errorf("unexpected encoding") } diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index 37b694f5e..3566a76ff 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -338,7 +338,7 @@ func evalGET(args []string, store *dstore.Store) *EvalResponse { } // Decode and return the value based on its encoding - switch oType := object.ExtractType(obj); oType { + switch oType := obj.Type; oType { case object.ObjTypeInt: // Value is stored as an int64, so use type assertion if IsInt64(obj.Value) { @@ -642,7 +642,7 @@ func evalGETRANGE(args []string, store *dstore.Store) *EvalResponse { } var str string - switch oType := object.ExtractType(obj); oType { + switch oType := obj.Type; oType { case object.ObjTypeString: if val, ok := obj.Value.(string); ok { str = val @@ -1095,18 +1095,7 @@ func evalAPPEND(args []string, store *dstore.Store) *EvalResponse { // Key does not exist, create a new key if obj == nil { - // Deduce type and encoding based on the value if no leading zeros - oType := deduceType(value) - - // Transform the value based on the type and encoding - storedValue, err := storeValueWithType(value, oType) - if err != nil { - return &EvalResponse{ - Result: nil, - Error: err, - } - } - + storedValue, oType := getRawStringOrInt(value) store.Put(key, store.NewObj(storedValue, exDurationMs, oType)) return &EvalResponse{ Result: len(value), @@ -1121,7 +1110,7 @@ func evalAPPEND(args []string, store *dstore.Store) *EvalResponse { Error: diceerrors.ErrWrongTypeOperation, } } - oType := object.ExtractType(obj) + oType := obj.Type // Transform the value based on the current encoding currentValue, err := convertValueToString(obj, oType) @@ -4441,7 +4430,7 @@ func evalGETDEL(args []string, store *dstore.Store) *EvalResponse { objVal := store.GetDel(key) // 
Decode and return the value based on its encoding - switch oType := object.ExtractType(objVal); oType { + switch oType := objVal.Type; oType { case object.ObjTypeInt: // Value is stored as an int64, so use type assertion if IsInt64(objVal.Value) { @@ -5082,7 +5071,7 @@ func evalSETBIT(args []string, store *dstore.Store) *EvalResponse { object.AssertType(obj.Type, object.ObjTypeString) == nil || object.AssertType(obj.Type, object.ObjTypeInt) == nil { var byteArray *ByteArray - oType := object.ExtractType(obj) + oType := obj.Type switch oType { case object.ObjTypeByteArray: @@ -5180,7 +5169,7 @@ func evalGETBIT(args []string, store *dstore.Store) *EvalResponse { } requiredByteArraySize := offset>>3 + 1 - switch oType := object.ExtractType(obj); oType { + switch oType := obj.Type; oType { case object.ObjTypeSet: return &EvalResponse{ Result: nil, @@ -5429,7 +5418,7 @@ func bitfieldEvalGeneric(args []string, store *dstore.Store, isReadOnly bool) *E var value *ByteArray var err error - switch oType := object.ExtractType(obj); oType { + switch oType := obj.Type; oType { case object.ObjTypeByteArray: value = obj.Value.(*ByteArray) case object.ObjTypeString, object.ObjTypeInt: @@ -6383,7 +6372,7 @@ func evalTYPE(args []string, store *dstore.Store) *EvalResponse { } var typeStr string - switch oType := object.ExtractType(obj); oType { + switch oType := obj.Type; oType { case object.ObjTypeString, object.ObjTypeInt, object.ObjTypeByteArray: typeStr = "string" case object.ObjTypeDequeue: @@ -6430,7 +6419,7 @@ func evalTYPE(args []string, store *dstore.Store) *EvalResponse { // var value []byte -// switch oType, _ := object.ExtractType(obj); oType { +// switch oType, _ := obj.Type; oType { // case object.ObjTypeByteArray: // byteArray := obj.Value.(*ByteArray) // byteArrayObject := *byteArray @@ -6491,7 +6480,7 @@ func evalTYPE(args []string, store *dstore.Store) *EvalResponse { // values[i] = make([]byte, 0) // } else { // // handle the case when it is byte array -// switch oType, _ := object.ExtractType(obj); oType { +// switch oType, _ := obj.Type; oType { // case object.ObjTypeByteArray: // byteArray := obj.Value.(*ByteArray) // byteArrayObject := *byteArray diff --git a/internal/eval/type_string.go b/internal/eval/type_string.go index dc4c716be..f320f5510 100644 --- a/internal/eval/type_string.go +++ b/internal/eval/type_string.go @@ -7,8 +7,22 @@ import ( "github.com/dicedb/dice/internal/object" ) -// Similar to -// tryObjectEncoding function in Redis +type String struct { + Value string + Type uint8 +} + +func NewString(value string) *String { + return &String{ + Value: value, + Type: object.ObjTypeString, + } +} + +func (s *String) Serialize() []byte { + return []byte{} +} + func deduceType(v string) (o uint8) { // Check if the value has leading zero if len(v) > 1 && v[0] == '0' { @@ -21,30 +35,12 @@ func deduceType(v string) (o uint8) { return object.ObjTypeString } -// Function to handle converting the value based on the encoding type -func storeValueWithType(value string, oType uint8) (interface{}, error) { - var returnValue interface{} - - // treat as string if value has leading zero - if len(value) > 1 && value[0] == '0' { - // If so, treat as string - return value, nil +func getRawStringOrInt(value string) (interface{}, uint8) { + intValue, err := strconv.ParseInt(value, 10, 64) + if err != nil { // value is not an integer, hence a string + return value, object.ObjTypeString } - - switch oType { - case object.ObjTypeInt: - intValue, err := strconv.ParseInt(value, 10, 64) - if err != nil 
{ - return nil, diceerrors.ErrWrongTypeOperation - } - returnValue = intValue - case object.ObjTypeString: - returnValue = value - default: - return nil, diceerrors.ErrWrongTypeOperation - } - - return returnValue, nil + return intValue, object.ObjTypeInt // value is an integer } // Function to convert the value to a string for concatenation or manipulation diff --git a/internal/object/deep_copy.go b/internal/object/deep_copy.go index 18a10b73d..0e211be9f 100644 --- a/internal/object/deep_copy.go +++ b/internal/object/deep_copy.go @@ -19,7 +19,7 @@ func (obj *Obj) DeepCopy() *Obj { newObj.Value = copier.DeepCopy() } else { // Handle types that are not DeepCopyable - sourceType := ExtractType(obj) + sourceType := obj.Type switch sourceType { case ObjTypeString: sourceValue := obj.Value.(string) diff --git a/internal/object/object.go b/internal/object/object.go index 63b6b0c30..630230980 100644 --- a/internal/object/object.go +++ b/internal/object/object.go @@ -82,7 +82,3 @@ var ObjTypeSortedSet uint8 = 8 var ObjTypeCountMinSketch uint8 = 9 var ObjTypeBF uint8 = 10 var ObjTypeDequeue uint8 = 11 - -func ExtractType(obj *Obj) (e1 uint8) { - return obj.Type -} From 45fb9864749ea2216c25f7ad7b30d191fd83be64 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Tue, 3 Dec 2024 11:45:56 +0530 Subject: [PATCH 02/17] Creating a type for ObjectType instead of vanilla uint8 --- internal/eval/bytearray.go | 2 +- internal/eval/eval.go | 2 +- internal/eval/store_eval.go | 6 +++--- internal/eval/type_string.go | 20 ++++++-------------- internal/object/object.go | 30 +++++++++++++++++++----------- internal/object/typeencoding.go | 4 ++-- internal/store/store.go | 2 +- 7 files changed, 33 insertions(+), 33 deletions(-) diff --git a/internal/eval/bytearray.go b/internal/eval/bytearray.go index dd0d1813d..4bf4f1f0a 100644 --- a/internal/eval/bytearray.go +++ b/internal/eval/bytearray.go @@ -81,7 +81,7 @@ func getByteArrayValueAsByteSlice(obj *object.Obj) ([]byte, error) { } // ByteSliceToObj converts a byte slice to an Obj of the specified type and encoding -func ByteSliceToObj(store *dstore.Store, oldObj *object.Obj, b []byte, objType uint8) (*object.Obj, error) { +func ByteSliceToObj(store *dstore.Store, oldObj *object.Obj, b []byte, objType object.ObjectType) (*object.Obj, error) { switch objType { case object.ObjTypeInt: return ByteSliceToIntObj(store, oldObj, b) diff --git a/internal/eval/eval.go b/internal/eval/eval.go index 7c1173b1f..95f708c96 100644 --- a/internal/eval/eval.go +++ b/internal/eval/eval.go @@ -165,7 +165,7 @@ func evalMSET(args []string, store *dstore.Store) []byte { insertMap := make(map[string]*object.Obj, len(args)/2) for i := 0; i < len(args); i += 2 { key, value := args[i], args[i+1] - oType := deduceType(value) + _, oType := getRawStringOrInt(value) var storedValue interface{} switch oType { case object.ObjTypeInt: diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index 3566a76ff..4fe6f04cd 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -201,7 +201,7 @@ func evalSET(args []string, store *dstore.Store) *EvalResponse { var oldVal *interface{} key, value = args[0], args[1] - oType := deduceType(value) + _, oType := getRawStringOrInt(value) for i := 2; i < len(args); i++ { arg := strings.ToUpper(args[i]) @@ -2477,7 +2477,7 @@ func incrByFloatCmd(args []string, incr float64, store *dstore.Store) *EvalRespo if obj == nil { strValue := formatFloat(incr, false) - oType := deduceType(strValue) + _, oType := getRawStringOrInt(strValue) obj = 
store.NewObj(strValue, -1, oType) store.Put(key, obj) return &EvalResponse{ @@ -2511,7 +2511,7 @@ func incrByFloatCmd(args []string, incr float64, store *dstore.Store) *EvalRespo } strValue := formatFloat(value, true) - oType := deduceType(strValue) + _, oType := getRawStringOrInt(strValue) // Remove the trailing decimal for integer values // to maintain consistency with redis diff --git a/internal/eval/type_string.go b/internal/eval/type_string.go index f320f5510..3553a00f5 100644 --- a/internal/eval/type_string.go +++ b/internal/eval/type_string.go @@ -9,7 +9,7 @@ import ( type String struct { Value string - Type uint8 + Type object.ObjectType } func NewString(value string) *String { @@ -23,28 +23,20 @@ func (s *String) Serialize() []byte { return []byte{} } -func deduceType(v string) (o uint8) { - // Check if the value has leading zero +func getRawStringOrInt(v string) (interface{}, object.ObjectType) { if len(v) > 1 && v[0] == '0' { // If so, treat as string - return object.ObjTypeString + return v, object.ObjTypeString } - if _, err := strconv.ParseInt(v, 10, 64); err == nil { - return object.ObjTypeInt - } - return object.ObjTypeString -} - -func getRawStringOrInt(value string) (interface{}, uint8) { - intValue, err := strconv.ParseInt(value, 10, 64) + intValue, err := strconv.ParseInt(v, 10, 64) if err != nil { // value is not an integer, hence a string - return value, object.ObjTypeString + return v, object.ObjTypeString } return intValue, object.ObjTypeInt // value is an integer } // Function to convert the value to a string for concatenation or manipulation -func convertValueToString(obj *object.Obj, oType uint8) (string, error) { +func convertValueToString(obj *object.Obj, oType object.ObjectType) (string, error) { var currentValueStr string switch oType { diff --git a/internal/object/object.go b/internal/object/object.go index 630230980..24c8527a5 100644 --- a/internal/object/object.go +++ b/internal/object/object.go @@ -25,7 +25,7 @@ package object // objects (e.g., strings, numbers, complex data structures like lists or maps). type Obj struct { // Type holds the type of the object (e.g., string, int, complex structure) - Type uint8 + Type ObjectType // LastAccessedAt stores the last access timestamp of the object. // It helps track when the object was last accessed and may be used for cache eviction or freshness tracking. 
@@ -72,13 +72,21 @@ type InternalObj struct { ExDuration int64 } -var ObjTypeString uint8 = 0 -var ObjTypeJSON uint8 = 3 -var ObjTypeByteArray uint8 = 4 -var ObjTypeInt uint8 = 5 -var ObjTypeSet uint8 = 6 -var ObjTypeHashMap uint8 = 7 -var ObjTypeSortedSet uint8 = 8 -var ObjTypeCountMinSketch uint8 = 9 -var ObjTypeBF uint8 = 10 -var ObjTypeDequeue uint8 = 11 +// ObjectType represents the type of a DiceDB object +type ObjectType uint8 + +// Define object types as constants +const ( + ObjTypeString ObjectType = iota + _ // skip 1 and 2 to maintain compatibility + _ + ObjTypeJSON + ObjTypeByteArray + ObjTypeInt + ObjTypeSet + ObjTypeHashMap + ObjTypeSortedSet + ObjTypeCountMinSketch + ObjTypeBF + ObjTypeDequeue +) diff --git a/internal/object/typeencoding.go b/internal/object/typeencoding.go index b8f677066..8934fe710 100644 --- a/internal/object/typeencoding.go +++ b/internal/object/typeencoding.go @@ -6,14 +6,14 @@ import ( diceerrors "github.com/dicedb/dice/internal/errors" ) -func AssertTypeWithError(te, t uint8) error { +func AssertTypeWithError(te, t ObjectType) error { if te != t { return errors.New("WRONGTYPE Operation against a key holding the wrong kind of value") } return nil } -func AssertType(_type, expectedType uint8) []byte { +func AssertType(_type, expectedType ObjectType) []byte { if err := AssertTypeWithError(_type, expectedType); err != nil { return diceerrors.NewErrWithMessage(diceerrors.WrongKeyTypeErr) } diff --git a/internal/store/store.go b/internal/store/store.go index 5c1f46297..6bb18aecb 100644 --- a/internal/store/store.go +++ b/internal/store/store.go @@ -84,7 +84,7 @@ func ResetStore(store *Store) *Store { return store } -func (store *Store) NewObj(value interface{}, expDurationMs int64, oType uint8) *object.Obj { +func (store *Store) NewObj(value interface{}, expDurationMs int64, oType object.ObjectType) *object.Obj { obj := &object.Obj{ Value: value, Type: oType, From 6d4a7568a5f43284f9e703f92ee1a595abe789da Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Tue, 3 Dec 2024 11:48:29 +0530 Subject: [PATCH 03/17] Removing the additional check --- internal/eval/eval.go | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/internal/eval/eval.go b/internal/eval/eval.go index 95f708c96..0b28e079d 100644 --- a/internal/eval/eval.go +++ b/internal/eval/eval.go @@ -165,16 +165,7 @@ func evalMSET(args []string, store *dstore.Store) []byte { insertMap := make(map[string]*object.Obj, len(args)/2) for i := 0; i < len(args); i += 2 { key, value := args[i], args[i+1] - _, oType := getRawStringOrInt(value) - var storedValue interface{} - switch oType { - case object.ObjTypeInt: - storedValue, _ = strconv.ParseInt(value, 10, 64) - case object.ObjTypeString: - storedValue = value - default: - return clientio.Encode(fmt.Errorf("ERR unsupported type: %d", oType), false) - } + storedValue, oType := getRawStringOrInt(value) insertMap[key] = store.NewObj(storedValue, exDurationMs, oType) } From 1355e7c3346fbe4b9e2d38492d8b657a11c8e516 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Tue, 3 Dec 2024 15:27:12 +0530 Subject: [PATCH 04/17] Bloomfilter naming conventions --- internal/eval/bloom_test.go | 6 +-- internal/eval/store_eval.go | 35 +++++++------- internal/eval/{bloom.go => type_bloom.go} | 56 +++++++++++------------ internal/eval/type_string.go | 6 +-- 4 files changed, 51 insertions(+), 52 deletions(-) rename internal/eval/{bloom.go => type_bloom.go} (87%) diff --git a/internal/eval/bloom_test.go b/internal/eval/bloom_test.go index 3d06c3daa..5ef929250 
100644 --- a/internal/eval/bloom_test.go +++ b/internal/eval/bloom_test.go @@ -100,19 +100,19 @@ func TestGetOrCreateBloomFilter(t *testing.T) { opts := defaultBloomOpts() // Should create a new filter under the key `key`. - bloom, err := getOrCreateBloomFilter(key, store, opts) + bloom, err := GetOrCreateBloomFilter(key, store, opts) if bloom == nil || err != nil { t.Errorf("nil bloom or non-nil error returned while creating new filter - key: %s, opts: %+v, err: %v", key, opts, err) } // Should get the filter (which was created above) - bloom, err = getOrCreateBloomFilter(key, store, opts) + bloom, err = GetOrCreateBloomFilter(key, store, opts) if bloom == nil || err != nil { t.Errorf("nil bloom or non-nil error returned while fetching existing filter - key: %s, opts: %+v, err: %v", key, opts, err) } // Should get the filter with nil opts - bloom, err = getOrCreateBloomFilter(key, store, nil) + bloom, err = GetOrCreateBloomFilter(key, store, nil) if bloom == nil || err != nil { t.Errorf("nil bloom or non-nil error returned while fetching existing filter - key: %s, opts: %+v, err: %v", key, opts, err) } diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index 4fe6f04cd..343a386e4 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -2867,11 +2867,17 @@ func evalBFRESERVE(args []string, store *dstore.Store) *EvalResponse { return makeEvalError(err) } - _, err = CreateBloomFilter(args[0], store, opts) - if err != nil { + key := args[0] + + _, err = GetBloomFilter(key, store) + if err != nil && err != diceerrors.ErrKeyNotFound { // bloom filter does not exist return makeEvalError(err) + } else if err != nil && err == diceerrors.ErrKeyNotFound { // key does not exists + CreateOrReplaceBloomFilter(key, opts, store) + return makeEvalResult(clientio.OK) + } else { // bloom filter already exists + return makeEvalResult(clientio.OK) } - return makeEvalResult(clientio.OK) } // evalBFADD evaluates the BF.ADD command responsible for adding an element to a bloom filter. 
If the filter does not @@ -2881,12 +2887,12 @@ func evalBFADD(args []string, store *dstore.Store) *EvalResponse { return makeEvalError(diceerrors.ErrWrongArgumentCount("BF.ADD")) } - bloom, err := getOrCreateBloomFilter(args[0], store, nil) + bf, err := GetOrCreateBloomFilter(args[0], store, nil) if err != nil { return makeEvalError(err) } - result, err := bloom.add(args[1]) + result, err := bf.add(args[1]) if err != nil { return makeEvalError(err) } @@ -2901,14 +2907,14 @@ func evalBFEXISTS(args []string, store *dstore.Store) *EvalResponse { return makeEvalError(diceerrors.ErrWrongArgumentCount("BF.EXISTS")) } - bloom, err := GetBloomFilter(args[0], store) - if err != nil { + bf, err := GetBloomFilter(args[0], store) + if err != nil && err != diceerrors.ErrKeyNotFound { return makeEvalError(err) - } - if bloom == nil { + } else if err != nil && err == diceerrors.ErrKeyNotFound { return makeEvalResult(clientio.IntegerZero) } - result, err := bloom.exists(args[1]) + + result, err := bf.exists(args[1]) if err != nil { return makeEvalError(err) } @@ -2922,25 +2928,20 @@ func evalBFINFO(args []string, store *dstore.Store) *EvalResponse { return makeEvalError(diceerrors.ErrWrongArgumentCount("BF.INFO")) } - bloom, err := GetBloomFilter(args[0], store) - + bf, err := GetBloomFilter(args[0], store) if err != nil { return makeEvalError(err) } - if bloom == nil { - return makeEvalError(diceerrors.ErrGeneral("not found")) - } opt := "" if len(args) == 2 { opt = args[1] } - result, err := bloom.info(opt) + result, err := bf.info(opt) if err != nil { return makeEvalError(err) } - return makeEvalResult(result) } diff --git a/internal/eval/bloom.go b/internal/eval/type_bloom.go similarity index 87% rename from internal/eval/bloom.go rename to internal/eval/type_bloom.go index 35b893705..3d7a6072c 100644 --- a/internal/eval/bloom.go +++ b/internal/eval/type_bloom.go @@ -92,7 +92,7 @@ func newBloomOpts(args []string) (*BloomOpts, error) { // newBloomFilter creates and returns a new filter. It is responsible for initializing the // underlying bit array. -func newBloomFilter(opts *BloomOpts) *Bloom { +func NewBloom(opts *BloomOpts) *Bloom { // Calculate bits per element // bpe = -log(errorRate)/ln(2)^2 num := -1 * math.Log(opts.errorRate) @@ -278,25 +278,41 @@ func (opts *BloomOpts) updateIndexes(value string) error { return nil } -// getOrCreateBloomFilter attempts to fetch an existing bloom filter from -// the kv store. If it does not exist, it tries to create one with -// given `opts` and returns it. -func getOrCreateBloomFilter(key string, store *dstore.Store, opts *BloomOpts) (*Bloom, error) { +// CreateOrReplaceBloomFilter creates a new bloom filter with given `opts` +// and stores it in the kv store. If the bloom filter already exists, it +// replaces the existing one. If `opts` is nil, it uses the default options. +func CreateOrReplaceBloomFilter(key string, opts *BloomOpts, store *dstore.Store) *Bloom { + if opts == nil { + opts = defaultBloomOpts() + } + bf := NewBloom(opts) + obj := store.NewObj(bf, -1, object.ObjTypeBF) + store.Put(key, obj) + return bf +} + +// GetOrCreateBloomFilter fetches an existing bloom filter from +// the kv store and returns the datastructure instance of it. +// If it does not exist, it tries to create one with given `opts` and returns it. +// Note: It also stores it in the kv store. 
+func GetOrCreateBloomFilter(key string, store *dstore.Store, opts *BloomOpts) (*Bloom, error) { bf, err := GetBloomFilter(key, store) - if err != nil { + if err != nil && err != diceerrors.ErrKeyNotFound { return nil, err + } else if err != nil && err == diceerrors.ErrKeyNotFound { + bf = CreateOrReplaceBloomFilter(key, opts, store) } - if bf == nil { - bf, err = CreateBloomFilter(key, store, opts) - } - return bf, err + return bf, nil } -// get the bloom filter +// GetBloomFilter fetches an existing bloom filter from +// the kv store and returns the datastructure instance of it. +// The function also returns diceerrors.ErrKeyNotFound if the key does not exist. +// It also returns diceerrors.ErrWrongTypeOperation if the object is not a bloom filter. func GetBloomFilter(key string, store *dstore.Store) (*Bloom, error) { obj := store.Get(key) if obj == nil { - return nil, nil + return nil, diceerrors.ErrKeyNotFound } if err := object.AssertType(obj.Type, object.ObjTypeBF); err != nil { return nil, diceerrors.ErrWrongTypeOperation @@ -304,19 +320,3 @@ func GetBloomFilter(key string, store *dstore.Store) (*Bloom, error) { return obj.Value.(*Bloom), nil } - -func CreateBloomFilter(key string, store *dstore.Store, opts *BloomOpts) (*Bloom, error) { - bf, err := GetBloomFilter(key, store) - if bf != nil { - return nil, diceerrors.ErrGeneral("item exists") - } - if err != nil { - return nil, err - } - if opts == nil { - opts = defaultBloomOpts() - } - obj := store.NewObj(newBloomFilter(opts), -1, object.ObjTypeBF) - store.Put(key, obj) - return obj.Value.(*Bloom), nil -} diff --git a/internal/eval/type_string.go b/internal/eval/type_string.go index 3553a00f5..8ab3f9938 100644 --- a/internal/eval/type_string.go +++ b/internal/eval/type_string.go @@ -8,14 +8,12 @@ import ( ) type String struct { - Value string - Type object.ObjectType + value string } func NewString(value string) *String { return &String{ - Value: value, - Type: object.ObjTypeString, + value: value, } } From f505f7dd7cd34649b11f23942ac066676ff33a74 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Tue, 3 Dec 2024 15:38:55 +0530 Subject: [PATCH 05/17] Unit test fixes. Strangely mem utilization reduced. 
--- internal/eval/bloom_test.go | 2 +- internal/eval/eval_test.go | 6 +++--- internal/eval/type_string_test.go | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/internal/eval/bloom_test.go b/internal/eval/bloom_test.go index 5ef929250..000499bd6 100644 --- a/internal/eval/bloom_test.go +++ b/internal/eval/bloom_test.go @@ -122,7 +122,7 @@ func TestUpdateIndexes(t *testing.T) { // Create a value, default opts and initialize all params of the filter value := "hello" opts := defaultBloomOpts() - bloom := newBloomFilter(opts) + bloom := NewBloom(opts) err := opts.updateIndexes(value) if err != nil { diff --git a/internal/eval/eval_test.go b/internal/eval/eval_test.go index 1ecab654c..5ba8b3f63 100644 --- a/internal/eval/eval_test.go +++ b/internal/eval/eval_test.go @@ -2995,7 +2995,7 @@ func testEvalPFADD(t *testing.T, store *dstore.Store) { name: "PFADD Incorrect type provided", setup: func() { key, value := "EXISTING_KEY", "VALUE" - oType := deduceType(value) + _, oType := getRawStringOrInt(value) var exDurationMs int64 = -1 keepttl := false @@ -4830,7 +4830,7 @@ func testEvalDebug(t *testing.T, store *dstore.Store) { store.Put(key, obj) }, input: []string{"MEMORY", "EXISTING_KEY"}, - migratedOutput: EvalResponse{Result: 89, Error: nil}, + migratedOutput: EvalResponse{Result: 72, Error: nil}, }, "root path": { @@ -4843,7 +4843,7 @@ func testEvalDebug(t *testing.T, store *dstore.Store) { store.Put(key, obj) }, input: []string{"MEMORY", "EXISTING_KEY", "$"}, - migratedOutput: EvalResponse{Result: 89, Error: nil}, + migratedOutput: EvalResponse{Result: 72, Error: nil}, }, "invalid path": { diff --git a/internal/eval/type_string_test.go b/internal/eval/type_string_test.go index 0fc2bf33a..57230ff89 100644 --- a/internal/eval/type_string_test.go +++ b/internal/eval/type_string_test.go @@ -13,7 +13,7 @@ func TestDeduceType(t *testing.T) { tests := []struct { name string input string - wantType uint8 + wantType object.ObjectType wantEnc uint8 }{ { @@ -45,7 +45,7 @@ func TestDeduceType(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - gotType := deduceType(tt.input) + _, gotType := getRawStringOrInt(tt.input) if gotType != tt.wantType { t.Errorf("deduceType(%q) = (%v), want (%v)", tt.input, gotType, tt.wantType) } From 6b9d3236c1afda2b7e3f222c7cb7fa9491186017 Mon Sep 17 00:00:00 2001 From: jujiale Date: Tue, 3 Dec 2024 18:49:27 +0800 Subject: [PATCH 06/17] fix: modify the config default value keep consistency (#1352) --- internal/cli/cli.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/internal/cli/cli.go b/internal/cli/cli.go index 36f80cf15..62820dec4 100644 --- a/internal/cli/cli.go +++ b/internal/cli/cli.go @@ -49,7 +49,7 @@ func printConfiguration() { // Add whether the watch feature is enabled slog.Info("running with", slog.Bool("profiling", config.DiceConfig.Performance.EnableProfiling)) - // Add whether the watch feature is enabled + // Add whether the persistence feature is enabled slog.Info("running with", slog.Bool("persistence", config.DiceConfig.Persistence.Enabled)) } @@ -73,10 +73,10 @@ func Execute() { flag.IntVar(&flagsConfig.RespServer.Port, "port", 7379, "port for the DiceDB server") - flag.IntVar(&flagsConfig.HTTP.Port, "http-port", 7380, "port for accepting requets over HTTP") + flag.IntVar(&flagsConfig.HTTP.Port, "http-port", 8082, "port for accepting requets over HTTP") flag.BoolVar(&flagsConfig.HTTP.Enabled, "enable-http", false, "enable DiceDB to listen, accept, and process HTTP") - 
flag.IntVar(&flagsConfig.WebSocket.Port, "websocket-port", 7381, "port for accepting requets over WebSocket") + flag.IntVar(&flagsConfig.WebSocket.Port, "websocket-port", 8379, "port for accepting requets over WebSocket") flag.BoolVar(&flagsConfig.WebSocket.Enabled, "enable-websocket", false, "enable DiceDB to listen, accept, and process WebSocket") flag.IntVar(&flagsConfig.Performance.NumShards, "num-shards", -1, "number shards to create. defaults to number of cores") @@ -98,7 +98,7 @@ func Execute() { flag.IntVar(&flagsConfig.Memory.KeysLimit, "keys-limit", config.DefaultKeysLimit, "keys limit for the DiceDB server. "+ "This flag controls the number of keys each shard holds at startup. You can multiply this number with the "+ "total number of shard threads to estimate how much memory will be required at system start up.") - flag.Float64Var(&flagsConfig.Memory.EvictionRatio, "eviction-ratio", 0.1, "ratio of keys to evict when the "+ + flag.Float64Var(&flagsConfig.Memory.EvictionRatio, "eviction-ratio", 0.9, "ratio of keys to evict when the "+ "keys limit is reached") flag.Usage = func() { @@ -115,9 +115,9 @@ func Execute() { fmt.Println(" -h, --help Show this help message") fmt.Println(" -host Host for the DiceDB server (default: \"0.0.0.0\")") fmt.Println(" -port Port for the DiceDB server (default: 7379)") - fmt.Println(" -http-port Port for accepting requests over HTTP (default: 7380)") + fmt.Println(" -http-port Port for accepting requests over HTTP (default: 8082)") fmt.Println(" -enable-http Enable DiceDB to listen, accept, and process HTTP (default: false)") - fmt.Println(" -websocket-port Port for accepting requests over WebSocket (default: 7381)") + fmt.Println(" -websocket-port Port for accepting requests over WebSocket (default: 8379)") fmt.Println(" -enable-websocket Enable DiceDB to listen, accept, and process WebSocket (default: false)") fmt.Println(" -num-shards Number of shards to create. 
Defaults to number of cores (default: -1)") fmt.Println(" -enable-watch Enable support for .WATCH commands and real-time reactivity (default: false)") @@ -130,8 +130,8 @@ func Execute() { fmt.Println(" -requirepass Enable authentication for the default user (default: \"\")") fmt.Println(" -o Directory path to create the config file (default: \"\")") fmt.Println(" -c File path of the config file (default: \"\")") - fmt.Println(" -keys-limit Keys limit for the DiceDB server (default: 0)") - fmt.Println(" -eviction-ratio Ratio of keys to evict when the keys limit is reached (default: 0.1)") + fmt.Println(" -keys-limit Keys limit for the DiceDB server (default: 200000000)") + fmt.Println(" -eviction-ratio Ratio of keys to evict when the keys limit is reached (default: 0.9)") color.Unset() os.Exit(0) } From a60515c17c910462b9c0d1dbc3b244d3bf5387ee Mon Sep 17 00:00:00 2001 From: Apoorv Yadav <32174554+apoorvyadav1111@users.noreply.github.com> Date: Tue, 3 Dec 2024 11:44:18 -0500 Subject: [PATCH 07/17] #1305: Add existing data types in Dump/ Restore (#1304) --- integration_tests/commands/resp/dump_test.go | 192 ++++++++++++++----- internal/eval/bloom.go | 115 ++++++++++- internal/eval/countminsketch.go | 80 ++++++++ internal/eval/deque.go | 159 +++++++++++++++ internal/eval/dump_restore.go | 156 +++++++++++++-- internal/eval/eval_test.go | 8 +- internal/eval/sortedset/sorted_set.go | 58 ++++++ internal/eval/store_eval.go | 2 +- 8 files changed, 695 insertions(+), 75 deletions(-) diff --git a/integration_tests/commands/resp/dump_test.go b/integration_tests/commands/resp/dump_test.go index 88f41ace4..27d50badb 100644 --- a/integration_tests/commands/resp/dump_test.go +++ b/integration_tests/commands/resp/dump_test.go @@ -1,7 +1,6 @@ package resp import ( - "encoding/base64" "testing" "github.com/dicedb/dice/testutils" @@ -11,7 +10,7 @@ import ( func TestDumpRestore(t *testing.T) { conn := getLocalConnection() defer conn.Close() - + simpleJSON := `{"name":"John","age":30}` testCases := []struct { name string commands []string @@ -20,57 +19,31 @@ func TestDumpRestore(t *testing.T) { { name: "DUMP and RESTORE string value", commands: []string{ - "SET mykey hello", - "DUMP mykey", - "DEL mykey", - "RESTORE mykey 2 CQAAAAAFaGVsbG//AEeXk742Rcc=", - "GET mykey", + "SET foo bar", + "DUMP foo", + "DEL foo", + "RESTORE foo 2 CQAAAAADYmFy/72GUVF+ClKv", + "GET foo", }, expected: []interface{}{ "OK", - func(result interface{}) bool { - dumped, ok := result.(string) - if !ok { - return false - } - decoded, err := base64.StdEncoding.DecodeString(dumped) - if err != nil { - return false - } - return len(decoded) > 11 && - decoded[0] == 0x09 && - decoded[1] == 0x00 && - string(decoded[6:11]) == "hello" && - decoded[11] == 0xFF - }, + "CQAAAAADYmFy/72GUVF+ClKv", int64(1), "OK", - "hello", + "bar", }, }, { name: "DUMP and RESTORE integer value", commands: []string{ - "SET intkey 42", - "DUMP intkey", - "DEL intkey", - "RESTORE intkey 2 CcAAAAAAAAAAKv9S/ymRDY3rXg==", + "set foo 12345", + "DUMP foo", + "DEL foo", + "RESTORE foo 2 CQUAAAAAAAAwOf8OqbusYAl2pQ==", }, expected: []interface{}{ "OK", - func(result interface{}) bool { - dumped, ok := result.(string) - if !ok { - return false - } - decoded, err := base64.StdEncoding.DecodeString(dumped) - if err != nil { - return false - } - return len(decoded) > 2 && - decoded[0] == 0x09 && - decoded[1] == 0xC0 - }, + "CQUAAAAAAAAwOf8OqbusYAl2pQ==", int64(1), "OK", }, @@ -84,26 +57,157 @@ func TestDumpRestore(t *testing.T) { "(nil)", }, }, + { + name: "DUMP JSON", + commands: 
[]string{ + `JSON.SET foo $ ` + simpleJSON, + "DUMP foo", + "del foo", + "restore foo 2 CQMAAAAYeyJhZ2UiOjMwLCJuYW1lIjoiSm9obiJ9/6PVaIgw0n+C", + "JSON.GET foo $..name", + }, + expected: []interface{}{ + "OK", + "skip", + int64(1), + "OK", + `"John"`, + }, + }, + { + name: "DUMP Set", + commands: []string{ + "sadd foo bar baz bazz", + "dump foo", + "del foo", + "restore foo 2 CQYAAAAAAAAAAwAAAANiYXIAAAADYmF6AAAABGJhenr/DSf4vHxjdYo=", + "smembers foo", + }, + expected: []interface{}{ + int64(3), + "skip", + int64(1), + "OK", + []interface{}{"bar", "baz", "bazz"}, + }, + }, + { + name: "DUMP bytearray", + commands: []string{ + "setbit foo 1 1", + "dump foo", + "del foo", + "restore foo 2 CQQAAAAAAAAAAUD/g00L5pRbaJI=", + "get foo", + }, + expected: []interface{}{ + int64(0), + "CQQAAAAAAAAAAUD/g00L5pRbaJI=", + int64(1), + "OK", + "@", + }, + }, + { + name: "DUMP sorted set", + commands: []string{ + "zadd foo 1 bar 2 bazz", + "dump foo", + "del foo", + "restore foo 2 CQgAAAAAAAAAAgAAAAAAAAADYmFyP/AAAAAAAAAAAAAAAAAABGJhenpAAAAAAAAAAP/POOElibTuYQ==", + "zrange foo 0 2", + }, + expected: []interface{}{ + int64(2), + "skip", + int64(1), + "OK", + []interface{}{"bar", "bazz"}, + }, + }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - FireCommand(conn, "FLUSHALL") + FireCommand(conn, "del foo") for i, cmd := range tc.commands { result := FireCommand(conn, cmd) expected := tc.expected[i] - + if expected == "skip" { + // when order of elements define the dump value, we test the restore function and skip dump + continue + } switch exp := expected.(type) { case string: assert.Equal(t, exp, result) case []interface{}: assert.True(t, testutils.UnorderedEqual(exp, result)) - case func(interface{}) bool: - assert.True(t, exp(result), cmd) default: assert.Equal(t, expected, result) } } }) } + FireCommand(conn, "FLUSHDB") +} + +func TestDumpRestoreBF(t *testing.T) { + conn := getLocalConnection() + defer conn.Close() + res := FireCommand(conn, "bf.add foo bar") + assert.Equal(t, int64(1), res) + + dumpValue := FireCommand(conn, "dump foo") + FireCommand(conn, "del foo") + + res = FireCommand(conn, "restore foo 0 "+dumpValue.(string)) + assert.Equal(t, "OK", res) + res = FireCommand(conn, "bf.exists foo bazz") + assert.Equal(t, int64(0), res) + + FireCommand(conn, "FLUSHDB") +} + +func TestDumpRestoreCMS(t *testing.T) { + conn := getLocalConnection() + defer conn.Close() + + // Add a value to the CMS + FireCommand(conn, "CMS.INITBYPROB foo 0.1 0.01") + res := FireCommand(conn, "cms.incrby foo bar 42") + assert.Equal(t, []interface{}([]interface{}{int64(42)}), res) + + // Dump the serialized value + dumpValue := FireCommand(conn, "dump foo") + FireCommand(conn, "del foo") // Delete the CMS + + // Restore the CMS from the dumped value + res = FireCommand(conn, "restore foo 0 "+dumpValue.(string)) + assert.Equal(t, "OK", res) + + // Check the value for a key in the restored CMS + res = FireCommand(conn, "cms.query foo bar") + assert.Equal(t, []interface{}([]interface{}{int64(42)}), res) + + // Check that another key not in the CMS returns 0 + res = FireCommand(conn, "cms.query foo bar") + assert.Equal(t, []interface{}([]interface{}{int64(42)}), res) + + FireCommand(conn, "FLUSHDB") +} + +func TestDumpRestoreDeque(t *testing.T) { + conn := getLocalConnection() + defer conn.Close() + res := FireCommand(conn, "lpush foo bar") + assert.Equal(t, int64(1), res) + dumpValue := FireCommand(conn, "dump foo") + res = FireCommand(conn, "del foo") + assert.Equal(t, int64(1), res) + res = 
FireCommand(conn, "restore foo 0 "+dumpValue.(string)) + assert.Equal(t, "OK", res) + res = FireCommand(conn, "lpop foo") + assert.Equal(t, "bar", res) + + FireCommand(conn, "FLUSHDB") } diff --git a/internal/eval/bloom.go b/internal/eval/bloom.go index 35b893705..c4548e780 100644 --- a/internal/eval/bloom.go +++ b/internal/eval/bloom.go @@ -1,6 +1,8 @@ package eval import ( + "bytes" + "encoding/binary" "fmt" "hash" "math" @@ -50,6 +52,8 @@ type BloomOpts struct { // is under the assumption that it's consumed at only 1 place at a time. Add // a lock when multiple clients can be supported. indexes []uint64 + + hashFnsSeeds []uint64 // seed for hash functions } type Bloom struct { @@ -102,10 +106,11 @@ func newBloomFilter(opts *BloomOpts) *Bloom { // k = ceil(ln(2) * bpe) k := math.Ceil(ln2 * opts.bpe) opts.hashFns = make([]hash.Hash64, int(k)) - + opts.hashFnsSeeds = make([]uint64, int(k)) // Initialize hash functions with random seeds for i := 0; i < int(k); i++ { - opts.hashFns[i] = murmur3.SeedNew64(rand.Uint64()) //nolint:gosec + opts.hashFnsSeeds[i] = rand.Uint64() //nolint:gosec + opts.hashFns[i] = murmur3.SeedNew64(opts.hashFnsSeeds[i]) } // initialize the common slice for storing indexes of bits to be set @@ -115,15 +120,15 @@ func newBloomFilter(opts *BloomOpts) *Bloom { // bits = k * entries / ln(2) // bytes = bits * 8 bits := uint64(math.Ceil((k * float64(opts.capacity)) / ln2)) - var bytes uint64 + var bytesNeeded uint64 if bits%8 == 0 { - bytes = bits / 8 + bytesNeeded = bits / 8 } else { - bytes = (bits / 8) + 1 + bytesNeeded = (bits / 8) + 1 } - opts.bits = bytes * 8 + opts.bits = bytesNeeded * 8 - bitset := make([]byte, bytes) + bitset := make([]byte, bytesNeeded) return &Bloom{opts, bitset, 0} } @@ -320,3 +325,99 @@ func CreateBloomFilter(key string, store *dstore.Store, opts *BloomOpts) (*Bloom store.Put(key, obj) return obj.Value.(*Bloom), nil } + +func (b *Bloom) Serialize(buf *bytes.Buffer) error { + // Serialize the Bloom struct + if err := binary.Write(buf, binary.BigEndian, b.cnt); err != nil { + return err + } + if err := binary.Write(buf, binary.BigEndian, b.opts.errorRate); err != nil { + return err + } + if err := binary.Write(buf, binary.BigEndian, b.opts.capacity); err != nil { + return err + } + if err := binary.Write(buf, binary.BigEndian, b.opts.bits); err != nil { + return err + } + + // Serialize the number of seeds and the seeds themselves + numSeeds := uint64(len(b.opts.hashFnsSeeds)) + if err := binary.Write(buf, binary.BigEndian, numSeeds); err != nil { + return err + } + if err := binary.Write(buf, binary.BigEndian, b.opts.hashFnsSeeds); err != nil { + return err + } + + // Serialize the number of indexes and the indexes themselves + numIndexes := uint64(len(b.opts.indexes)) + if err := binary.Write(buf, binary.BigEndian, numIndexes); err != nil { + return err + } + if err := binary.Write(buf, binary.BigEndian, b.opts.indexes); err != nil { + return err + } + + // Serialize the bitset + if _, err := buf.Write(b.bitset); err != nil { + return err + } + + return nil +} + +func DeserializeBloom(buf *bytes.Reader) (*Bloom, error) { + bloom := &Bloom{ + opts: &BloomOpts{}, // Initialize the opts field to prevent nil pointer dereference + } + + // Deserialize the Bloom struct + if err := binary.Read(buf, binary.BigEndian, &bloom.cnt); err != nil { + return nil, err + } + if err := binary.Read(buf, binary.BigEndian, &bloom.opts.errorRate); err != nil { + return nil, err + } + if err := binary.Read(buf, binary.BigEndian, &bloom.opts.capacity); err != nil { + 
return nil, err + } + if err := binary.Read(buf, binary.BigEndian, &bloom.opts.bits); err != nil { + return nil, err + } + + // Deserialize hash function seeds + var numSeeds uint64 + if err := binary.Read(buf, binary.BigEndian, &numSeeds); err != nil { + return nil, err + } + bloom.opts.hashFnsSeeds = make([]uint64, numSeeds) + if err := binary.Read(buf, binary.BigEndian, &bloom.opts.hashFnsSeeds); err != nil { + return nil, err + } + + // Deserialize indexes + var numIndexes uint64 + if err := binary.Read(buf, binary.BigEndian, &numIndexes); err != nil { + return nil, err + } + bloom.opts.indexes = make([]uint64, numIndexes) + if err := binary.Read(buf, binary.BigEndian, &bloom.opts.indexes); err != nil { + return nil, err + } + + // Deserialize bitset + bloom.bitset = make([]byte, bloom.opts.bits) + if _, err := buf.Read(bloom.bitset); err != nil { + return nil, err + } + + // Recalculate derived values + bloom.opts.bpe = -1 * math.Log(bloom.opts.errorRate) / math.Ln2 + bloom.opts.hashFns = make([]hash.Hash64, len(bloom.opts.hashFnsSeeds)) + for i := 0; i < len(bloom.opts.hashFnsSeeds); i++ { + bloom.opts.hashFns[i] = murmur3.SeedNew64(bloom.opts.hashFnsSeeds[i]) + } + + return bloom, nil +} diff --git a/internal/eval/countminsketch.go b/internal/eval/countminsketch.go index 27e417851..80a564631 100644 --- a/internal/eval/countminsketch.go +++ b/internal/eval/countminsketch.go @@ -1,7 +1,9 @@ package eval import ( + "bytes" "encoding/binary" + "errors" "fmt" "hash" "hash/fnv" @@ -232,6 +234,84 @@ func (c *CountMinSketch) mergeMatrices(sources []*CountMinSketch, weights []uint } } +// serialize encodes the CountMinSketch into a byte slice. +func (c *CountMinSketch) serialize(buffer *bytes.Buffer) error { + if c == nil { + return errors.New("cannot serialize a nil CountMinSketch") + } + + // Write depth, width, and count + if err := binary.Write(buffer, binary.BigEndian, c.opts.depth); err != nil { + return err + } + if err := binary.Write(buffer, binary.BigEndian, c.opts.width); err != nil { + return err + } + if err := binary.Write(buffer, binary.BigEndian, c.count); err != nil { + return err + } + + // Write matrix + for i := 0; i < len(c.matrix); i++ { + for j := 0; j < len(c.matrix[i]); j++ { + if err := binary.Write(buffer, binary.BigEndian, c.matrix[i][j]); err != nil { + return err + } + } + } + + return nil +} + +// deserialize reconstructs a CountMinSketch from a byte slice. 
+func DeserializeCMS(buffer *bytes.Reader) (*CountMinSketch, error) { + if buffer.Len() < 24 { // Minimum size for depth, width, and count + return nil, errors.New("insufficient data for deserialization") + } + + var depth, width, count uint64 + + // Read depth, width, and count + if err := binary.Read(buffer, binary.BigEndian, &depth); err != nil { + return nil, err + } + if err := binary.Read(buffer, binary.BigEndian, &width); err != nil { + return nil, err + } + if err := binary.Read(buffer, binary.BigEndian, &count); err != nil { + return nil, err + } + fmt.Println(depth, width, count, buffer.Len()) + // Validate data size + expectedSize := int(depth * width * 8) // Each uint64 takes 8 bytes + if buffer.Len() <= expectedSize { + return nil, errors.New("data size mismatch with expected matrix size") + } + + // Read matrix + matrix := make([][]uint64, depth) + for i := 0; i < int(depth); i++ { + matrix[i] = make([]uint64, width) + for j := 0; j < int(width); j++ { + if err := binary.Read(buffer, binary.BigEndian, &matrix[i][j]); err != nil { + return nil, err + } + } + } + + opts := &CountMinSketchOpts{ + depth: depth, + width: width, + hasher: fnv.New64(), // Default hasher + } + + return &CountMinSketch{ + opts: opts, + matrix: matrix, + count: count, + }, nil +} + // evalCMSMerge is used to merge multiple sketches into one. The final sketch // contains the weighted sum of the values in each of the source sketches. If // weights are not provided, default is 1. diff --git a/internal/eval/deque.go b/internal/eval/deque.go index 59d755c5f..2bf8c7d65 100644 --- a/internal/eval/deque.go +++ b/internal/eval/deque.go @@ -1,6 +1,8 @@ package eval import ( + "bytes" + "encoding/binary" "errors" "fmt" "strconv" @@ -838,3 +840,160 @@ func DecodeDeqEntry(xb []byte) (x string, entryLen int) { val >>= 64 - bit return strconv.FormatInt(val, 10), entryLen } + +func (q *Deque) Serialize(buf *bytes.Buffer) error { + if q == nil { + return errors.New("deque is nil") + } + + err := binary.Write(buf, binary.BigEndian, q.Length) + if err != nil { + return err + } + err = binary.Write(buf, binary.BigEndian, int32(q.leftIdx)) + if err != nil { + return err + } + + // Serialize byteList + err = serializeByteList(buf, q.list) + if err != nil { + return err + } + + return nil +} + +func serializeByteList(buf *bytes.Buffer, list *byteList) error { + if list == nil { + return binary.Write(buf, binary.BigEndian, int32(0)) + } + + err := binary.Write(buf, binary.BigEndian, int32(list.bufLen)) + if err != nil { + return err + } + err = binary.Write(buf, binary.BigEndian, list.size) + if err != nil { + return err + } + + current := list.head + var nodeCount int32 + nodes := [][]byte{} + + for current != nil { + nodes = append(nodes, current.buf) + current = current.next + nodeCount++ + } + + err = binary.Write(buf, binary.BigEndian, nodeCount) + if err != nil { + return err + } + + for _, nodeBuf := range nodes { + err = binary.Write(buf, binary.BigEndian, int32(len(nodeBuf))) + if err != nil { + return err + } + _, err = buf.Write(nodeBuf) + if err != nil { + return err + } + } + + return nil +} + +func DeserializeDeque(buf *bytes.Reader) (*Deque, error) { + if buf.Len() == 0 { + return nil, errors.New("data is empty") + } + + var length int64 + var leftIdx int32 + + err := binary.Read(buf, binary.BigEndian, &length) + if err != nil { + return nil, err + } + + err = binary.Read(buf, binary.BigEndian, &leftIdx) + if err != nil { + return nil, err + } + + list, err := deserializeByteList(buf) + if err != nil { + return 
nil, err + } + + return &Deque{ + Length: length, + list: list, + leftIdx: int(leftIdx), + }, nil +} + +func deserializeByteList(buf *bytes.Reader) (*byteList, error) { + var bufLen int32 + var size int64 + var nodeCount int32 + + err := binary.Read(buf, binary.BigEndian, &bufLen) + if err != nil { + return nil, err + } + + err = binary.Read(buf, binary.BigEndian, &size) + if err != nil { + return nil, err + } + + // Read the number of nodes + err = binary.Read(buf, binary.BigEndian, &nodeCount) + if err != nil { + return nil, err + } + + // Reconstruct the nodes + var prev *byteListNode + list := &byteList{ + bufLen: int(bufLen), + size: size, + } + + for i := int32(0); i < nodeCount; i++ { + // Read the length of the buffer + var bufSize int32 + err := binary.Read(buf, binary.BigEndian, &bufSize) + if err != nil { + return nil, err + } + + // Read the buffer data + nodeBuf := make([]byte, bufSize) + _, err = buf.Read(nodeBuf) + if err != nil { + return nil, err + } + + // Create and link the node + node := &byteListNode{ + buf: nodeBuf, + prev: prev, + } + if prev == nil { + list.head = node + } else { + prev.next = node + } + prev = node + } + + list.tail = prev + + return list, nil +} diff --git a/internal/eval/dump_restore.go b/internal/eval/dump_restore.go index 29d87159d..5c1b39953 100644 --- a/internal/eval/dump_restore.go +++ b/internal/eval/dump_restore.go @@ -3,9 +3,11 @@ package eval import ( "bytes" "encoding/binary" + "encoding/json" "errors" "hash/crc64" + "github.com/dicedb/dice/internal/eval/sortedset" "github.com/dicedb/dice/internal/object" ) @@ -13,19 +15,62 @@ func rdbDeserialize(data []byte) (*object.Obj, error) { if len(data) < 3 { return nil, errors.New("insufficient data for deserialization") } - objType := data[1] + var value interface{} + var err error + var valueRaw interface{} + + buf := bytes.NewReader(data) + _, err = buf.ReadByte() + if err != nil { + return nil, err + } + objType, err := buf.ReadByte() + if err != nil { + return nil, err + } switch objType { - case 0x00: - return readString(data[2:]) - case 0xC0: // Integer type - return readInt(data[2:]) + case object.ObjTypeString: + value, err = readString(buf) + case object.ObjTypeInt: // Integer type + value, err = readInt(buf) + case object.ObjTypeSet: // Set type + value, err = readSet(buf) + case object.ObjTypeJSON: // JSON type + valueRaw, err = readString(buf) + if err := json.Unmarshal([]byte(valueRaw.(string)), &value); err != nil { + return nil, err + } + case object.ObjTypeByteArray: // Byte array type + valueRaw, err = readInt(buf) + if err != nil { + return nil, err + } + byteArray := &ByteArray{ + Length: valueRaw.(int64), + data: make([]byte, valueRaw.(int64)), + } + if _, err := buf.Read(byteArray.data); err != nil { + return nil, err + } + value = byteArray + case object.ObjTypeDequeue: // Byte list type (Deque) + value, err = DeserializeDeque(buf) + case object.ObjTypeBF: // Bloom filter type + value, err = DeserializeBloom(buf) + case object.ObjTypeSortedSet: + value, err = sortedset.DeserializeSortedSet(buf) + case object.ObjTypeCountMinSketch: + value, err = DeserializeCMS(buf) default: return nil, errors.New("unsupported object type") } + if err != nil { + return nil, err + } + return &object.Obj{Type: objType, Value: value}, nil } -func readString(data []byte) (*object.Obj, error) { - buf := bytes.NewReader(data) +func readString(buf *bytes.Reader) (interface{}, error) { var strLen uint32 if err := binary.Read(buf, binary.BigEndian, &strLen); err != nil { return nil, err @@ -36,29 
+81,44 @@ func readString(data []byte) (*object.Obj, error) { return nil, err } - return &object.Obj{Type: object.ObjTypeString, Value: string(strBytes)}, nil + return string(strBytes), nil } -func readInt(data []byte) (*object.Obj, error) { +func readInt(buf *bytes.Reader) (interface{}, error) { var intVal int64 - if err := binary.Read(bytes.NewReader(data), binary.BigEndian, &intVal); err != nil { + if err := binary.Read(buf, binary.BigEndian, &intVal); err != nil { return nil, err } - return &object.Obj{Type: object.ObjTypeInt, Value: intVal}, nil + return intVal, nil +} + +func readSet(buf *bytes.Reader) (interface{}, error) { + var strLen uint64 + if err := binary.Read(buf, binary.BigEndian, &strLen); err != nil { + return nil, err + } + setItems := make(map[string]struct{}) + for i := 0; i < int(strLen); i++ { + value, err := readString(buf) + if err != nil { + return nil, err + } + setItems[value.(string)] = struct{}{} + } + return setItems, nil } func rdbSerialize(obj *object.Obj) ([]byte, error) { var buf bytes.Buffer buf.WriteByte(0x09) - + buf.WriteByte(obj.Type) switch obj.Type { case object.ObjTypeString: str, ok := obj.Value.(string) if !ok { return nil, errors.New("invalid string value") } - buf.WriteByte(0x00) if err := writeString(&buf, str); err != nil { return nil, err } @@ -68,15 +128,67 @@ func rdbSerialize(obj *object.Obj) ([]byte, error) { if !ok { return nil, errors.New("invalid integer value") } - buf.WriteByte(0xC0) writeInt(&buf, intVal) - + case object.ObjTypeSet: + setItems, ok := obj.Value.(map[string]struct{}) + if !ok { + return nil, errors.New("invalid set value") + } + if err := writeSet(&buf, setItems); err != nil { + return nil, err + } + case object.ObjTypeJSON: + jsonValue, err := json.Marshal(obj.Value) + if err != nil { + return nil, err + } + if err := writeString(&buf, string(jsonValue)); err != nil { + return nil, err + } + case object.ObjTypeByteArray: + byteArray, ok := obj.Value.(*ByteArray) + if !ok { + return nil, errors.New("invalid byte array value") + } + writeInt(&buf, byteArray.Length) + buf.Write(byteArray.data) + case object.ObjTypeDequeue: + deque, ok := obj.Value.(*Deque) + if !ok { + return nil, errors.New("invalid byte list value") + } + if err := deque.Serialize(&buf); err != nil { + return nil, err + } + case object.ObjTypeBF: + bitSet, ok := obj.Value.(*Bloom) + if !ok { + return nil, errors.New("invalid bloom filter value") + } + if err := bitSet.Serialize(&buf); err != nil { + return nil, err + } + case object.ObjTypeSortedSet: + sortedSet, ok := obj.Value.(*sortedset.Set) + if !ok { + return nil, errors.New("invalid sorted set value") + } + if err := sortedSet.Serialize(&buf); err != nil { + return nil, err + } + case object.ObjTypeCountMinSketch: + cms, ok := obj.Value.(*CountMinSketch) + if !ok { + return nil, errors.New("invalid countminsketch value") + } + if err := cms.serialize(&buf); err != nil { + return nil, err + } default: return nil, errors.New("unsupported object type") } buf.WriteByte(0xFF) // End marker - return appendChecksum(buf.Bytes()), nil } @@ -95,6 +207,18 @@ func writeInt(buf *bytes.Buffer, intVal int64) { buf.Write(tempBuf) } +func writeSet(buf *bytes.Buffer, setItems map[string]struct{}) error { + setLen := uint64(len(setItems)) + if err := binary.Write(buf, binary.BigEndian, setLen); err != nil { + return err + } + for item := range setItems { + if err := writeString(buf, item); err != nil { + return err + } + } + return nil +} func appendChecksum(data []byte) []byte { checksum := 
crc64.Checksum(data, crc64.MakeTable(crc64.ECMA)) checksumBuf := make([]byte, 8) diff --git a/internal/eval/eval_test.go b/internal/eval/eval_test.go index 61ae149fe..01be7f591 100644 --- a/internal/eval/eval_test.go +++ b/internal/eval/eval_test.go @@ -8107,13 +8107,7 @@ func testEvalDUMP(t *testing.T, store *dstore.Store) { }, input: []string{"INTEGER_KEY"}, migratedOutput: EvalResponse{ - Result: base64.StdEncoding.EncodeToString([]byte{ - 0x09, - 0xC0, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0A, - 0xFF, - 0x12, 0x77, 0xDE, 0x29, 0x53, 0xDB, 0x44, 0xC2, - }), + Result: "CQUAAAAAAAAACv9+l81XgsShqw==", Error: nil, }, }, diff --git a/internal/eval/sortedset/sorted_set.go b/internal/eval/sortedset/sorted_set.go index 51d64efd5..5b4435c3f 100644 --- a/internal/eval/sortedset/sorted_set.go +++ b/internal/eval/sortedset/sorted_set.go @@ -1,6 +1,8 @@ package sortedset import ( + "bytes" + "encoding/binary" "strconv" "strings" @@ -251,3 +253,59 @@ func (ss *Set) CountInRange(minVal, maxVal float64) int { return count } + +func (ss *Set) Serialize(buf *bytes.Buffer) error { + // Serialize the length of the memberMap + memberCount := uint64(len(ss.memberMap)) + if err := binary.Write(buf, binary.BigEndian, memberCount); err != nil { + return err + } + + // Serialize each member and its score + for member, score := range ss.memberMap { + memberLen := uint64(len(member)) + if err := binary.Write(buf, binary.BigEndian, memberLen); err != nil { + return err + } + if _, err := buf.WriteString(member); err != nil { + return err + } + if err := binary.Write(buf, binary.BigEndian, score); err != nil { + return err + } + } + return nil +} + +func DeserializeSortedSet(buf *bytes.Reader) (*Set, error) { + ss := New() + + // Read the member count + var memberCount uint64 + if err := binary.Read(buf, binary.BigEndian, &memberCount); err != nil { + return nil, err + } + + // Read each member and its score + for i := uint64(0); i < memberCount; i++ { + var memberLen uint64 + if err := binary.Read(buf, binary.BigEndian, &memberLen); err != nil { + return nil, err + } + + member := make([]byte, memberLen) + if _, err := buf.Read(member); err != nil { + return nil, err + } + + var score float64 + if err := binary.Read(buf, binary.BigEndian, &score); err != nil { + return nil, err + } + + // Add the member back to the set + ss.Upsert(score, string(member)) + } + + return ss, nil +} diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index 37b694f5e..756201b95 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -2630,6 +2630,7 @@ func evalDUMP(args []string, store *dstore.Store) *EvalResponse { serializedValue, err := rdbSerialize(obj) if err != nil { + fmt.Println("error", err) return makeEvalError(diceerrors.ErrGeneral("serialization failed")) } encodedResult := base64.StdEncoding.EncodeToString(serializedValue) @@ -2651,7 +2652,6 @@ func evalRestore(args []string, store *dstore.Store) *EvalResponse { if err != nil { return makeEvalError(diceerrors.ErrGeneral("failed to decode base64 value")) } - obj, err := rdbDeserialize(serializedData) if err != nil { return makeEvalError(diceerrors.ErrGeneral("deserialization failed")) From 6aae69b61fbedcc86d4d48f1fd5f1e393f0d7aa3 Mon Sep 17 00:00:00 2001 From: rahul-mallick-15 <88314273+rahul-mallick-15@users.noreply.github.com> Date: Tue, 3 Dec 2024 22:35:19 +0530 Subject: [PATCH 08/17] Remove --enable-multithreading flag usage in README (#1349) --- README.md | 6 +++--- docs/src/content/docs/get-started/installation.mdx | 2 
+- docs/src/content/docs/get-started/reactive-hello-world.mdx | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index c3845f361..2335c52bb 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ Although DiceDB is a drop-in replacement of Redis, which means almost no learnin The easiest way to get started with DiceDB is using [Docker](https://www.docker.com/) by running the following command. ```bash -docker run -p 7379:7379 dicedb/dicedb --enable-multithreading --enable-watch +docker run -p 7379:7379 dicedb/dicedb --enable-watch ``` The above command will start the DiceDB server running locally on the port `7379` and you can connect @@ -60,10 +60,10 @@ To run DiceDB for local development or running from source, you will need ```bash git clone https://github.com/dicedb/dice cd dice -go run main.go --enable-multithreading --enable-watch +go run main.go --enable-watch ``` -You can skip passing the two flags if you are not working with multi-threading or `.WATCH` features. +You can skip passing the flag if you are not working with the `.WATCH` feature. 1. Install GoLangCI diff --git a/docs/src/content/docs/get-started/installation.mdx b/docs/src/content/docs/get-started/installation.mdx index 75f9622b2..8ba500b4e 100644 --- a/docs/src/content/docs/get-started/installation.mdx +++ b/docs/src/content/docs/get-started/installation.mdx @@ -16,7 +16,7 @@ We are looking for Early Design Partners, so, if you want to evaluate DiceDB, [b The easiest way to get started with DiceDB is using [Docker](https://www.docker.com/) by running the following command. ```bash -docker run -p 7379:7379 dicedb/dicedb --enable-multithreading --enable-watch +docker run -p 7379:7379 dicedb/dicedb --enable-watch ``` The above command will start the DiceDB server running locally on the port `7379` and you can connect diff --git a/docs/src/content/docs/get-started/reactive-hello-world.mdx b/docs/src/content/docs/get-started/reactive-hello-world.mdx index 744c56866..badd81fed 100644 --- a/docs/src/content/docs/get-started/reactive-hello-world.mdx +++ b/docs/src/content/docs/get-started/reactive-hello-world.mdx @@ -15,12 +15,12 @@ But, before we start, make sure you have ### Starting DiceDB -Start the DiceDB server with the two flags `--enable-multithreading` and `--enable-watch` -to enable multi-threading and watch mode, respectively. Your command would look something +Start the DiceDB server with the flag `--enable-watch` +to enable watch mode. 
Your command would look something like this ```bash -docker run -p 7379:7379 dicedb/dicedb --enable-multithreading --enable-watch +docker run -p 7379:7379 dicedb/dicedb --enable-watch ``` Also, connect to the database using the CLI as mentioned in the above installation steps or From 531a32a0cdb391b044059ccb1a73c96f19e67395 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Wed, 4 Dec 2024 16:50:30 +0530 Subject: [PATCH 09/17] Some errors as constants and bloom -> bloomfilter --- integration_tests/commands/http/bloom_test.go | 7 ++++--- integration_tests/commands/http/jsondebug_test.go | 2 +- integration_tests/commands/resp/bloom_test.go | 7 ++++--- integration_tests/commands/websocket/bloom_test.go | 7 ++++--- integration_tests/commands/websocket/jsondebug_test.go | 2 +- internal/errors/migrated_errors.go | 1 + internal/eval/bloom_test.go | 2 +- internal/eval/eval_test.go | 2 +- internal/eval/store_eval.go | 6 ++++-- internal/eval/{type_bloom.go => type_bloomfilter.go} | 4 ++-- 10 files changed, 23 insertions(+), 17 deletions(-) rename internal/eval/{type_bloom.go => type_bloomfilter.go} (99%) diff --git a/integration_tests/commands/http/bloom_test.go b/integration_tests/commands/http/bloom_test.go index d6c47e41b..40c1de24a 100644 --- a/integration_tests/commands/http/bloom_test.go +++ b/integration_tests/commands/http/bloom_test.go @@ -3,6 +3,7 @@ package http import ( "testing" + diceerrors "github.com/dicedb/dice/internal/errors" "github.com/stretchr/testify/assert" ) @@ -82,7 +83,7 @@ func TestBloomFilter(t *testing.T) { Body: map[string]interface{}{"key": "bf", "values": []interface{}{0.01, 2000}}, }, }, - expected: []interface{}{"OK", "ERR item exists"}, + expected: []interface{}{"OK", diceerrors.ErrKeyExists}, }, } @@ -199,7 +200,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { Body: map[string]interface{}{"key": "bf"}, }, }, - expected: []interface{}{"ERR not found"}, + expected: []interface{}{diceerrors.ErrKeyNotFound}, }, { name: "BF.RESERVE with a very high error rate", @@ -281,7 +282,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { Body: map[string]interface{}{"key": "bf", "values": []interface{}{0.01, 2000}}, }, }, - expected: []interface{}{"OK", "ERR item exists"}, + expected: []interface{}{"OK", diceerrors.ErrKeyExists}, }, { name: "BF.INFO after multiple additions", diff --git a/integration_tests/commands/http/jsondebug_test.go b/integration_tests/commands/http/jsondebug_test.go index 26db95c84..37ab3a061 100644 --- a/integration_tests/commands/http/jsondebug_test.go +++ b/integration_tests/commands/http/jsondebug_test.go @@ -16,7 +16,7 @@ func TestJSONDEBUG(t *testing.T) { {Command: "JSON.SET", Body: map[string]interface{}{"key": "k1", "path": "$", "json": map[string]interface{}{"a": 1}}}, {Command: "JSON.DEBUG", Body: map[string]interface{}{"values": []interface{}{"MEMORY", "k1"}}}, }, - expected: []interface{}{"OK", float64(89)}, + expected: []interface{}{"OK", float64(72)}, }, { name: "jsondebug with a valid path", diff --git a/integration_tests/commands/resp/bloom_test.go b/integration_tests/commands/resp/bloom_test.go index 53ba8b506..e01c0b091 100644 --- a/integration_tests/commands/resp/bloom_test.go +++ b/integration_tests/commands/resp/bloom_test.go @@ -4,6 +4,7 @@ import ( "testing" "time" + diceerrors "github.com/dicedb/dice/internal/errors" "github.com/stretchr/testify/assert" ) @@ -42,7 +43,7 @@ func TestBFReserveAddInfoExists(t *testing.T) { { name: "BF.RESERVE on existent filter returns error", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 
0.01 1000"}, - expect: []interface{}{"OK", "ERR item exists"}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists}, delays: []time.Duration{0, 0}, cleanUp: []string{"DEL bf"}, }, @@ -135,7 +136,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.INFO on a non-existent filter", cmds: []string{"BF.INFO bf"}, - expect: []interface{}{"ERR not found"}, + expect: []interface{}{diceerrors.ErrKeyNotFound}, delays: []time.Duration{0}, cleanUp: []string{"del bf"}, }, @@ -170,7 +171,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.RESERVE with duplicate filter name", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 2000"}, - expect: []interface{}{"OK", "ERR item exists"}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists}, delays: []time.Duration{0, 0}, cleanUp: []string{"del bf"}, }, diff --git a/integration_tests/commands/websocket/bloom_test.go b/integration_tests/commands/websocket/bloom_test.go index 7198505df..ba250fa95 100644 --- a/integration_tests/commands/websocket/bloom_test.go +++ b/integration_tests/commands/websocket/bloom_test.go @@ -4,6 +4,7 @@ import ( "testing" "time" + diceerrors "github.com/dicedb/dice/internal/errors" "github.com/stretchr/testify/assert" ) @@ -37,7 +38,7 @@ func TestBFReserveAddInfoExists(t *testing.T) { { name: "BF.RESERVE on existent filter returns error", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 1000"}, - expect: []interface{}{"OK", "ERR item exists"}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists}, cleanUp: []string{"DEL bf"}, }, } @@ -126,7 +127,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.INFO on a non-existent filter", cmds: []string{"BF.INFO bf"}, - expect: []interface{}{"ERR not found"}, + expect: []interface{}{diceerrors.ErrKeyNotFound}, delays: []time.Duration{0}, cleanUp: []string{"del bf"}, }, @@ -161,7 +162,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.RESERVE with duplicate filter name", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 2000"}, - expect: []interface{}{"OK", "ERR item exists"}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists}, delays: []time.Duration{0, 0}, cleanUp: []string{"del bf"}, }, diff --git a/integration_tests/commands/websocket/jsondebug_test.go b/integration_tests/commands/websocket/jsondebug_test.go index 8f2cba904..77a805f69 100644 --- a/integration_tests/commands/websocket/jsondebug_test.go +++ b/integration_tests/commands/websocket/jsondebug_test.go @@ -30,7 +30,7 @@ func TestJSONDEBUG(t *testing.T) { `JSON.SET k1 $ {"a":1}`, "JSON.DEBUG MEMORY k1", }, - expected: []interface{}{"OK", float64(89)}, + expected: []interface{}{"OK", float64(72)}, }, { name: "jsondebug with a valid path", diff --git a/internal/errors/migrated_errors.go b/internal/errors/migrated_errors.go index 6c8fe4e4c..ea81b0c4e 100644 --- a/internal/errors/migrated_errors.go +++ b/internal/errors/migrated_errors.go @@ -33,6 +33,7 @@ var ( ErrInvalidIPAddress = errors.New("invalid IP address") ErrInvalidFingerprint = errors.New("invalid fingerprint") ErrKeyDoesNotExist = errors.New("ERR could not perform this operation on a key that doesn't exist") + ErrKeyExists = errors.New("ERR key exists") // Error generation functions for specific error messages with dynamic parameters. 
ErrWrongArgumentCount = func(command string) error { diff --git a/internal/eval/bloom_test.go b/internal/eval/bloom_test.go index 000499bd6..d88d97f2f 100644 --- a/internal/eval/bloom_test.go +++ b/internal/eval/bloom_test.go @@ -122,7 +122,7 @@ func TestUpdateIndexes(t *testing.T) { // Create a value, default opts and initialize all params of the filter value := "hello" opts := defaultBloomOpts() - bloom := NewBloom(opts) + bloom := NewBloomFilter(opts) err := opts.updateIndexes(value) if err != nil { diff --git a/internal/eval/eval_test.go b/internal/eval/eval_test.go index 5ba8b3f63..22089ddf6 100644 --- a/internal/eval/eval_test.go +++ b/internal/eval/eval_test.go @@ -9028,7 +9028,7 @@ func testEvalBFINFO(t *testing.T, store *dstore.Store) { { name: "BF.INFO on non-existent filter", input: []string{"nonExistentFilter"}, - migratedOutput: EvalResponse{Result: nil, Error: errors.New("ERR not found")}, + migratedOutput: EvalResponse{Result: nil, Error: diceerrors.ErrKeyNotFound}, }, } diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index 343a386e4..c58a63008 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -2869,13 +2869,15 @@ func evalBFRESERVE(args []string, store *dstore.Store) *EvalResponse { key := args[0] - _, err = GetBloomFilter(key, store) + bf, err := GetBloomFilter(key, store) if err != nil && err != diceerrors.ErrKeyNotFound { // bloom filter does not exist return makeEvalError(err) } else if err != nil && err == diceerrors.ErrKeyNotFound { // key does not exists CreateOrReplaceBloomFilter(key, opts, store) return makeEvalResult(clientio.OK) - } else { // bloom filter already exists + } else if bf != nil { // bloom filter already exists + return makeEvalError(diceerrors.ErrKeyExists) + } else { return makeEvalResult(clientio.OK) } } diff --git a/internal/eval/type_bloom.go b/internal/eval/type_bloomfilter.go similarity index 99% rename from internal/eval/type_bloom.go rename to internal/eval/type_bloomfilter.go index 3d7a6072c..816524cad 100644 --- a/internal/eval/type_bloom.go +++ b/internal/eval/type_bloomfilter.go @@ -92,7 +92,7 @@ func newBloomOpts(args []string) (*BloomOpts, error) { // newBloomFilter creates and returns a new filter. It is responsible for initializing the // underlying bit array. 
-func NewBloom(opts *BloomOpts) *Bloom { +func NewBloomFilter(opts *BloomOpts) *Bloom { // Calculate bits per element // bpe = -log(errorRate)/ln(2)^2 num := -1 * math.Log(opts.errorRate) @@ -285,7 +285,7 @@ func CreateOrReplaceBloomFilter(key string, opts *BloomOpts, store *dstore.Store if opts == nil { opts = defaultBloomOpts() } - bf := NewBloom(opts) + bf := NewBloomFilter(opts) obj := store.NewObj(bf, -1, object.ObjTypeBF) store.Put(key, obj) return bf From 32e86d00eb557d2b21e360d6167d3da8f2ca605c Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Wed, 4 Dec 2024 17:01:56 +0530 Subject: [PATCH 10/17] Integration tests fixes --- integration_tests/commands/http/bloom_test.go | 6 +++--- integration_tests/commands/resp/bloom_test.go | 6 +++--- integration_tests/commands/resp/jsondebug_test.go | 2 +- integration_tests/commands/websocket/bloom_test.go | 6 +++--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/integration_tests/commands/http/bloom_test.go b/integration_tests/commands/http/bloom_test.go index 40c1de24a..8d28958b9 100644 --- a/integration_tests/commands/http/bloom_test.go +++ b/integration_tests/commands/http/bloom_test.go @@ -83,7 +83,7 @@ func TestBloomFilter(t *testing.T) { Body: map[string]interface{}{"key": "bf", "values": []interface{}{0.01, 2000}}, }, }, - expected: []interface{}{"OK", diceerrors.ErrKeyExists}, + expected: []interface{}{"OK", diceerrors.ErrKeyExists.Error()}, }, } @@ -200,7 +200,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { Body: map[string]interface{}{"key": "bf"}, }, }, - expected: []interface{}{diceerrors.ErrKeyNotFound}, + expected: []interface{}{diceerrors.ErrKeyNotFound.Error()}, }, { name: "BF.RESERVE with a very high error rate", @@ -282,7 +282,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { Body: map[string]interface{}{"key": "bf", "values": []interface{}{0.01, 2000}}, }, }, - expected: []interface{}{"OK", diceerrors.ErrKeyExists}, + expected: []interface{}{"OK", diceerrors.ErrKeyExists.Error()}, }, { name: "BF.INFO after multiple additions", diff --git a/integration_tests/commands/resp/bloom_test.go b/integration_tests/commands/resp/bloom_test.go index e01c0b091..5af5b3044 100644 --- a/integration_tests/commands/resp/bloom_test.go +++ b/integration_tests/commands/resp/bloom_test.go @@ -43,7 +43,7 @@ func TestBFReserveAddInfoExists(t *testing.T) { { name: "BF.RESERVE on existent filter returns error", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 1000"}, - expect: []interface{}{"OK", diceerrors.ErrKeyExists}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists.Error()}, delays: []time.Duration{0, 0}, cleanUp: []string{"DEL bf"}, }, @@ -136,7 +136,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.INFO on a non-existent filter", cmds: []string{"BF.INFO bf"}, - expect: []interface{}{diceerrors.ErrKeyNotFound}, + expect: []interface{}{diceerrors.ErrKeyNotFound.Error()}, delays: []time.Duration{0}, cleanUp: []string{"del bf"}, }, @@ -171,7 +171,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.RESERVE with duplicate filter name", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 2000"}, - expect: []interface{}{"OK", diceerrors.ErrKeyExists}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists.Error()}, delays: []time.Duration{0, 0}, cleanUp: []string{"del bf"}, }, diff --git a/integration_tests/commands/resp/jsondebug_test.go b/integration_tests/commands/resp/jsondebug_test.go index af1bda920..8ad10a9ae 100644 --- a/integration_tests/commands/resp/jsondebug_test.go +++ 
b/integration_tests/commands/resp/jsondebug_test.go @@ -23,7 +23,7 @@ func TestJSONDEBUG(t *testing.T) { `JSON.SET k1 $ {"a":1}`, "JSON.DEBUG MEMORY k1", }, - expected: []interface{}{"OK", int64(89)}, + expected: []interface{}{"OK", int64(72)}, }, { name: "jsondebug with a valid path", diff --git a/integration_tests/commands/websocket/bloom_test.go b/integration_tests/commands/websocket/bloom_test.go index ba250fa95..d8c9222ca 100644 --- a/integration_tests/commands/websocket/bloom_test.go +++ b/integration_tests/commands/websocket/bloom_test.go @@ -38,7 +38,7 @@ func TestBFReserveAddInfoExists(t *testing.T) { { name: "BF.RESERVE on existent filter returns error", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 1000"}, - expect: []interface{}{"OK", diceerrors.ErrKeyExists}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists.Error()}, cleanUp: []string{"DEL bf"}, }, } @@ -127,7 +127,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.INFO on a non-existent filter", cmds: []string{"BF.INFO bf"}, - expect: []interface{}{diceerrors.ErrKeyNotFound}, + expect: []interface{}{diceerrors.ErrKeyNotFound.Error()}, delays: []time.Duration{0}, cleanUp: []string{"del bf"}, }, @@ -162,7 +162,7 @@ func TestBFEdgeCasesAndErrors(t *testing.T) { { name: "BF.RESERVE with duplicate filter name", cmds: []string{"BF.RESERVE bf 0.01 1000", "BF.RESERVE bf 0.01 2000"}, - expect: []interface{}{"OK", diceerrors.ErrKeyExists}, + expect: []interface{}{"OK", diceerrors.ErrKeyExists.Error()}, delays: []time.Duration{0, 0}, cleanUp: []string{"del bf"}, }, From fdbb9794ca03532ace83b62e47a9e51c6bd22576 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Wed, 4 Dec 2024 17:05:54 +0530 Subject: [PATCH 11/17] Object Type changed and casted during RDB serialization --- internal/eval/dump_restore.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/internal/eval/dump_restore.go b/internal/eval/dump_restore.go index 5c1b39953..7049cdd2b 100644 --- a/internal/eval/dump_restore.go +++ b/internal/eval/dump_restore.go @@ -24,10 +24,12 @@ func rdbDeserialize(data []byte) (*object.Obj, error) { if err != nil { return nil, err } - objType, err := buf.ReadByte() + _oType, err := buf.ReadByte() if err != nil { return nil, err } + + objType := object.ObjectType(_oType) switch objType { case object.ObjTypeString: value, err = readString(buf) @@ -112,7 +114,7 @@ func readSet(buf *bytes.Reader) (interface{}, error) { func rdbSerialize(obj *object.Obj) ([]byte, error) { var buf bytes.Buffer buf.WriteByte(0x09) - buf.WriteByte(obj.Type) + buf.WriteByte(byte(obj.Type)) switch obj.Type { case object.ObjTypeString: str, ok := obj.Value.(string) From 0d736f63bbcd0f61115ba7f96218eb70c1b25638 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Wed, 4 Dec 2024 17:09:10 +0530 Subject: [PATCH 12/17] Lint fixes --- internal/eval/store_eval.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index f59e8bb83..c80ad9d63 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -2877,9 +2877,8 @@ func evalBFRESERVE(args []string, store *dstore.Store) *EvalResponse { return makeEvalResult(clientio.OK) } else if bf != nil { // bloom filter already exists return makeEvalError(diceerrors.ErrKeyExists) - } else { - return makeEvalResult(clientio.OK) } + return makeEvalResult(clientio.OK) } // evalBFADD evaluates the BF.ADD command responsible for adding an element to a bloom filter. 
If the filter does not From 0677ffb89c684814a47b8e308d83d89bfd8e40ee Mon Sep 17 00:00:00 2001 From: Prachi <94190717+Prachi-Jamdade@users.noreply.github.com> Date: Thu, 5 Dec 2024 23:35:09 +0530 Subject: [PATCH 13/17] feat: Add a tutorial example for URL Shortener built using DiceDB (#1259) Co-authored-by: Jyotinder --- docs/astro.config.mjs | 8 +- .../{tutorials => }/realtime-leaderboard.md | 0 .../content/docs/tutorials/url-shortener.md | 215 ++++++++++++++++++ examples/url-shortener/Dockerfile | 28 +++ examples/url-shortener/compose.yaml | 17 ++ examples/url-shortener/go.mod | 41 ++++ examples/url-shortener/go.sum | 87 +++++++ examples/url-shortener/main.go | 100 ++++++++ 8 files changed, 492 insertions(+), 4 deletions(-) rename docs/src/content/docs/{tutorials => }/realtime-leaderboard.md (100%) create mode 100644 docs/src/content/docs/tutorials/url-shortener.md create mode 100644 examples/url-shortener/Dockerfile create mode 100644 examples/url-shortener/compose.yaml create mode 100644 examples/url-shortener/go.mod create mode 100644 examples/url-shortener/go.sum create mode 100644 examples/url-shortener/main.go diff --git a/docs/astro.config.mjs b/docs/astro.config.mjs index 8ec2f2762..d3b263be3 100644 --- a/docs/astro.config.mjs +++ b/docs/astro.config.mjs @@ -33,10 +33,6 @@ export default defineConfig({ label: "Get started", autogenerate: { directory: "get-started" }, }, - // { - // label: 'Tutorials', - // autogenerate: { directory: 'tutorials' } - // }, { label: "SDK", autogenerate: { directory: "sdk" }, @@ -45,6 +41,10 @@ export default defineConfig({ label: "Connection Protocols", autogenerate: { directory: "protocols" }, }, + { + label: 'Tutorials', + autogenerate: { directory: 'tutorials' } + }, { label: "Commands", autogenerate: { directory: "commands" }, }, diff --git a/docs/src/content/docs/tutorials/realtime-leaderboard.md b/docs/src/content/docs/realtime-leaderboard.md similarity index 100% rename from docs/src/content/docs/tutorials/realtime-leaderboard.md rename to docs/src/content/docs/realtime-leaderboard.md diff --git a/docs/src/content/docs/tutorials/url-shortener.md b/docs/src/content/docs/tutorials/url-shortener.md new file mode 100644 index 000000000..fb47fd2d6 --- /dev/null +++ b/docs/src/content/docs/tutorials/url-shortener.md @@ -0,0 +1,215 @@ +--- +title: "Building a URL Shortener" +description: "Create a simple URL Shortener using DiceDB Go SDK." +--- + +This tutorial guides you through creating a URL shortener using DiceDB, a key-value store, with Go. We’ll set up endpoints to generate short URLs and redirect them to the original URLs. + +## Prerequisites + +1. Go (version 1.18 or later): [Download Go](https://golang.org/dl/) +2. DiceDB: A DiceDB server running locally. Refer to the [DiceDB Installation Guide](get-started/installation) if you haven't set it up yet. + +## Setup + +### 1. Install and Run DiceDB +Start a DiceDB server using Docker: +```bash +docker run -d -p 7379:7379 dicedb/dicedb +``` + +This command pulls the DiceDB Docker image and runs it, exposing it on port `7379`. + +### 2. Initialize a New Go Project +Create a new directory for your project and initialize a Go module: +```bash +mkdir url-shortener +cd url-shortener +go mod init url-shortener +``` + +### 3. 
Install Required Packages +Install the DiceDB Go SDK and other dependencies: +```bash +go get github.com/dicedb/dicedb-go +go get github.com/gin-gonic/gin +go get github.com/google/uuid +``` + +## Understanding DiceDB Commands +We'll use the following DiceDB commands: +### `SET` Command +Stores a key-value pair in DiceDB. +- **Syntax**: `SET key value [expiration]` + - `key`: Unique identifier (e.g., short URL code) + - `value`: Data to store (e.g., serialized JSON) + - `expiration`: Optional; time-to-live in seconds (use `0` for no expiration) +### `GET` Command +Retrieves the value associated with a key. +- **Syntax**: `GET key` + - `key`: Identifier for the data to retrieve + +## Writing the Code +Create a file named `main.go` and add the following code: + +- `main.go`: + ```go + package main + + import ( + "context" + "encoding/json" + "log" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/dicedb/dicedb-go" // DiceDB Go SDK + ) + + type URL struct { + ID string `json:"id"` + LongURL string `json:"long_url"` + ShortURL string `json:"short_url"` + } + + var db *dicedb.Client + + // Initialize DiceDB connection + func init() { + db = dicedb.NewClient(&dicedb.Options{ + Addr: "localhost:7379", + }) + } + + // Creates a short URL from a given long URL + func CreateShortURL(c *gin.Context) { + var requestBody URL + if err := c.ShouldBindJSON(&requestBody); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request"}) + return + } + + // Generate unique short ID and construct the short URL + shortID := uuid.New().String()[:8] + requestBody.ID = shortID + requestBody.ShortURL = "http://localhost:8080/" + shortID + + // Serialize URL struct to JSON and store it in DiceDB + urlData, err := json.Marshal(requestBody) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) + return + } + + if err := db.Set(context.Background(), shortID, urlData, 0).Err(); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) + return + } + + c.JSON(http.StatusCreated, gin.H{"short_url": requestBody.ShortURL}) + } + + // Redirects to the original URL based on the short URL ID + func RedirectURL(c *gin.Context) { + id := c.Param("id") + + // Retrieve stored URL data from DiceDB + urlData, err := db.Get(context.Background(), id).Result() + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "URL not found"}) + return + } + + // Deserialize JSON data back into URL struct + var url URL + if err := json.Unmarshal([]byte(urlData), &url); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode URL data"}) + return + } + + // Redirect user to the original long URL + c.Redirect(http.StatusFound, url.LongURL) + } + + func main() { + router := gin.Default() + + // Define endpoints for creating short URLs and redirecting + router.POST("/shorten", CreateShortURL) + router.GET("/:id", RedirectURL) + + // Start the server on port 8080 + if err := router.Run(":8080"); err != nil { + log.Fatal("Failed to start server:", err) + } + } + ``` + +## Explanation + +### 1. Initialize the DiceDB Client +We set up the DiceDB client in the `init` function: +```go +db = dicedb.NewClient(&dicedb.Options{ + Addr: "localhost:7379", +}) +``` + +### 2. Create Short URL Endpoint +- **Input Validation**: Ensures the `long_url` field is present. +- **Short ID Generation**: Uses `uuid` to create a unique 8-character ID. +- **Data Serialization**: Converts the `URL` struct to JSON. 
+- **Data Storage**: Saves the JSON data in DiceDB with the `Set` command. +- **Response**: Returns the generated short URL. + +### 3. Redirect to Original URL Endpoint +- **Data Retrieval**: Fetches the URL data from DiceDB using the `Get` command. +- **Data Deserialization**: Converts JSON back to the `URL` struct. +- **Redirection**: Redirects the user to the `LongURL`. + +### 4. Start the Server +The `main` function sets up the routes and starts the server on port `8080`. + +## Running the Application + +### 1. Start the Go Application +```bash +go run main.go +``` +This will start the application server on port 8080 by default, you should see output similar to +```bash +[GIN-debug] Listening and serving HTTP on :8080 +``` + +### 2. Ensure DiceDB is Running +Ensure your DiceDB server is up and running on port `7379`. + +## Testing the application + +### 1. Shorten URL: +**Using `curl`:** +```bash +curl -X POST -H "Content-Type: application/json" -d '{"long_url": "https://example.com"}' http://localhost:8080/shorten +``` + +**Response:** +```json +{ +"short_url": "http://localhost:8080/" +} +``` + +### 2. Redirect to Original URL: +**Using `curl`:** +```bash +curl -L http://localhost:8080/abcd1234 +``` + +**Using a Browser:** +Navigate to: +``` +http://localhost:8080/abcd1234 +``` + +You should be redirected to `https://example.com`. diff --git a/examples/url-shortener/Dockerfile b/examples/url-shortener/Dockerfile new file mode 100644 index 000000000..c062275dc --- /dev/null +++ b/examples/url-shortener/Dockerfile @@ -0,0 +1,28 @@ +# Build Stage +FROM golang:1.23 AS builder + +WORKDIR /app + +# Copy go.mod and go.sum to download dependencies +COPY go.mod go.sum ./ +RUN go mod download + +# Copy application source code and build +COPY . . +RUN CGO_ENABLED=0 GOOS=linux go build -o server . + +# Runtime Stage +FROM alpine:latest + +# Install runtime dependencies +RUN apk --no-cache add ca-certificates + +# Copy the built application +WORKDIR /root/ +COPY --from=builder /app/server . + +# Expose the application port +EXPOSE 8080 + +# Start the application +CMD ["./server"] diff --git a/examples/url-shortener/compose.yaml b/examples/url-shortener/compose.yaml new file mode 100644 index 000000000..fad46cb2b --- /dev/null +++ b/examples/url-shortener/compose.yaml @@ -0,0 +1,17 @@ +services: + dicedb: + image: dicedb/dicedb:latest + ports: + - "7379:7379" + + url-shortener: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "8000:8000" + depends_on: + - dicedb + environment: + - DICEDB_HOST=dicedb + - DICEDB_PORT=7379 diff --git a/examples/url-shortener/go.mod b/examples/url-shortener/go.mod new file mode 100644 index 000000000..1dbd9137d --- /dev/null +++ b/examples/url-shortener/go.mod @@ -0,0 +1,41 @@ +module url-shortener + +go 1.23.2 + +require github.com/gin-gonic/gin v1.10.0 + +require ( + github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect +) + +require ( + github.com/bytedance/sonic v1.11.6 // indirect + github.com/bytedance/sonic/loader v0.1.1 // indirect + github.com/cloudwego/base64x v0.1.4 // indirect + github.com/cloudwego/iasm v0.2.0 // indirect + github.com/dicedb/dicedb-go v0.0.0-20241026093718-570de4575be3 + github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.20.0 // indirect + github.com/goccy/go-json v0.10.2 // indirect + github.com/google/uuid v1.6.0 + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/cpuid/v2 v2.2.7 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/pelletier/go-toml/v2 v2.2.2 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.12 // indirect + golang.org/x/arch v0.8.0 // indirect + golang.org/x/crypto v0.23.0 // indirect + golang.org/x/net v0.25.0 // indirect + golang.org/x/sys v0.20.0 // indirect + golang.org/x/text v0.15.0 // indirect + google.golang.org/protobuf v1.34.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/examples/url-shortener/go.sum b/examples/url-shortener/go.sum new file mode 100644 index 000000000..289e93e92 --- /dev/null +++ b/examples/url-shortener/go.sum @@ -0,0 +1,87 @@ +github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0= +github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= +github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= +github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= +github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= +github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= +github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= +github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/dicedb/dicedb-go v0.0.0-20241026093718-570de4575be3 
h1:JvnAibMNGA0vQH+T47Y/d5/POURIvfJl3fFk0GIEBkQ= +github.com/dicedb/dicedb-go v0.0.0-20241026093718-570de4575be3/go.mod h1:p7x5/3S6wBEmiRMwxavj1I1P1xsSVQS6fcSbeai5ic4= +github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= +github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8= +github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= +github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod 
h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc= +golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= +google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/examples/url-shortener/main.go b/examples/url-shortener/main.go new file mode 100644 index 000000000..583eb8a02 --- /dev/null +++ b/examples/url-shortener/main.go @@ -0,0 +1,100 @@ +package main + +import ( + "context" + "encoding/json" + "os" + "fmt" + + "log" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/dicedb/dicedb-go" // DiceDB Go SDK +) + +type URL struct { + LongURL string `json:"long_url"` +} + +var db *dicedb.Client + +// Initialize DiceDB connection +func init() { + dhost := "localhost" + if 
val := os.Getenv("DICEDB_HOST"); val != "" { + dhost = val + } + + dport := "7379" + if val := os.Getenv("DICEDB_PORT"); val != "" { + dport = val + } + + db = dicedb.NewClient(&dicedb.Options{ + Addr: fmt.Sprintf("%s:%s", dhost, dport), + }) +} + +// Creates a short URL from a given long URL +func createShortURL(c *gin.Context) { + var requestBody URL + if err := c.ShouldBindJSON(&requestBody); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request"}) + return + } + + // Generate unique short ID and construct the short URL + shortID := uuid.New().String()[:8] + shortURL := "http://localhost:8080/" + shortID + + // Serialize URL struct to JSON and store it in DiceDB + urlData, err := json.Marshal(requestBody) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) + return + } + + if err := db.Set(context.Background(), shortID, urlData, 0).Err(); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) + return + } + + c.JSON(http.StatusCreated, gin.H{"short_url": shortURL}) +} + +// Redirects to the original URL based on the short URL ID +func redirectURL(c *gin.Context) { + id := c.Param("id") + + // Retrieve stored URL data from DiceDB + urlData, err := db.Get(context.Background(), id).Result() + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "URL not found"}) + return + } + + // Deserialize JSON data back into URL struct + var url URL + if err := json.Unmarshal([]byte(urlData), &url); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode URL data"}) + return + } + + // Redirect user to the original long URL + c.Redirect(http.StatusFound, url.LongURL) +} + +func main() { + router := gin.Default() + + // Define endpoints for creating short URLs and redirecting + router.POST("/shorten", createShortURL) + router.GET("/:id", redirectURL) + + // Start the server on port 8080 + if err := router.Run(":8080"); err != nil { + log.Fatal("Failed to start server:", err) + } +} From dfe05d51cbcf89267d3ce10ed664260069885c29 Mon Sep 17 00:00:00 2001 From: Prashant Shubham Date: Mon, 9 Dec 2024 15:46:04 +0530 Subject: [PATCH 14/17] Disabled multishard cmds and test for HTTP/WS (#1363) --- .../commands/http/command_getkeys_test.go | 15 +- .../commands/http/command_rename_test.go | 4 + integration_tests/commands/http/copy_test.go | 4 + .../commands/http/dbsize_test.go | 4 + integration_tests/commands/http/json_test.go | 1 + integration_tests/commands/http/keys_test.go | 4 + integration_tests/commands/http/mget_test.go | 4 + integration_tests/commands/http/mset_test.go | 4 + .../commands/http/object_test.go | 4 + .../commands/http/set_data_cmd_test.go | 229 +++--- integration_tests/commands/http/setup.go | 7 +- integration_tests/commands/http/touch_test.go | 4 + .../commands/resp/command_getkeys_test.go | 3 +- integration_tests/commands/websocket/setup.go | 5 +- .../commands/websocket/writeretry_test.go | 10 +- internal/eval/commands.go | 98 --- internal/eval/eval.go | 526 -------------- internal/eval/eval_test.go | 133 +--- internal/eval/store_eval.go | 135 +++- internal/iothread/cmd_meta.go | 16 + internal/server/cmd_meta.go | 652 ------------------ internal/server/{utils => httpws}/httpResp.go | 2 +- internal/server/{ => httpws}/httpServer.go | 64 +- .../{utils => httpws}/redisCmdAdapter.go | 9 +- .../{utils => httpws}/redisCmdAdapter_test.go | 2 +- .../server/{ => httpws}/websocketServer.go | 16 +- main.go | 7 +- 27 files changed, 380 insertions(+), 1582 
deletions(-) delete mode 100644 internal/server/cmd_meta.go rename internal/server/{utils => httpws}/httpResp.go (92%) rename internal/server/{ => httpws}/httpServer.go (88%) rename internal/server/{utils => httpws}/redisCmdAdapter.go (97%) rename internal/server/{utils => httpws}/redisCmdAdapter_test.go (99%) rename internal/server/{ => httpws}/websocketServer.go (96%) diff --git a/integration_tests/commands/http/command_getkeys_test.go b/integration_tests/commands/http/command_getkeys_test.go index 906117296..3ebb1074a 100644 --- a/integration_tests/commands/http/command_getkeys_test.go +++ b/integration_tests/commands/http/command_getkeys_test.go @@ -38,13 +38,14 @@ func TestCommandGetKeys(t *testing.T) { }, expected: []interface{}{[]interface{}{"1 2 3 4 5 6 7"}}, }, - { - name: "MSET command", - commands: []HTTPCommand{ - {Command: "COMMAND/GETKEYS", Body: map[string]interface{}{"key": "MSET", "keys": []interface{}{"key1 key2"}, "values": []interface{}{" val1 val2"}}}, - }, - expected: []interface{}{[]interface{}{"key1 key2"}}, - }, + // Skipping these tests until multishards cmds supported by http + //{ + // name: "MSET command", + // commands: []HTTPCommand{ + // {Command: "COMMAND/GETKEYS", Body: map[string]interface{}{"key": "MSET", "keys": []interface{}{"key1 key2"}, "values": []interface{}{" val1 val2"}}}, + // }, + // expected: []interface{}{"ERR invalid command specified"}, + //}, { name: "Expire command", commands: []HTTPCommand{ diff --git a/integration_tests/commands/http/command_rename_test.go b/integration_tests/commands/http/command_rename_test.go index caef9ed8c..0c9845060 100644 --- a/integration_tests/commands/http/command_rename_test.go +++ b/integration_tests/commands/http/command_rename_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/http/copy_test.go b/integration_tests/commands/http/copy_test.go index 51fdaa41a..fc46da0ce 100644 --- a/integration_tests/commands/http/copy_test.go +++ b/integration_tests/commands/http/copy_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/http/dbsize_test.go b/integration_tests/commands/http/dbsize_test.go index a76929d66..430abab5e 100644 --- a/integration_tests/commands/http/dbsize_test.go +++ b/integration_tests/commands/http/dbsize_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/http/json_test.go b/integration_tests/commands/http/json_test.go index 8a3b862ec..ebcee8c07 100644 --- a/integration_tests/commands/http/json_test.go +++ b/integration_tests/commands/http/json_test.go @@ -881,6 +881,7 @@ func TestJsonStrlen(t *testing.T) { } func TestJSONMGET(t *testing.T) { + t.Skip("Skipping this test until multishards cmds supported by http") exec := NewHTTPCommandExecutor() setupData := map[string]string{ "xx": `["hehhhe","hello"]`, diff --git a/integration_tests/commands/http/keys_test.go b/integration_tests/commands/http/keys_test.go index 75c9b05b1..6219a3945 100644 --- a/integration_tests/commands/http/keys_test.go +++ b/integration_tests/commands/http/keys_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git 
a/integration_tests/commands/http/mget_test.go b/integration_tests/commands/http/mget_test.go index 93c97e0a7..ae85f68ff 100644 --- a/integration_tests/commands/http/mget_test.go +++ b/integration_tests/commands/http/mget_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/http/mset_test.go b/integration_tests/commands/http/mset_test.go index 8b8182e1f..7a0d47f87 100644 --- a/integration_tests/commands/http/mset_test.go +++ b/integration_tests/commands/http/mset_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/http/object_test.go b/integration_tests/commands/http/object_test.go index e16cdab27..b0b105883 100644 --- a/integration_tests/commands/http/object_test.go +++ b/integration_tests/commands/http/object_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/http/set_data_cmd_test.go b/integration_tests/commands/http/set_data_cmd_test.go index e850a7569..adbd66bce 100644 --- a/integration_tests/commands/http/set_data_cmd_test.go +++ b/integration_tests/commands/http/set_data_cmd_test.go @@ -159,120 +159,121 @@ func TestSetDataCmd(t *testing.T) { assert_type: []string{"equal", "array", "equal", "array"}, expected: []interface{}{float64(3), []any{string("bar"), string("baz"), string("bax")}, float64(0), []any{string("bar"), string("baz"), string("bax")}}, }, - { - name: "SADD & SDIFF", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "baz"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "bax"}}, - {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal", "equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), float64(1), float64(1), []any{string("bar")}}, - }, - { - name: "SADD & SDIFF with non-existing subsequent key", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), []any{string("bar"), string("baz")}}, - }, - { - name: "SADD & SDIFF with wrong key type", - commands: []HTTPCommand{ - {Command: "SET", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal"}, - expected: []interface{}{"OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, - }, - { - name: "SADD & SDIFF with subsequent key of wrong type", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SET", Body: map[string]interface{}{"key": "foo2", "value": "bar"}}, - 
{Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal", "equal", "equal"}, - expected: []interface{}{float64(1), float64(1), "OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, - }, - { - name: "SADD & SDIFF with non-existing first key", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SDIFF", Body: map[string]interface{}{"key1": "foo2", "key2": "foo"}}, - }, - assert_type: []string{"equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), []any{}}, - }, - { - name: "SADD & SDIFF with one key", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SDIFF", Body: map[string]interface{}{"key": "foo"}}, - }, - assert_type: []string{"equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), []any{string("bar"), string("baz")}}, - }, - { - name: "SADD & SINTER", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "baz"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "bax"}}, - {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal", "equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), float64(1), float64(1), []any{string("baz")}}, - }, - { - name: "SADD & SINTER with non-existing subsequent key", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), []any{}}, - }, - { - name: "SADD & SINTER with wrong key type", - commands: []HTTPCommand{ - {Command: "SET", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal"}, - expected: []interface{}{"OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, - }, - { - name: "SADD & SINTER with subsequent key of wrong type", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, - {Command: "SET", Body: map[string]interface{}{"key": "foo2", "value": "bar"}}, - {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, - }, - assert_type: []string{"equal", "equal", "equal", "equal"}, - expected: []interface{}{float64(1), float64(1), "OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, - }, - { - name: "SADD & SINTER with single key", - commands: []HTTPCommand{ - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, - {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": 
"baz"}}, - {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo"}}}, - }, - assert_type: []string{"equal", "equal", "array"}, - expected: []interface{}{float64(1), float64(1), []any{string("bar"), string("baz")}}, - }, + // Skipping these tests until multishards cmds supported by http + //{ + // name: "SADD & SDIFF", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "baz"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "bax"}}, + // {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal", "equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), float64(1), float64(1), []any{string("bar")}}, + //}, + //{ + // name: "SADD & SDIFF with non-existing subsequent key", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), []any{string("bar"), string("baz")}}, + //}, + //{ + // name: "SADD & SDIFF with wrong key type", + // commands: []HTTPCommand{ + // {Command: "SET", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal"}, + // expected: []interface{}{"OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, + //}, + //{ + // name: "SADD & SDIFF with subsequent key of wrong type", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SET", Body: map[string]interface{}{"key": "foo2", "value": "bar"}}, + // {Command: "SDIFF", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal", "equal", "equal"}, + // expected: []interface{}{float64(1), float64(1), "OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, + //}, + //{ + // name: "SADD & SDIFF with non-existing first key", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SDIFF", Body: map[string]interface{}{"key1": "foo2", "key2": "foo"}}, + // }, + // assert_type: []string{"equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), []any{}}, + //}, + //{ + // name: "SADD & SDIFF with one key", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SDIFF", Body: map[string]interface{}{"key": "foo"}}, + // }, + // assert_type: []string{"equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), []any{string("bar"), string("baz")}}, + //}, + //{ + 
// name: "SADD & SINTER", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "baz"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo2", "value": "bax"}}, + // {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal", "equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), float64(1), float64(1), []any{string("baz")}}, + //}, + //{ + // name: "SADD & SINTER with non-existing subsequent key", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), []any{}}, + //}, + //{ + // name: "SADD & SINTER with wrong key type", + // commands: []HTTPCommand{ + // {Command: "SET", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal"}, + // expected: []interface{}{"OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, + //}, + //{ + // name: "SADD & SINTER with subsequent key of wrong type", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SET", Body: map[string]interface{}{"key": "foo2", "value": "bar"}}, + // {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo", "foo2"}}}, + // }, + // assert_type: []string{"equal", "equal", "equal", "equal"}, + // expected: []interface{}{float64(1), float64(1), "OK", "WRONGTYPE Operation against a key holding the wrong kind of value"}, + //}, + //{ + // name: "SADD & SINTER with single key", + // commands: []HTTPCommand{ + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "bar"}}, + // {Command: "SADD", Body: map[string]interface{}{"key": "foo", "value": "baz"}}, + // {Command: "SINTER", Body: map[string]interface{}{"values": []interface{}{"foo"}}}, + // }, + // assert_type: []string{"equal", "equal", "array"}, + // expected: []interface{}{float64(1), float64(1), []any{string("bar"), string("baz")}}, + //}, } defer exec.FireCommand(HTTPCommand{ diff --git a/integration_tests/commands/http/setup.go b/integration_tests/commands/http/setup.go index d27c67fbd..24ef1aa00 100644 --- a/integration_tests/commands/http/setup.go +++ b/integration_tests/commands/http/setup.go @@ -12,12 +12,11 @@ import ( "sync" "time" - "github.com/dicedb/dice/internal/server/utils" + "github.com/dicedb/dice/internal/server/httpws" "github.com/dicedb/dice/config" derrors "github.com/dicedb/dice/internal/errors" "github.com/dicedb/dice/internal/querymanager" - "github.com/dicedb/dice/internal/server" "github.com/dicedb/dice/internal/shard" dstore "github.com/dicedb/dice/internal/store" ) @@ -88,7 +87,7 @@ func (e *HTTPCommandExecutor) FireCommand(cmd HTTPCommand) (interface{}, error) defer resp.Body.Close() if cmd.Command 
!= "Q.WATCH" { - var result utils.HTTPResponse + var result httpws.HTTPResponse err = json.NewDecoder(resp.Body).Decode(&result) if err != nil { return nil, err @@ -119,7 +118,7 @@ func RunHTTPServer(ctx context.Context, wg *sync.WaitGroup, opt TestServerOption queryWatcherLocal := querymanager.NewQueryManager() config.DiceConfig.HTTP.Port = opt.Port // Initialize the HTTPServer - testServer := server.NewHTTPServer(shardManager, nil) + testServer := httpws.NewHTTPServer(shardManager, nil) // Inform the user that the server is starting fmt.Println("Starting the test server on port", config.DiceConfig.HTTP.Port) shardManagerCtx, cancelShardManager := context.WithCancel(ctx) diff --git a/integration_tests/commands/http/touch_test.go b/integration_tests/commands/http/touch_test.go index 88400e51c..454a24821 100644 --- a/integration_tests/commands/http/touch_test.go +++ b/integration_tests/commands/http/touch_test.go @@ -1,3 +1,7 @@ +//go:build ignore +// +build ignore + +// Ignored as multishard commands not supported by HTTP package http import ( diff --git a/integration_tests/commands/resp/command_getkeys_test.go b/integration_tests/commands/resp/command_getkeys_test.go index 607bba994..166079b4b 100644 --- a/integration_tests/commands/resp/command_getkeys_test.go +++ b/integration_tests/commands/resp/command_getkeys_test.go @@ -15,7 +15,8 @@ var getKeysTestCases = []struct { {"Get command", "get key", []interface{}{"key"}}, {"TTL command", "ttl key", []interface{}{"key"}}, {"Del command", "del 1 2 3 4 5 6", []interface{}{"1", "2", "3", "4", "5", "6"}}, - {"MSET command", "MSET key1 val1 key2 val2", []interface{}{"key1", "key2"}}, + // TODO: Fix this for multi shard support + //{"MSET command", "MSET key1 val1 key2 val2", []interface{}{"key1", "key2"}}, {"Expire command", "expire key time extra", []interface{}{"key"}}, {"Ping command", "ping", "ERR the command has no key arguments"}, {"Invalid Get command", "get", "ERR invalid number of arguments specified for command"}, diff --git a/integration_tests/commands/websocket/setup.go b/integration_tests/commands/websocket/setup.go index cbfcc286a..281836f30 100644 --- a/integration_tests/commands/websocket/setup.go +++ b/integration_tests/commands/websocket/setup.go @@ -11,10 +11,11 @@ import ( "sync" "time" + "github.com/dicedb/dice/internal/server/httpws" + "github.com/dicedb/dice/config" derrors "github.com/dicedb/dice/internal/errors" "github.com/dicedb/dice/internal/querymanager" - "github.com/dicedb/dice/internal/server" "github.com/dicedb/dice/internal/shard" dstore "github.com/dicedb/dice/internal/store" "github.com/gorilla/websocket" @@ -117,7 +118,7 @@ func RunWebsocketServer(ctx context.Context, wg *sync.WaitGroup, opt TestServerO shardManager := shard.NewShardManager(1, watchChan, nil, globalErrChannel) queryWatcherLocal := querymanager.NewQueryManager() config.DiceConfig.WebSocket.Port = opt.Port - testServer := server.NewWebSocketServer(shardManager, testPort1, nil) + testServer := httpws.NewWebSocketServer(shardManager, testPort1, nil) shardManagerCtx, cancelShardManager := context.WithCancel(ctx) // run shard manager diff --git a/integration_tests/commands/websocket/writeretry_test.go b/integration_tests/commands/websocket/writeretry_test.go index 19555ed10..978b836af 100644 --- a/integration_tests/commands/websocket/writeretry_test.go +++ b/integration_tests/commands/websocket/writeretry_test.go @@ -2,6 +2,7 @@ package websocket import ( "fmt" + "github.com/dicedb/dice/internal/server/httpws" "net" "net/http" "net/url" @@ -9,7 
+10,6 @@ import ( "testing" "time" - "github.com/dicedb/dice/internal/server" "github.com/gorilla/websocket" "github.com/stretchr/testify/assert" ) @@ -22,7 +22,7 @@ func TestWriteResponseWithRetries_Success(t *testing.T) { defer conn.Close() // Complete a write without any errors - err := server.WriteResponseWithRetries(conn, []byte("hello"), 3) + err := httpws.WriteResponseWithRetries(conn, []byte("hello"), 3) assert.NoError(t, err) } @@ -33,7 +33,7 @@ func TestWriteResponseWithRetries_NetworkError(t *testing.T) { // Simulate a network error by closing the connection beforehand conn.Close() - err := server.WriteResponseWithRetries(conn, []byte("hello"), 3) + err := httpws.WriteResponseWithRetries(conn, []byte("hello"), 3) assert.Error(t, err) assert.Contains(t, err.Error(), "network operation error") } @@ -45,7 +45,7 @@ func TestWriteResponseWithRetries_BrokenPipe(t *testing.T) { // Simulate a broken pipe error by manually triggering it. conn.UnderlyingConn().(*net.TCPConn).CloseWrite() - err := server.WriteResponseWithRetries(conn, []byte("hello"), 3) + err := httpws.WriteResponseWithRetries(conn, []byte("hello"), 3) assert.Error(t, err) assert.Contains(t, err.Error(), "broken pipe") } @@ -60,7 +60,7 @@ func TestWriteResponseWithRetries_EAGAINRetry(t *testing.T) { conn.SetWriteDeadline(time.Now().Add(1 * time.Millisecond)) for retries < 2 { - err := server.WriteResponseWithRetries(conn, []byte("hello"), 3) + err := httpws.WriteResponseWithRetries(conn, []byte("hello"), 3) if err != nil { // Retry and reset deadline after a failed attempt. conn.SetWriteDeadline(time.Now().Add(100 * time.Millisecond)) diff --git a/internal/eval/commands.go b/internal/eval/commands.go index 787bca20c..8b2d94bb7 100644 --- a/internal/eval/commands.go +++ b/internal/eval/commands.go @@ -111,55 +111,6 @@ var ( // their implementation for HTTP and WebSocket protocols is still pending. // As a result, their Eval functions remained intact. var ( - msetCmdMeta = DiceCmdMeta{ - Name: "MSET", - Info: `MSET sets multiple keys to multiple values in the db - args should contain an even number of elements - each pair of elements will be treated as pair - Returns encoded error response if the number of arguments is not even - Returns encoded OK RESP once all entries are added`, - Eval: evalMSET, - Arity: -3, - KeySpecs: KeySpecs{BeginIndex: 1, Step: 2, LastKey: -1}, - } - - jsonMGetCmdMeta = DiceCmdMeta{ - Name: "JSON.MGET", - Info: `JSON.MGET key..key [path] - Returns the encoded RESP value of the key, if present - Null reply: If the key doesn't exist or has expired. - Error reply: If the number of arguments is incorrect or the stored value is not a JSON type.`, - Eval: evalJSONMGET, - Arity: 2, - KeySpecs: KeySpecs{BeginIndex: 1}, - } - - keysCmdMeta = DiceCmdMeta{ - Name: "KEYS", - Info: "KEYS command is used to get all the keys in the database. Complexity is O(n) where n is the number of keys in the database.", - Eval: evalKeys, - Arity: 1, - } - - MGetCmdMeta = DiceCmdMeta{ - Name: "MGET", - Info: `The MGET command returns an array of RESP values corresponding to the provided keys. - For each key, if the key is expired or does not exist, the response will be RespNIL; - otherwise, the response will be the RESP value of the key. 
- `, - Eval: evalMGET, - Arity: -2, - KeySpecs: KeySpecs{BeginIndex: 1, Step: 1, LastKey: -1}, - } - - //TODO: supports only http protocol, needs to be removed once http is migrated to multishard - copyCmdMeta = DiceCmdMeta{ - Name: "COPY", - Info: `COPY command copies the value stored at the source key to the destination key.`, - Eval: evalCOPY, - Arity: -2, - } - //TODO: supports only http protocol, needs to be removed once http is migrated to multishard objectCopyCmdMeta = DiceCmdMeta{ Name: "OBJECTCOPY", @@ -168,39 +119,6 @@ var ( IsMigrated: true, Arity: -2, } - touchCmdMeta = DiceCmdMeta{ - Name: "TOUCH", - Info: `TOUCH key1 key2 ... key_N - Alters the last access time of a key(s). - A key is ignored if it does not exist.`, - Eval: evalTOUCH, - Arity: -2, - KeySpecs: KeySpecs{BeginIndex: 1}, - } - sdiffCmdMeta = DiceCmdMeta{ - Name: "SDIFF", - Info: `SDIFF key1 [key2 ... key_N] - Returns the members of the set resulting from the difference between the first set and all the successive sets. - Non existing keys are treated as empty sets.`, - Eval: evalSDIFF, - Arity: -2, - KeySpecs: KeySpecs{BeginIndex: 1}, - } - sinterCmdMeta = DiceCmdMeta{ - Name: "SINTER", - Info: `SINTER key1 [key2 ... key_N] - Returns the members of the set resulting from the intersection of all the given sets. - Non existing keys are treated as empty sets.`, - Eval: evalSINTER, - Arity: -2, - KeySpecs: KeySpecs{BeginIndex: 1}, - } - dbSizeCmdMeta = DiceCmdMeta{ - Name: "DBSIZE", - Info: `DBSIZE Return the number of keys in the database`, - Eval: evalDBSIZE, - Arity: 1, - } ) // Single Shard command @@ -716,12 +634,6 @@ var ( IsMigrated: true, NewEval: evalEXISTS, } - renameCmdMeta = DiceCmdMeta{ - Name: "RENAME", - Info: "Renames a key and overwrites the destination", - Eval: evalRename, - Arity: 3, - } getexCmdMeta = DiceCmdMeta{ Name: "GETEX", Info: `Get the value of key and optionally set its expiration. 
@@ -1429,9 +1341,7 @@ func init() { DiceCmds["COMMAND|INFO"] = commandInfoCmdMeta DiceCmds["COMMAND|DOCS"] = commandDocsCmdMeta DiceCmds["COMMAND|GETKEYSANDFLAGS"] = commandGetKeysAndFlagsCmdMeta - DiceCmds["COPY"] = copyCmdMeta DiceCmds["OBJECTCOPY"] = objectCopyCmdMeta - DiceCmds["DBSIZE"] = dbSizeCmdMeta DiceCmds["DECR"] = decrCmdMeta DiceCmds["DECRBY"] = decrByCmdMeta DiceCmds["DEL"] = delCmdMeta @@ -1481,7 +1391,6 @@ func init() { DiceCmds["JSON.FORGET"] = jsonforgetCmdMeta DiceCmds["JSON.GET"] = jsongetCmdMeta DiceCmds["JSON.INGEST"] = jsoningestCmdMeta - DiceCmds["JSON.MGET"] = jsonMGetCmdMeta DiceCmds["JSON.NUMINCRBY"] = jsonnumincrbyCmdMeta DiceCmds["JSON.NUMMULTBY"] = jsonnummultbyCmdMeta DiceCmds["JSON.OBJKEYS"] = jsonobjkeysCmdMeta @@ -1491,13 +1400,10 @@ func init() { DiceCmds["JSON.STRLEN"] = jsonStrlenCmdMeta DiceCmds["JSON.TOGGLE"] = jsontoggleCmdMeta DiceCmds["JSON.TYPE"] = jsontypeCmdMeta - DiceCmds["KEYS"] = keysCmdMeta DiceCmds["LATENCY"] = latencyCmdMeta DiceCmds["LLEN"] = llenCmdMeta DiceCmds["LPOP"] = lpopCmdMeta DiceCmds["LPUSH"] = lpushCmdMeta - DiceCmds["MGET"] = MGetCmdMeta - DiceCmds["MSET"] = msetCmdMeta DiceCmds["OBJECT"] = objectCmdMeta DiceCmds["PERSIST"] = persistCmdMeta DiceCmds["PFADD"] = pfAddCmdMeta @@ -1505,21 +1411,17 @@ func init() { DiceCmds["PFMERGE"] = pfMergeCmdMeta DiceCmds["PING"] = pingCmdMeta DiceCmds["PTTL"] = pttlCmdMeta - DiceCmds["RENAME"] = renameCmdMeta DiceCmds["RESTORE"] = restorekeyCmdMeta DiceCmds["RPOP"] = rpopCmdMeta DiceCmds["RPUSH"] = rpushCmdMeta DiceCmds["SADD"] = saddCmdMeta DiceCmds["SCARD"] = scardCmdMeta - DiceCmds["SDIFF"] = sdiffCmdMeta DiceCmds["SET"] = setCmdMeta DiceCmds["SETBIT"] = setBitCmdMeta DiceCmds["SETEX"] = setexCmdMeta - DiceCmds["SINTER"] = sinterCmdMeta DiceCmds["SLEEP"] = sleepCmdMeta DiceCmds["SMEMBERS"] = smembersCmdMeta DiceCmds["SREM"] = sremCmdMeta - DiceCmds["TOUCH"] = touchCmdMeta DiceCmds["TTL"] = ttlCmdMeta DiceCmds["TYPE"] = typeCmdMeta DiceCmds["ZADD"] = zaddCmdMeta diff --git a/internal/eval/eval.go b/internal/eval/eval.go index 0b28e079d..c571bd6a0 100644 --- a/internal/eval/eval.go +++ b/internal/eval/eval.go @@ -1,26 +1,20 @@ package eval import ( - "errors" "fmt" - "sort" "strconv" - "strings" "time" "github.com/dicedb/dice/internal/object" "github.com/dicedb/dice/internal/sql" - "github.com/bytedance/sonic" "github.com/dicedb/dice/config" "github.com/dicedb/dice/internal/clientio" "github.com/dicedb/dice/internal/comm" diceerrors "github.com/dicedb/dice/internal/errors" "github.com/dicedb/dice/internal/querymanager" - "github.com/dicedb/dice/internal/server/utils" dstore "github.com/dicedb/dice/internal/store" - "github.com/ohler55/ojg/jp" ) type exDurationState int @@ -146,213 +140,6 @@ func EvalAUTH(args []string, c *comm.Client) []byte { return clientio.RespOK } -// evalMSET puts multiple pairs in db as in the args -// MSET is atomic, so all given keys are set at once. -// args must contain key and value pairs. 
- -// Returns encoded error response if at least a pair is not part of args -// Returns encoded OK RESP once new entries are added -// If the key already exists then the value will be overwritten and expiry will be discarded -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalMSET(args []string, store *dstore.Store) []byte { - if len(args) <= 1 || len(args)%2 != 0 { - return diceerrors.NewErrArity("MSET") - } - - // MSET does not have expiry support - var exDurationMs int64 = -1 - - insertMap := make(map[string]*object.Obj, len(args)/2) - for i := 0; i < len(args); i += 2 { - key, value := args[i], args[i+1] - storedValue, oType := getRawStringOrInt(value) - insertMap[key] = store.NewObj(storedValue, exDurationMs, oType) - } - - store.PutAll(insertMap) - return clientio.RespOK -} - -// evalDBSIZE returns the number of keys in the database. -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalDBSIZE(args []string, store *dstore.Store) []byte { - if len(args) > 0 { - return diceerrors.NewErrArity("DBSIZE") - } - - // Expired keys must be explicitly deleted since the cronFrequency for cleanup is configurable. - // A longer delay may prevent timely cleanup, leading to incorrect DBSIZE results. - dstore.DeleteExpiredKeys(store) - // return the RESP encoded value - return clientio.Encode(store.GetDBSize(), false) -} - -// trimElementAndUpdateArray trim the array between the given start and stop index -// Returns trimmed array -func trimElementAndUpdateArray(arr []any, start, stop int) []any { - updatedArray := make([]any, 0) - length := len(arr) - if len(arr) == 0 { - return updatedArray - } - var startIdx, stopIdx int - - if start >= length { - return updatedArray - } - - startIdx = adjustIndex(start, arr) - stopIdx = adjustIndex(stop, arr) - - if startIdx > stopIdx { - return updatedArray - } - - updatedArray = arr[startIdx : stopIdx+1] - return updatedArray -} - -// insertElementAndUpdateArray add an element at the given index -// Returns remaining array and error -func insertElementAndUpdateArray(arr []any, index int, elements []interface{}) (updatedArray []any, err error) { - length := len(arr) - var idx int - if index >= -length && index <= length { - idx = adjustIndex(index, arr) - } else { - return nil, errors.New("index out of bounds") - } - before := arr[:idx] - after := arr[idx:] - - elements = append(elements, after...) - before = append(before, elements...) - updatedArray = append(updatedArray, before...) 
- return updatedArray, nil -} - -// adjustIndex will bound the array between 0 and len(arr) - 1 -// It also handles negative indexes -func adjustIndex(idx int, arr []any) int { - // if index is positive and out of bound, limit it to the last index - if idx > len(arr) { - idx = len(arr) - 1 - } - - // if index is negative, change it to equivalent positive index - if idx < 0 { - // if index is out of bound then limit it to the first index - if idx < -len(arr) { - idx = 0 - } else { - idx = len(arr) + idx - } - } - return idx -} - -// evalJSONMGET retrieves a JSON value stored for the multiple key -// args must contain at least the key and a path; -// Returns encoded error response if incorrect number of arguments -// The RESP value of the key is encoded and then returned -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalJSONMGET(args []string, store *dstore.Store) []byte { - if len(args) < 2 { - return diceerrors.NewErrArity("JSON.MGET") - } - - var results []interface{} - - // Default path is root if not specified - argsLen := len(args) - path := args[argsLen-1] - - for i := 0; i < (argsLen - 1); i++ { - key := args[i] - result, _ := jsonMGETHelper(store, path, key) - results = append(results, result) - } - - var interfaceObj interface{} = results - return clientio.Encode(interfaceObj, false) -} - -func jsonMGETHelper(store *dstore.Store, path, key string) (result interface{}, err2 []byte) { - // Retrieve the object from the database - obj := store.Get(key) - if obj == nil { - return result, nil - } - - // Check if the object is of JSON type - if errWithMessage := object.AssertType(obj.Type, object.ObjTypeJSON); errWithMessage != nil { - return result, errWithMessage - } - - jsonData := obj.Value - - // If path is root, return the entire JSON - if path == defaultRootPath { - resultBytes, err := sonic.Marshal(jsonData) - if err != nil { - return result, diceerrors.NewErrWithMessage("could not serialize result") - } - return string(resultBytes), nil - } - - // Parse the JSONPath expression - expr, err := jp.ParseString(path) - if err != nil { - return result, diceerrors.NewErrWithMessage("invalid JSONPath") - } - - // Execute the JSONPath query - results := expr.Get(jsonData) - if len(results) == 0 { - return result, diceerrors.NewErrWithMessage(fmt.Sprintf("Path '%s' does not exist", path)) - } - - // Serialize the result - var resultBytes []byte - if len(results) == 1 { - resultBytes, err = sonic.Marshal(results[0]) - } else { - resultBytes, err = sonic.Marshal(results) - } - if err != nil { - return nil, diceerrors.NewErrWithMessage("could not serialize result") - } - return string(resultBytes), nil -} - -// ReverseSlice takes a slice of any type and returns a new slice with the elements reversed. 
-func ReverseSlice[T any](slice []T) []T { - reversed := make([]T, len(slice)) - for i, v := range slice { - reversed[len(slice)-1-i] = v - } - return reversed -} - -// Parses and returns the input string as an int64 or float64 -func parseFloatInt(input string) (result interface{}, err error) { - // Try to parse as an integer - if intValue, parseErr := strconv.ParseInt(input, 10, 64); parseErr == nil { - result = intValue - return - } - - // Try to parse as a float - if floatValue, parseErr := strconv.ParseFloat(input, 64); parseErr == nil { - result = floatValue - return - } - - // If neither parsing succeeds, return an error - err = errors.New("invalid input: not a valid int or float") - return -} - func evalHELLO(args []string, store *dstore.Store) []byte { if len(args) > 1 { return diceerrors.NewErrArity("HELLO") @@ -473,316 +260,3 @@ func EvalQUNWATCH(args []string, httpOp bool, client *comm.Client) []byte { return clientio.RespOK } - -// evalKeys returns the list of keys that match the pattern should be the only param in args -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalKeys(args []string, store *dstore.Store) []byte { - if len(args) != 1 { - return diceerrors.NewErrArity("KEYS") - } - - pattern := args[0] - keys, err := store.Keys(pattern) - if err != nil { - return clientio.Encode(err, false) - } - - return clientio.Encode(keys, false) -} - -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalRename(args []string, store *dstore.Store) []byte { - if len(args) != 2 { - return diceerrors.NewErrArity("RENAME") - } - sourceKey := args[0] - destKey := args[1] - - // if Source key does not exist, return RESP encoded nil - sourceObj := store.Get(sourceKey) - if sourceObj == nil { - return diceerrors.NewErrWithMessage(diceerrors.NoKeyErr) - } - - // if Source and Destination Keys are same return RESP encoded ok - if sourceKey == destKey { - return clientio.RespOK - } - - if ok := store.Rename(sourceKey, destKey); ok { - return clientio.RespOK - } - return clientio.RespNIL -} - -// The MGET command returns an array of RESP values corresponding to the provided keys. -// For each key, if the key is expired or does not exist, the response will be response.RespNIL; -// otherwise, the response will be the RESP value of the key. 
-// MGET is atomic, it retrieves all values at once -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalMGET(args []string, store *dstore.Store) []byte { - if len(args) < 1 { - return diceerrors.NewErrArity("MGET") - } - values := store.GetAll(args) - resp := make([]interface{}, len(args)) - for i, obj := range values { - if obj == nil { - resp[i] = clientio.RespNIL - } else { - resp[i] = obj.Value - } - } - return clientio.Encode(resp, false) -} - -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalCOPY(args []string, store *dstore.Store) []byte { - if len(args) < 2 { - return diceerrors.NewErrArity("COPY") - } - - isReplace := false - - sourceKey := args[0] - destinationKey := args[1] - sourceObj := store.Get(sourceKey) - if sourceObj == nil { - return clientio.RespZero - } - - for i := 2; i < len(args); i++ { - arg := strings.ToUpper(args[i]) - if arg == dstore.Replace { - isReplace = true - } - } - - if isReplace { - store.Del(destinationKey) - } - - destinationObj := store.Get(destinationKey) - if destinationObj != nil { - return clientio.RespZero - } - - copyObj := sourceObj.DeepCopy() - if copyObj == nil { - return clientio.RespZero - } - - exp, ok := dstore.GetExpiry(sourceObj, store) - var exDurationMs int64 = -1 - if ok { - exDurationMs = int64(exp - uint64(utils.GetCurrentTime().UnixMilli())) - } - - store.Put(destinationKey, copyObj) - - if exDurationMs > 0 { - store.SetExpiry(copyObj, exDurationMs) - } - return clientio.RespOne -} - -// TODO: Needs to be removed after http and websocket migrated to the multithreading -func evalTOUCH(args []string, store *dstore.Store) []byte { - if len(args) == 0 { - return diceerrors.NewErrArity("TOUCH") - } - - count := 0 - for _, key := range args { - if store.Get(key) != nil { - count++ - } - } - - return clientio.Encode(count, false) -} - -func evalSDIFF(args []string, store *dstore.Store) []byte { - if len(args) < 1 { - return diceerrors.NewErrArity("SDIFF") - } - - srcKey := args[0] - obj := store.Get(srcKey) - - // if the source key does not exist, return an empty response - if obj == nil { - return clientio.Encode([]string{}, false) - } - - if err := object.AssertType(obj.Type, object.ObjTypeSet); err != nil { - return diceerrors.NewErrWithFormattedMessage(diceerrors.WrongTypeErr) - } - - // Get the set object from the store. - // store the count as the number of elements in the first set - srcSet := obj.Value.(map[string]struct{}) - count := len(srcSet) - - tmpSet := make(map[string]struct{}, count) - for k := range srcSet { - tmpSet[k] = struct{}{} - } - - // we decrement the count as we find the elements in the other sets - // if the count is 0, we skip further sets but still get them from - // the store to check if they are set objects and update their last accessed time - - for _, arg := range args[1:] { - // Get the set object from the store. - obj := store.Get(arg) - - if obj == nil { - continue - } - - // If the object exists, check if it is a set object. - if err := object.AssertType(obj.Type, object.ObjTypeSet); err != nil { - return diceerrors.NewErrWithFormattedMessage(diceerrors.WrongTypeErr) - } - - // only if the count is greater than 0, we need to check the other sets - if count > 0 { - // Get the set object. 
- set := obj.Value.(map[string]struct{}) - - for k := range set { - if _, ok := tmpSet[k]; ok { - delete(tmpSet, k) - count-- - } - } - } - } - - if count == 0 { - return clientio.Encode([]string{}, false) - } - - // Get the members of the set. - members := make([]string, 0, len(tmpSet)) - for k := range tmpSet { - members = append(members, k) - } - return clientio.Encode(members, false) -} - -// Migrated to the new eval, but kept for http and websocket -func evalSINTER(args []string, store *dstore.Store) []byte { - if len(args) < 1 { - return diceerrors.NewErrArity("SINTER") - } - - sets := make([]map[string]struct{}, 0, len(args)) - - empty := 0 - - for _, arg := range args { - // Get the set object from the store. - obj := store.Get(arg) - - if obj == nil { - empty++ - continue - } - - // If the object exists, check if it is a set object. - if err := object.AssertType(obj.Type, object.ObjTypeSet); err != nil { - return diceerrors.NewErrWithFormattedMessage(diceerrors.WrongTypeErr) - } - - // Get the set object. - set := obj.Value.(map[string]struct{}) - sets = append(sets, set) - } - - if empty > 0 { - return clientio.Encode([]string{}, false) - } - - // sort the sets by the number of elements in the set - // we will iterate over the smallest set - // and check if the element is present in all the other sets - sort.Slice(sets, func(i, j int) bool { - return len(sets[i]) < len(sets[j]) - }) - - count := 0 - resultSet := make(map[string]struct{}, len(sets[0])) - - // init the result set with the first set - // store the number of elements in the first set in count - // we will decrement the count if we do not find the elements in the other sets - for k := range sets[0] { - resultSet[k] = struct{}{} - count++ - } - - for i := 1; i < len(sets); i++ { - if count == 0 { - break - } - for k := range resultSet { - if _, ok := sets[i][k]; !ok { - delete(resultSet, k) - count-- - } - } - } - - if count == 0 { - return clientio.Encode([]string{}, false) - } - - members := make([]string, 0, len(resultSet)) - for k := range resultSet { - members = append(members, k) - } - return clientio.Encode(members, false) -} - -// formatFloat formats float64 as string. -// Optionally appends a decimal (.0) for whole numbers, -// if b is true. -func formatFloat(f float64, b bool) string { - formatted := strconv.FormatFloat(f, 'f', -1, 64) - if b { - parts := strings.Split(formatted, ".") - if len(parts) == 1 { - formatted += ".0" - } - } - return formatted -} - -// This method executes each operation, contained in ops array, based on commands used. 
-func executeBitfieldOps(value *ByteArray, ops []utils.BitFieldOp) []interface{} { - overflowType := WRAP - var result []interface{} - for _, op := range ops { - switch op.Kind { - case GET: - res := value.getBits(int(op.Offset), int(op.EVal), op.EType == SIGNED) - result = append(result, res) - case SET: - prevValue := value.getBits(int(op.Offset), int(op.EVal), op.EType == SIGNED) - value.setBits(int(op.Offset), int(op.EVal), op.Value) - result = append(result, prevValue) - case INCRBY: - res, err := value.incrByBits(int(op.Offset), int(op.EVal), op.Value, overflowType, op.EType == SIGNED) - if err != nil { - result = append(result, nil) - } else { - result = append(result, res) - } - case OVERFLOW: - overflowType = op.EType - } - } - return result -} diff --git a/internal/eval/eval_test.go b/internal/eval/eval_test.go index f1a629a6e..41b0088df 100644 --- a/internal/eval/eval_test.go +++ b/internal/eval/eval_test.go @@ -43,7 +43,6 @@ func setupTest(store *dstore.Store) *dstore.Store { func TestEval(t *testing.T) { store := dstore.NewStore(nil, nil, nil) - testEvalMSET(t, store) testEvalECHO(t, store) testEvalHELLO(t, store) testEvalSET(t, store) @@ -72,7 +71,6 @@ func TestEval(t *testing.T) { testEvalEXPIRE(t, store) testEvalEXPIRETIME(t, store) testEvalEXPIREAT(t, store) - testEvalDbsize(t, store) testEvalGETSET(t, store) testEvalHSET(t, store) testEvalHMSET(t, store) @@ -132,7 +130,6 @@ func TestEval(t *testing.T) { testEvalBitFieldRO(t, store) testEvalGEOADD(t, store) testEvalGEODIST(t, store) - testEvalSINTER(t, store) testEvalJSONSTRAPPEND(t, store) testEvalINCR(t, store) testEvalINCRBY(t, store) @@ -464,19 +461,6 @@ func testEvalGETDEL(t *testing.T, store *dstore.Store) { runMigratedEvalTests(t, tests, evalGETDEL, store) } -func testEvalMSET(t *testing.T, store *dstore.Store) { - tests := map[string]evalTestCase{ - "nil value": {input: nil, output: []byte("-ERR wrong number of arguments for 'mset' command\r\n")}, - "empty array": {input: []string{}, output: []byte("-ERR wrong number of arguments for 'mset' command\r\n")}, - "one value": {input: []string{"KEY"}, output: []byte("-ERR wrong number of arguments for 'mset' command\r\n")}, - "key val pair": {input: []string{"KEY", "VAL"}, output: clientio.RespOK}, - "odd key val pair": {input: []string{"KEY", "VAL", "KEY2"}, output: []byte("-ERR wrong number of arguments for 'mset' command\r\n")}, - "even key val pair": {input: []string{"KEY", "VAL", "KEY2", "VAL2"}, output: clientio.RespOK}, - } - - runEvalTests(t, tests, evalMSET, store) -} - func testEvalGET(t *testing.T, store *dstore.Store) { tests := []evalTestCase{ { @@ -2896,54 +2880,6 @@ func testEvalPersist(t *testing.T, store *dstore.Store) { runMigratedEvalTests(t, tests, evalPERSIST, store) } -func testEvalDbsize(t *testing.T, store *dstore.Store) { - tests := map[string]evalTestCase{ - "DBSIZE command with invalid no of args": { - input: []string{"INVALID_ARG"}, - output: []byte("-ERR wrong number of arguments for 'dbsize' command\r\n"), - }, - "no key in db": { - input: nil, - output: []byte(":0\r\n"), - }, - "one key exists in db": { - setup: func() { - evalSET([]string{"key", "val"}, store) - }, - input: nil, - output: []byte(":1\r\n"), - }, - "two keys exist in db": { - setup: func() { - evalSET([]string{"key1", "val1"}, store) - evalSET([]string{"key2", "val2"}, store) - }, - input: nil, - output: []byte(":2\r\n"), - }, - "repeating keys shall result in same dbsize": { - setup: func() { - evalSET([]string{"key1", "val1"}, store) - evalSET([]string{"key2", "val2"}, 
store) - evalSET([]string{"key2", "val2"}, store) - }, - input: nil, - output: []byte(":2\r\n"), - }, - "deleted keys shall be reflected in dbsize": { - setup: func() { - evalSET([]string{"key1", "val1"}, store) - evalSET([]string{"key2", "val2"}, store) - evalDEL([]string{"key2"}, store) - }, - input: nil, - output: []byte(":1\r\n"), - }, - } - - runEvalTests(t, tests, evalDBSIZE, store) -} - func testEvalPFADD(t *testing.T, store *dstore.Store) { tests := map[string]evalTestCase{ "PFADD nil value": { @@ -4397,14 +4333,6 @@ func runMigratedEvalTests(t *testing.T, tests map[string]evalTestCase, evalFunc } } -func BenchmarkEvalMSET(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - store := dstore.NewStore(nil, nil, nil) - evalMSET([]string{"KEY", "VAL", "KEY2", "VAL2"}, store) - } -} - func BenchmarkEvalHSET(b *testing.B) { store := dstore.NewStore(nil, nil, nil) for i := 0; i < b.N; i++ { @@ -5512,7 +5440,7 @@ func testEvalCOMMAND(t *testing.T, store *dstore.Store) { }, }, "command getkeys with an invalid number of arguments for a command": { - input: []string{"GETKEYS", "MSET", "key1"}, + input: []string{"GETKEYS", "SET", "key1"}, migratedOutput: EvalResponse{ Result: nil, Error: diceerrors.ErrGeneral("invalid number of arguments specified for command"), @@ -6097,13 +6025,6 @@ func testEvalHSETNX(t *testing.T, store *dstore.Store) { runMigratedEvalTests(t, tests, evalHSETNX, store) } -func TestMSETConsistency(t *testing.T) { - store := dstore.NewStore(nil, nil, nil) - evalMSET([]string{"KEY", "VAL", "KEY2", "VAL2"}, store) - - assert.Equal(t, "VAL", store.Get("KEY").Value) - assert.Equal(t, "VAL2", store.Get("KEY2").Value) -} func BenchmarkEvalHINCRBY(b *testing.B) { store := dstore.NewStore(nil, nil, nil) @@ -8108,7 +8029,7 @@ func testEvalDUMP(t *testing.T, store *dstore.Store) { input: []string{"INTEGER_KEY"}, migratedOutput: EvalResponse{ Result: "CQUAAAAAAAAACv9+l81XgsShqw==", - Error: nil, + Error: nil, }, }, "dump expired key": { @@ -8326,56 +8247,6 @@ func testEvalGEODIST(t *testing.T, store *dstore.Store) { runMigratedEvalTests(t, tests, evalGEODIST, store) } -func testEvalSINTER(t *testing.T, store *dstore.Store) { - tests := map[string]evalTestCase{ - "intersection of two sets": { - setup: func() { - evalSADD([]string{"set1", "a", "b", "c"}, store) - evalSADD([]string{"set2", "c", "d", "e"}, store) - }, - input: []string{"set1", "set2"}, - output: clientio.Encode([]string{"c"}, false), - }, - "intersection of three sets": { - setup: func() { - evalSADD([]string{"set1", "a", "b", "c"}, store) - evalSADD([]string{"set2", "b", "c", "d"}, store) - evalSADD([]string{"set3", "c", "d", "e"}, store) - }, - input: []string{"set1", "set2", "set3"}, - output: clientio.Encode([]string{"c"}, false), - }, - "intersection with single set": { - setup: func() { - evalSADD([]string{"set1", "a"}, store) - }, - input: []string{"set1"}, - output: clientio.Encode([]string{"a"}, false), - }, - "intersection with a non-existent key": { - setup: func() { - evalSADD([]string{"set1", "a", "b", "c"}, store) - }, - input: []string{"set1", "nonexistent"}, - output: clientio.Encode([]string{}, false), - }, - "intersection with wrong type": { - setup: func() { - evalSADD([]string{"set1", "a", "b", "c"}, store) - store.Put("string", &object.Obj{Value: "string", Type: object.ObjTypeString}) - }, - input: []string{"set1", "string"}, - output: []byte("-WRONGTYPE Operation against a key holding the wrong kind of value\r\n"), - }, - "no arguments": { - input: []string{}, - output: 
diceerrors.NewErrArity("SINTER"), - }, - } - - runEvalTests(t, tests, evalSINTER, store) -} - func testEvalJSONSTRAPPEND(t *testing.T, store *dstore.Store) { tests := map[string]evalTestCase{ "append to single field": { diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index c80ad9d63..015d20b00 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -5651,7 +5651,7 @@ func evalJSONTOGGLE(args []string, store *dstore.Store) *EvalResponse { obj.Value = jsonData } - toggleResults = ReverseSlice(toggleResults) + toggleResults = reverseSlice(toggleResults) return makeEvalResult(toggleResults) } @@ -6772,3 +6772,136 @@ func evalCommandDocs(args []string) *EvalResponse { return makeEvalResult(result) } + +// This method executes each operation, contained in ops array, based on commands used. +func executeBitfieldOps(value *ByteArray, ops []utils.BitFieldOp) []interface{} { + overflowType := WRAP + var result []interface{} + for _, op := range ops { + switch op.Kind { + case GET: + res := value.getBits(int(op.Offset), int(op.EVal), op.EType == SIGNED) + result = append(result, res) + case SET: + prevValue := value.getBits(int(op.Offset), int(op.EVal), op.EType == SIGNED) + value.setBits(int(op.Offset), int(op.EVal), op.Value) + result = append(result, prevValue) + case INCRBY: + res, err := value.incrByBits(int(op.Offset), int(op.EVal), op.Value, overflowType, op.EType == SIGNED) + if err != nil { + result = append(result, nil) + } else { + result = append(result, res) + } + case OVERFLOW: + overflowType = op.EType + } + } + return result +} + +// formatFloat formats float64 as string. +// Optionally appends a decimal (.0) for whole numbers, +// if b is true. +func formatFloat(f float64, b bool) string { + formatted := strconv.FormatFloat(f, 'f', -1, 64) + if b { + parts := strings.Split(formatted, ".") + if len(parts) == 1 { + formatted += ".0" + } + } + return formatted +} + +// trimElementAndUpdateArray trim the array between the given start and stop index +// Returns trimmed array +func trimElementAndUpdateArray(arr []any, start, stop int) []any { + updatedArray := make([]any, 0) + length := len(arr) + if len(arr) == 0 { + return updatedArray + } + var startIdx, stopIdx int + + if start >= length { + return updatedArray + } + + startIdx = adjustIndex(start, arr) + stopIdx = adjustIndex(stop, arr) + + if startIdx > stopIdx { + return updatedArray + } + + updatedArray = arr[startIdx : stopIdx+1] + return updatedArray +} + +// insertElementAndUpdateArray add an element at the given index +// Returns remaining array and error +func insertElementAndUpdateArray(arr []any, index int, elements []interface{}) (updatedArray []any, err error) { + length := len(arr) + var idx int + if index >= -length && index <= length { + idx = adjustIndex(index, arr) + } else { + return nil, errors.New("index out of bounds") + } + before := arr[:idx] + after := arr[idx:] + + elements = append(elements, after...) + before = append(before, elements...) + updatedArray = append(updatedArray, before...) 
+ return updatedArray, nil +} + +// adjustIndex will bound the array between 0 and len(arr) - 1 +// It also handles negative indexes +func adjustIndex(idx int, arr []any) int { + // if index is positive and out of bound, limit it to the last index + if idx > len(arr) { + idx = len(arr) - 1 + } + + // if index is negative, change it to equivalent positive index + if idx < 0 { + // if index is out of bound then limit it to the first index + if idx < -len(arr) { + idx = 0 + } else { + idx = len(arr) + idx + } + } + return idx +} + +// reverseSlice takes a slice of any type and returns a new slice with the elements reversed. +func reverseSlice[T any](slice []T) []T { + reversed := make([]T, len(slice)) + for i, v := range slice { + reversed[len(slice)-1-i] = v + } + return reversed +} + +// Parses and returns the input string as an int64 or float64 +func parseFloatInt(input string) (result interface{}, err error) { + // Try to parse as an integer + if intValue, parseErr := strconv.ParseInt(input, 10, 64); parseErr == nil { + result = intValue + return + } + + // Try to parse as a float + if floatValue, parseErr := strconv.ParseFloat(input, 64); parseErr == nil { + result = floatValue + return + } + + // If neither parsing succeeds, return an error + err = errors.New("invalid input: not a valid int or float") + return +} diff --git a/internal/iothread/cmd_meta.go b/internal/iothread/cmd_meta.go index bda33d29f..ce7c35b7b 100644 --- a/internal/iothread/cmd_meta.go +++ b/internal/iothread/cmd_meta.go @@ -129,10 +129,14 @@ const ( CmdGetDel = "GETDEL" CmdLrange = "LRANGE" CmdLinsert = "LINSERT" + CmdJSONArrInsert = "JSON.ARRINSERT" + CmdJSONArrTrim = "JSON.ARRTRIM" CmdJSONArrAppend = "JSON.ARRAPPEND" CmdJSONArrLen = "JSON.ARRLEN" CmdJSONArrPop = "JSON.ARRPOP" CmdJSONClear = "JSON.CLEAR" + CmdJSONSet = "JSON.SET" + CmdJSONObjKeys = "JSON.OBJKEYS" CmdJSONDel = "JSON.DEL" CmdJSONForget = "JSON.FORGET" CmdJSONGet = "JSON.GET" @@ -267,6 +271,12 @@ var CommandsMeta = map[string]CmdMeta{ CmdJSONArrAppend: { CmdType: SingleShard, }, + CmdJSONArrInsert: { + CmdType: SingleShard, + }, + CmdJSONArrTrim: { + CmdType: SingleShard, + }, CmdJSONArrLen: { CmdType: SingleShard, }, @@ -276,6 +286,12 @@ var CommandsMeta = map[string]CmdMeta{ CmdJSONClear: { CmdType: SingleShard, }, + CmdJSONSet: { + CmdType: SingleShard, + }, + CmdJSONObjKeys: { + CmdType: SingleShard, + }, CmdJSONDel: { CmdType: SingleShard, }, diff --git a/internal/server/cmd_meta.go b/internal/server/cmd_meta.go deleted file mode 100644 index fc9927d4a..000000000 --- a/internal/server/cmd_meta.go +++ /dev/null @@ -1,652 +0,0 @@ -package server - -import ( - "github.com/dicedb/dice/internal/cmd" - "github.com/dicedb/dice/internal/comm" - "github.com/dicedb/dice/internal/eval" - "github.com/dicedb/dice/internal/shard" -) - -// Type defines the type of DiceDB command based on how it interacts with shards. -// It uses an integer value to represent different command types. -type Type int - -// Enum values for Type using iota for auto-increment. -// Global commands don't interact with shards, SingleShard commands interact with one shard, -// MultiShard commands interact with multiple shards, and Custom commands require a direct client connection. -const ( - Global Type = iota // Global commands don't need to interact with shards. - SingleShard // Single-shard commands interact with only one shard. - MultiShard // MultiShard commands interact with multiple shards using scatter-gather logic. 
- Custom // Custom commands involve direct client communication. -) - -// CmdMeta stores metadata about DiceDB commands, including how they are processed across shards. -// Type indicates how the command should be handled, while Breakup and Gather provide logic -// for breaking up multishard commands and gathering their responses. -type CmdMeta struct { - Cmd string // Command name. - Breakup func(mgr *shard.ShardManager, DiceDBCmd *cmd.DiceDBCmd, c *comm.Client) []cmd.DiceDBCmd // Function to break up multishard commands. - Gather func(responses ...eval.EvalResponse) []byte // Function to gather responses from shards. - RespNoShards func(args []string) []byte // Function for commands that don't interact with shards. - Type // Enum indicating the command type. -} - -// CmdMetaMap is a map that associates command names with their corresponding metadata. -var ( - CmdMetaMap = map[string]CmdMeta{} - - // Metadata for global commands that don't interact with shards. - // PING is an example of global command. - pingCmdMeta = CmdMeta{ - Cmd: "PING", - Type: Global, - } - - // Metadata for single-shard commands that only interact with one shard. - // These commands don't require breakup and gather logic. - setCmdMeta = CmdMeta{ - Cmd: "SET", - Type: SingleShard, - } - expireCmdMeta = CmdMeta{ - Cmd: "EXPIRE", - Type: SingleShard, - } - expireAtCmdMeta = CmdMeta{ - Cmd: "EXPIREAT", - Type: SingleShard, - } - expireTimeCmdMeta = CmdMeta{ - Cmd: "EXPIRETIME", - Type: SingleShard, - } - getCmdMeta = CmdMeta{ - Cmd: "GET", - Type: SingleShard, - } - getsetCmdMeta = CmdMeta{ - Cmd: "GETSET", - Type: SingleShard, - } - setexCmdMeta = CmdMeta{ - Cmd: "SETEX", - Type: SingleShard, - } - saddCmdMeta = CmdMeta{ - Cmd: "SADD", - Type: SingleShard, - } - sremCmdMeta = CmdMeta{ - Cmd: "SREM", - Type: SingleShard, - } - scardCmdMeta = CmdMeta{ - Cmd: "SCARD", - Type: SingleShard, - } - smembersCmdMeta = CmdMeta{ - Cmd: "SMEMBERS", - } - - jsonArrAppendCmdMeta = CmdMeta{ - Cmd: "JSON.ARRAPPEND", - Type: SingleShard, - } - jsonArrLenCmdMeta = CmdMeta{ - Cmd: "JSON.ARRLEN", - Type: SingleShard, - } - jsonArrPopCmdMeta = CmdMeta{ - Cmd: "JSON.ARRPOP", - Type: SingleShard, - } - jsonDebugCmdMeta = CmdMeta{ - Cmd: "JSON.DEBUG", - Type: SingleShard, - } - jsonRespCmdMeta = CmdMeta{ - Cmd: "JSON.RESP", - Type: SingleShard, - } - - getrangeCmdMeta = CmdMeta{ - Cmd: "GETRANGE", - Type: SingleShard, - } - hexistsCmdMeta = CmdMeta{ - Cmd: "HEXISTS", - Type: SingleShard, - } - hkeysCmdMeta = CmdMeta{ - Cmd: "HKEYS", - Type: SingleShard, - } - - hvalsCmdMeta = CmdMeta{ - Cmd: "HVALS", - Type: SingleShard, - } - zaddCmdMeta = CmdMeta{ - Cmd: "ZADD", - Type: SingleShard, - } - zcountCmdMeta = CmdMeta{ - Cmd: "ZCOUNT", - Type: SingleShard, - } - zrangeCmdMeta = CmdMeta{ - Cmd: "ZRANGE", - Type: SingleShard, - } - appendCmdMeta = CmdMeta{ - Cmd: "APPEND", - Type: SingleShard, - } - zpopminCmdMeta = CmdMeta{ - Cmd: "ZPOPMIN", - Type: SingleShard, - } - zrankCmdMeta = CmdMeta{ - Cmd: "ZRANK", - Type: SingleShard, - } - zcardCmdMeta = CmdMeta{ - Cmd: "ZCARD", - Type: SingleShard, - } - zremCmdMeta = CmdMeta{ - Cmd: "ZREM", - Type: SingleShard, - } - pfaddCmdMeta = CmdMeta{ - Cmd: "PFADD", - Type: SingleShard, - } - pfcountCmdMeta = CmdMeta{ - Cmd: "PFCOUNT", - Type: SingleShard, - } - pfmergeCmdMeta = CmdMeta{ - Cmd: "PFMERGE", - Type: SingleShard, - } - ttlCmdMeta = CmdMeta{ - Cmd: "TTL", - Type: SingleShard, - } - pttlCmdMeta = CmdMeta{ - Cmd: "PTTL", - Type: SingleShard, - } - setbitCmdMeta = CmdMeta{ - Cmd: "SETBIT", - Type: 
SingleShard, - } - getbitCmdMeta = CmdMeta{ - Cmd: "GETBIT", - Type: SingleShard, - } - bitcountCmdMeta = CmdMeta{ - Cmd: "BITCOUNT", - Type: SingleShard, - } - bitfieldCmdMeta = CmdMeta{ - Cmd: "BITFIELD", - Type: SingleShard, - } - bitposCmdMeta = CmdMeta{ - Cmd: "BITPOS", - Type: SingleShard, - } - bitfieldroCmdMeta = CmdMeta{ - Cmd: "BITFIELD_RO", - Type: SingleShard, - } - delCmdMeta = CmdMeta{ - Cmd: "DEL", - Type: SingleShard, - } - existsCmdMeta = CmdMeta{ - Cmd: "EXISTS", - Type: SingleShard, - } - persistCmdMeta = CmdMeta{ - Cmd: "PERSIST", - Type: SingleShard, - } - typeCmdMeta = CmdMeta{ - Cmd: "TYPE", - Type: SingleShard, - } - - jsonclearCmdMeta = CmdMeta{ - Cmd: "JSON.CLEAR", - Type: SingleShard, - } - - jsonstrlenCmdMeta = CmdMeta{ - Cmd: "JSON.STRLEN", - Type: SingleShard, - } - - jsonobjlenCmdMeta = CmdMeta{ - Cmd: "JSON.OBJLEN", - Type: SingleShard, - } - hlenCmdMeta = CmdMeta{ - Cmd: "HLEN", - Type: SingleShard, - } - hstrlenCmdMeta = CmdMeta{ - Cmd: "HSTRLEN", - Type: SingleShard, - } - hscanCmdMeta = CmdMeta{ - Cmd: "HSCAN", - Type: SingleShard, - } - - jsonarrinsertCmdMeta = CmdMeta{ - Cmd: "JSON.ARRINSERT", - Type: SingleShard, - } - - jsonarrtrimCmdMeta = CmdMeta{ - Cmd: "JSON.ARRTRIM", - Type: SingleShard, - } - - jsonobjkeystCmdMeta = CmdMeta{ - Cmd: "JSON.OBJKEYS", - Type: SingleShard, - } - - incrCmdMeta = CmdMeta{ - Cmd: "INCR", - Type: SingleShard, - } - incrByCmdMeta = CmdMeta{ - Cmd: "INCRBY", - Type: SingleShard, - } - decrCmdMeta = CmdMeta{ - Cmd: "DECR", - Type: SingleShard, - } - decrByCmdMeta = CmdMeta{ - Cmd: "DECRBY", - Type: SingleShard, - } - incrByFloatCmdMeta = CmdMeta{ - Cmd: "INCRBYFLOAT", - Type: SingleShard, - } - hincrbyCmdMeta = CmdMeta{ - Cmd: "HINCRBY", - Type: SingleShard, - } - hincrbyfloatCmdMeta = CmdMeta{ - Cmd: "HINCRBYFLOAT", - Type: SingleShard, - } - hrandfieldCmdMeta = CmdMeta{ - Cmd: "HRANDFIELD", - Type: SingleShard, - } - zpopmaxCmdMeta = CmdMeta{ - Cmd: "ZPOPMAX", - Type: SingleShard, - } - bfaddCmdMeta = CmdMeta{ - Cmd: "BF.ADD", - Type: SingleShard, - } - bfreserveCmdMeta = CmdMeta{ - Cmd: "BF.RESERVE", - Type: SingleShard, - } - bfexistsCmdMeta = CmdMeta{ - Cmd: "BF.EXISTS", - Type: SingleShard, - } - bfinfoCmdMeta = CmdMeta{ - Cmd: "BF.INFO", - Type: SingleShard, - } - cmsInitByDimCmdMeta = CmdMeta{ - Cmd: "CMS.INITBYDIM", - Type: SingleShard, - } - cmsInitByProbCmdMeta = CmdMeta{ - Cmd: "CMS.INITBYPROB", - Type: SingleShard, - } - cmsInfoCmdMeta = CmdMeta{ - Cmd: "CMS.INFO", - Type: SingleShard, - } - cmsIncrByCmdMeta = CmdMeta{ - Cmd: "CMS.INCRBY", - Type: SingleShard, - } - cmsQueryCmdMeta = CmdMeta{ - Cmd: "CMS.QUERY", - Type: SingleShard, - } - cmsMergeCmdMeta = CmdMeta{ - Cmd: "CMS.MERGE", - Type: SingleShard, - } - getexCmdMeta = CmdMeta{ - Cmd: "GETEX", - Type: SingleShard, - } - getdelCmdMeta = CmdMeta{ - Cmd: "GETDEL", - Type: SingleShard, - } - hsetCmdMeta = CmdMeta{ - Cmd: "HSET", - Type: SingleShard, - } - hgetCmdMeta = CmdMeta{ - Cmd: "HGET", - Type: SingleShard, - } - hsetnxCmdMeta = CmdMeta{ - Cmd: "HSETNX", - Type: SingleShard, - } - hdelCmdMeta = CmdMeta{ - Cmd: "HDEL", - Type: SingleShard, - } - hmsetCmdMeta = CmdMeta{ - Cmd: "HMSET", - Type: SingleShard, - } - hmgetCmdMeta = CmdMeta{ - Cmd: "HMGET", - Type: SingleShard, - } - lrangeCmdMeta = CmdMeta{ - Cmd: "LRANGE", - Type: SingleShard, - } - linsertCmdMeta = CmdMeta{ - Cmd: "LINSERT", - Type: SingleShard, - } - lpushCmdMeta = CmdMeta{ - Cmd: "LPUSH", - Type: SingleShard, - } - rpushCmdMeta = CmdMeta{ - Cmd: "RPUSH", - Type: SingleShard, - } - 
lpopCmdMeta = CmdMeta{ - Cmd: "LPOP", - Type: SingleShard, - } - rpopCmdMeta = CmdMeta{ - Cmd: "RPOP", - Type: SingleShard, - } - llenCmdMeta = CmdMeta{ - Cmd: "LLEN", - Type: SingleShard, - } - jsonForgetCmdMeta = CmdMeta{ - Cmd: "JSON.FORGET", - Type: SingleShard, - } - jsonDelCmdMeta = CmdMeta{ - Cmd: "JSON.DEL", - Type: SingleShard, - } - jsonToggleCmdMeta = CmdMeta{ - Cmd: "JSON.TOGGLE", - Type: SingleShard, - } - jsonNumIncrByCmdMeta = CmdMeta{ - Cmd: "JSON.NUMINCRBY", - Type: SingleShard, - } - jsonNumMultByCmdMeta = CmdMeta{ - Cmd: "JSON.NUMMULTBY", - Type: SingleShard, - } - jsonSetCmdMeta = CmdMeta{ - Cmd: "JSON.SET", - Type: SingleShard, - } - jsonGetCmdMeta = CmdMeta{ - Cmd: "JSON.GET", - Type: SingleShard, - } - jsonTypeCmdMeta = CmdMeta{ - Cmd: "JSON.TYPE", - Type: SingleShard, - } - jsonIngestCmdMeta = CmdMeta{ - Cmd: "JSON.INGEST", - Type: SingleShard, - } - jsonArrStrAppendCmdMeta = CmdMeta{ - Cmd: "JSON.STRAPPEND", - Type: SingleShard, - } - hGetAllCmdMeta = CmdMeta{ - Cmd: "HGETALL", - Type: SingleShard, - } - dumpCmdMeta = CmdMeta{ - Cmd: "DUMP", - Type: SingleShard, - } - restoreCmdMeta = CmdMeta{ - Cmd: "RESTORE", - Type: SingleShard, - } - geoaddCmdMeta = CmdMeta{ - Cmd: "GEOADD", - Type: SingleShard, - } - geodistCmdMeta = CmdMeta{ - Cmd: "GEODIST", - Type: SingleShard, - } - clientCmdMeta = CmdMeta{ - Cmd: "CLIENT", - Type: SingleShard, - } - latencyCmdMeta = CmdMeta{ - Cmd: "LATENCY", - Type: SingleShard, - } - flushDBCmdMeta = CmdMeta{ - Cmd: "FLUSHDB", - Type: MultiShard, - } - objectCmdMeta = CmdMeta{ - Cmd: "OBJECT", - Type: SingleShard, - } - commandCmdMeta = CmdMeta{ - Cmd: "COMMAND", - Type: SingleShard, - } - CmdCommandCountMeta = CmdMeta{ - Cmd: "COMMAND|COUNT", - Type: SingleShard, - } - CmdCommandHelp = CmdMeta{ - Cmd: "COMMAND|HELP", - Type: SingleShard, - } - CmdCommandInfo = CmdMeta{ - Cmd: "COMMAND|INFO", - Type: SingleShard, - } - CmdCommandList = CmdMeta{ - Cmd: "COMMAND|LIST", - Type: SingleShard, - } - CmdCommandDocs = CmdMeta{ - Cmd: "COMMAND|DOCS", - Type: SingleShard, - } - CmdCommandGetKeys = CmdMeta{ - Cmd: "COMMAND|GETKEYS", - Type: SingleShard, - } - CmdCommandGetKeysFlags = CmdMeta{ - Cmd: "COMMAND|GETKEYSANDFLAGS", - Type: SingleShard, - } - - // Metadata for multishard commands would go here. - // These commands require both breakup and gather logic. - - // Metadata for custom commands requiring specific client-side logic would go here. -) - -// init initializes the CmdMetaMap map by associating each command name with its corresponding metadata. -func init() { - // Global commands. - CmdMetaMap["PING"] = pingCmdMeta - - // Single-shard commands. 
- CmdMetaMap["SET"] = setCmdMeta - CmdMetaMap["EXPIRE"] = expireCmdMeta - CmdMetaMap["EXPIREAT"] = expireAtCmdMeta - CmdMetaMap["EXPIRETIME"] = expireTimeCmdMeta - CmdMetaMap["GET"] = getCmdMeta - CmdMetaMap["GETSET"] = getsetCmdMeta - CmdMetaMap["SETEX"] = setexCmdMeta - - CmdMetaMap["SADD"] = saddCmdMeta - CmdMetaMap["SREM"] = sremCmdMeta - CmdMetaMap["SCARD"] = scardCmdMeta - CmdMetaMap["SMEMBERS"] = smembersCmdMeta - - CmdMetaMap["JSON.ARRAPPEND"] = jsonArrAppendCmdMeta - CmdMetaMap["JSON.ARRLEN"] = jsonArrLenCmdMeta - CmdMetaMap["JSON.ARRPOP"] = jsonArrPopCmdMeta - CmdMetaMap["JSON.DEBUG"] = jsonDebugCmdMeta - CmdMetaMap["JSON.RESP"] = jsonRespCmdMeta - - CmdMetaMap["GETRANGE"] = getrangeCmdMeta - CmdMetaMap["APPEND"] = appendCmdMeta - CmdMetaMap["JSON.CLEAR"] = jsonclearCmdMeta - CmdMetaMap["JSON.STRLEN"] = jsonstrlenCmdMeta - CmdMetaMap["JSON.OBJLEN"] = jsonobjlenCmdMeta - CmdMetaMap["HEXISTS"] = hexistsCmdMeta - CmdMetaMap["HKEYS"] = hkeysCmdMeta - CmdMetaMap["HVALS"] = hvalsCmdMeta - CmdMetaMap["JSON.ARRINSERT"] = jsonarrinsertCmdMeta - CmdMetaMap["JSON.ARRTRIM"] = jsonarrtrimCmdMeta - CmdMetaMap["JSON.OBJKEYS"] = jsonobjkeystCmdMeta - CmdMetaMap["ZADD"] = zaddCmdMeta - CmdMetaMap["ZCOUNT"] = zcountCmdMeta - CmdMetaMap["ZRANGE"] = zrangeCmdMeta - CmdMetaMap["ZRANK"] = zrankCmdMeta - CmdMetaMap["ZCARD"] = zcardCmdMeta - CmdMetaMap["ZREM"] = zremCmdMeta - CmdMetaMap["PFADD"] = pfaddCmdMeta - CmdMetaMap["ZPOPMIN"] = zpopminCmdMeta - CmdMetaMap["PFCOUNT"] = pfcountCmdMeta - CmdMetaMap["PFMERGE"] = pfmergeCmdMeta - CmdMetaMap["DEL"] = delCmdMeta - CmdMetaMap["EXISTS"] = existsCmdMeta - CmdMetaMap["PERSIST"] = persistCmdMeta - CmdMetaMap["TYPE"] = typeCmdMeta - CmdMetaMap["HLEN"] = hlenCmdMeta - CmdMetaMap["HSTRLEN"] = hstrlenCmdMeta - CmdMetaMap["HSCAN"] = hscanCmdMeta - CmdMetaMap["INCR"] = incrCmdMeta - CmdMetaMap["INCRBY"] = incrByCmdMeta - CmdMetaMap["INCR"] = incrCmdMeta - CmdMetaMap["DECR"] = decrCmdMeta - CmdMetaMap["DECRBY"] = decrByCmdMeta - CmdMetaMap["INCRBYFLOAT"] = incrByFloatCmdMeta - CmdMetaMap["HINCRBY"] = hincrbyCmdMeta - CmdMetaMap["HINCRBYFLOAT"] = hincrbyfloatCmdMeta - CmdMetaMap["HRANDFIELD"] = hrandfieldCmdMeta - CmdMetaMap["PFADD"] = pfaddCmdMeta - CmdMetaMap["ZPOPMIN"] = zpopminCmdMeta - CmdMetaMap["PFCOUNT"] = pfcountCmdMeta - CmdMetaMap["PFMERGE"] = pfmergeCmdMeta - CmdMetaMap["TTL"] = ttlCmdMeta - CmdMetaMap["PTTL"] = pttlCmdMeta - CmdMetaMap["HINCRBY"] = hincrbyCmdMeta - CmdMetaMap["HINCRBYFLOAT"] = hincrbyfloatCmdMeta - CmdMetaMap["HRANDFIELD"] = hrandfieldCmdMeta - CmdMetaMap["PFADD"] = pfaddCmdMeta - CmdMetaMap["PFCOUNT"] = pfcountCmdMeta - CmdMetaMap["PFMERGE"] = pfmergeCmdMeta - CmdMetaMap["HINCRBY"] = hincrbyCmdMeta - CmdMetaMap["HINCRBYFLOAT"] = hincrbyfloatCmdMeta - CmdMetaMap["HRANDFIELD"] = hrandfieldCmdMeta - CmdMetaMap["ZPOPMAX"] = zpopmaxCmdMeta - CmdMetaMap["BF.ADD"] = bfaddCmdMeta - CmdMetaMap["BF.RESERVE"] = bfreserveCmdMeta - CmdMetaMap["BF.EXISTS"] = bfexistsCmdMeta - CmdMetaMap["BF.INFO"] = bfinfoCmdMeta - CmdMetaMap["CMS.INITBYDIM"] = cmsInitByDimCmdMeta - CmdMetaMap["CMS.INITBYPROB"] = cmsInitByProbCmdMeta - CmdMetaMap["CMS.INFO"] = cmsInfoCmdMeta - CmdMetaMap["CMS.INCRBY"] = cmsIncrByCmdMeta - CmdMetaMap["CMS.QUERY"] = cmsQueryCmdMeta - CmdMetaMap["CMS.MERGE"] = cmsMergeCmdMeta - CmdMetaMap["GETEX"] = getexCmdMeta - CmdMetaMap["GETDEL"] = getdelCmdMeta - CmdMetaMap["HSET"] = hsetCmdMeta - CmdMetaMap["HGET"] = hgetCmdMeta - CmdMetaMap["HSETNX"] = hsetnxCmdMeta - CmdMetaMap["HDEL"] = hdelCmdMeta - CmdMetaMap["HMSET"] = hmsetCmdMeta - 
CmdMetaMap["HMGET"] = hmgetCmdMeta - CmdMetaMap["SETBIT"] = setbitCmdMeta - CmdMetaMap["GETBIT"] = getbitCmdMeta - CmdMetaMap["BITCOUNT"] = bitcountCmdMeta - CmdMetaMap["BITFIELD"] = bitfieldCmdMeta - CmdMetaMap["BITPOS"] = bitposCmdMeta - CmdMetaMap["BITFIELD_RO"] = bitfieldroCmdMeta - CmdMetaMap["LRANGE"] = lrangeCmdMeta - CmdMetaMap["LINSERT"] = linsertCmdMeta - CmdMetaMap["LPUSH"] = lpushCmdMeta - CmdMetaMap["RPUSH"] = rpushCmdMeta - CmdMetaMap["LPOP"] = lpopCmdMeta - CmdMetaMap["RPOP"] = rpopCmdMeta - CmdMetaMap["LLEN"] = llenCmdMeta - CmdMetaMap["JSON.FORGET"] = jsonForgetCmdMeta - CmdMetaMap["JSON.DEL"] = jsonDelCmdMeta - CmdMetaMap["JSON.TOGGLE"] = jsonToggleCmdMeta - CmdMetaMap["JSON.NUMINCRBY"] = jsonNumIncrByCmdMeta - CmdMetaMap["JSON.NUMMULTBY"] = jsonNumMultByCmdMeta - CmdMetaMap["JSON.SET"] = jsonSetCmdMeta - CmdMetaMap["JSON.GET"] = jsonGetCmdMeta - CmdMetaMap["JSON.TYPE"] = jsonTypeCmdMeta - CmdMetaMap["JSON.INGEST"] = jsonIngestCmdMeta - CmdMetaMap["JSON.STRAPPEND"] = jsonArrStrAppendCmdMeta - CmdMetaMap["HGETALL"] = hGetAllCmdMeta - CmdMetaMap["DUMP"] = dumpCmdMeta - CmdMetaMap["RESTORE"] = restoreCmdMeta - CmdMetaMap["GEOADD"] = geoaddCmdMeta - CmdMetaMap["GEODIST"] = geodistCmdMeta - CmdMetaMap["CLIENT"] = clientCmdMeta - CmdMetaMap["LATENCY"] = latencyCmdMeta - CmdMetaMap["FLUSHDB"] = flushDBCmdMeta - CmdMetaMap["OBJECT"] = objectCmdMeta - CmdMetaMap["COMMAND"] = commandCmdMeta - CmdMetaMap["COMMAND|COUNT"] = CmdCommandCountMeta - CmdMetaMap["COMMAND|HELP"] = CmdCommandHelp - CmdMetaMap["COMMAND|INFO"] = CmdCommandInfo - CmdMetaMap["COMMAND|LIST"] = CmdCommandList - CmdMetaMap["COMMAND|DOCS"] = CmdCommandDocs - CmdMetaMap["COMMAND|GETKEYS"] = CmdCommandGetKeys - CmdMetaMap["COMMAND|GETKEYSANDFLAGS"] = CmdCommandGetKeysFlags -} diff --git a/internal/server/utils/httpResp.go b/internal/server/httpws/httpResp.go similarity index 92% rename from internal/server/utils/httpResp.go rename to internal/server/httpws/httpResp.go index ad66ec391..10f77ee76 100644 --- a/internal/server/utils/httpResp.go +++ b/internal/server/httpws/httpResp.go @@ -1,4 +1,4 @@ -package utils +package httpws const ( HTTPStatusSuccess string = "success" diff --git a/internal/server/httpServer.go b/internal/server/httpws/httpServer.go similarity index 88% rename from internal/server/httpServer.go rename to internal/server/httpws/httpServer.go index d472dbc62..f04c42aec 100644 --- a/internal/server/httpServer.go +++ b/internal/server/httpws/httpServer.go @@ -1,4 +1,4 @@ -package server +package httpws import ( "bytes" @@ -12,6 +12,8 @@ import ( "sync" "time" + "github.com/dicedb/dice/internal/iothread" + "github.com/dicedb/dice/internal/eval" "github.com/dicedb/dice/internal/server/abstractserver" "github.com/dicedb/dice/internal/wal" @@ -22,7 +24,6 @@ import ( "github.com/dicedb/dice/internal/comm" derrors "github.com/dicedb/dice/internal/errors" "github.com/dicedb/dice/internal/ops" - "github.com/dicedb/dice/internal/server/utils" "github.com/dicedb/dice/internal/shard" ) @@ -133,16 +134,16 @@ func (s *HTTPServer) Run(ctx context.Context) error { func (s *HTTPServer) DiceHTTPHandler(writer http.ResponseWriter, request *http.Request) { // convert to REDIS cmd - diceDBCmd, err := utils.ParseHTTPRequest(request) + diceDBCmd, err := ParseHTTPRequest(request) if err != nil { - responseJSON, _ := json.Marshal(utils.HTTPResponse{Status: utils.HTTPStatusError, Data: "Invalid HTTP request format"}) - writer.Header().Set("Content-Type", "application/json") - writer.WriteHeader(http.StatusBadRequest) // Set 
HTTP status code to 500 - _, err = writer.Write(responseJSON) - if err != nil { - slog.Error("Error writing response", "error", err) - } - slog.Error("Error parsing HTTP request", slog.Any("error", err)) + writeErrorResponse(writer, http.StatusBadRequest, "Invalid HTTP request format", + "Error parsing HTTP request", slog.Any("error", err)) + return + } + + if iothread.CommandsMeta[diceDBCmd.Cmd].CmdType == iothread.MultiShard { + writeErrorResponse(writer, http.StatusBadRequest, "unsupported command", + "Unsupported command received", slog.String("cmd", diceDBCmd.Cmd)) return } @@ -154,14 +155,9 @@ func (s *HTTPServer) DiceHTTPHandler(writer http.ResponseWriter, request *http.R } if unimplementedCommands[diceDBCmd.Cmd] { - responseJSON, _ := json.Marshal(utils.HTTPResponse{Status: utils.HTTPStatusError, Data: fmt.Sprintf("Command %s is not implemented with HTTP", diceDBCmd.Cmd)}) - writer.Header().Set("Content-Type", "application/json") - writer.WriteHeader(http.StatusBadRequest) // Set HTTP status code to 500 - _, err = writer.Write(responseJSON) - if err != nil { - slog.Error("Error writing response", "error", err) - } - slog.Error("Command %s is not implemented", slog.String("cmd", diceDBCmd.Cmd)) + writeErrorResponse(writer, http.StatusBadRequest, + fmt.Sprintf("Command %s is not implemented with HTTP", diceDBCmd.Cmd), + "Command is not implemented", slog.String("cmd", diceDBCmd.Cmd)) return } @@ -181,7 +177,7 @@ func (s *HTTPServer) DiceHTTPHandler(writer http.ResponseWriter, request *http.R func (s *HTTPServer) DiceHTTPQwatchHandler(writer http.ResponseWriter, request *http.Request) { // convert to REDIS cmd - diceDBCmd, err := utils.ParseHTTPRequest(request) + diceDBCmd, err := ParseHTTPRequest(request) if err != nil { http.Error(writer, "Error parsing HTTP request", http.StatusBadRequest) slog.Error("Error parsing HTTP request", slog.Any("error", err)) @@ -336,19 +332,19 @@ func (s *HTTPServer) writeResponse(writer http.ResponseWriter, result *ops.Store var ( responseValue interface{} err error - httpResponse utils.HTTPResponse + httpResponse HTTPResponse isDiceErr bool ) // Check if the command is migrated, if it is we use EvalResponse values // else we use RESPParser to decode the response - _, ok := CmdMetaMap[diceDBCmd.Cmd] + _, ok := iothread.CommandsMeta[diceDBCmd.Cmd] // TODO: Remove this conditional check and if (true) condition when all commands are migrated - if !ok { + if !ok || iothread.CommandsMeta[diceDBCmd.Cmd].CmdType == iothread.Custom { responseValue, err = DecodeEvalResponse(result.EvalResponse) if err != nil { slog.Error("Error decoding response", "error", err) - httpResponse = utils.HTTPResponse{Status: utils.HTTPStatusError, Data: "Internal Server Error"} + httpResponse = HTTPResponse{Status: HTTPStatusError, Data: "Internal Server Error"} writeJSONResponse(writer, httpResponse, http.StatusInternalServerError) return } @@ -362,11 +358,11 @@ func (s *HTTPServer) writeResponse(writer http.ResponseWriter, result *ops.Store } // Create the HTTP response - httpResponse = utils.HTTPResponse{Data: ResponseParser(responseValue)} + httpResponse = HTTPResponse{Data: ResponseParser(responseValue)} if isDiceErr { - httpResponse.Status = utils.HTTPStatusError + httpResponse.Status = HTTPStatusError } else { - httpResponse.Status = utils.HTTPStatusSuccess + httpResponse.Status = HTTPStatusSuccess } // Write the response back to the client @@ -374,7 +370,7 @@ func (s *HTTPServer) writeResponse(writer http.ResponseWriter, result *ops.Store } // Helper function to write the 
JSON response -func writeJSONResponse(writer http.ResponseWriter, response utils.HTTPResponse, statusCode int) { +func writeJSONResponse(writer http.ResponseWriter, response HTTPResponse, statusCode int) { writer.Header().Set("Content-Type", "application/json") writer.WriteHeader(statusCode) @@ -391,6 +387,18 @@ func writeJSONResponse(writer http.ResponseWriter, response utils.HTTPResponse, } } +func writeErrorResponse(writer http.ResponseWriter, status int, message, logMessage string, logFields ...any) { + responseJSON, _ := json.Marshal(HTTPResponse{Status: HTTPStatusError, Data: message}) + writer.Header().Set("Content-Type", "application/json") + writer.WriteHeader(status) + if _, err := writer.Write(responseJSON); err != nil { + slog.Error("HTTP-WS Error writing response", "error", err) + } + if logMessage != "" { + slog.Error(logMessage, logFields...) + } +} + // ResponseParser parses the response value for both migrated and non-migrated cmds and // returns response to be rendered for HTTP/WS response func ResponseParser(responseValue interface{}) interface{} { diff --git a/internal/server/utils/redisCmdAdapter.go b/internal/server/httpws/redisCmdAdapter.go similarity index 97% rename from internal/server/utils/redisCmdAdapter.go rename to internal/server/httpws/redisCmdAdapter.go index 06e093b27..577a73e68 100644 --- a/internal/server/utils/redisCmdAdapter.go +++ b/internal/server/httpws/redisCmdAdapter.go @@ -1,17 +1,18 @@ -package utils +package httpws import ( "encoding/base64" "encoding/json" "errors" "fmt" - "io" "net/http" "regexp" "strconv" "strings" + "github.com/dicedb/dice/internal/server/utils" + "github.com/dicedb/dice/internal/cmd" diceerrors "github.com/dicedb/dice/internal/errors" ) @@ -64,7 +65,7 @@ func ParseHTTPRequest(r *http.Request) (*cmd.DiceDBCmd, error) { queryParams := r.URL.Query() keyPrefix := queryParams.Get(KeyPrefix) - if keyPrefix != "" && command == JSONIngest { + if keyPrefix != "" && command == utils.JSONIngest { args = append(args, keyPrefix) } // Step 1: Handle JSON body if present @@ -173,7 +174,7 @@ func ParseWebsocketMessage(msg []byte) (*cmd.DiceDBCmd, error) { // if key prefix is empty for JSON.INGEST command // add "" to cmdArr - if command == JSONIngest && len(cmdArr) == 2 { + if command == utils.JSONIngest && len(cmdArr) == 2 { cmdArr = append([]string{""}, cmdArr...) 
} diff --git a/internal/server/utils/redisCmdAdapter_test.go b/internal/server/httpws/redisCmdAdapter_test.go similarity index 99% rename from internal/server/utils/redisCmdAdapter_test.go rename to internal/server/httpws/redisCmdAdapter_test.go index c50c7cf23..091b8bbc6 100644 --- a/internal/server/utils/redisCmdAdapter_test.go +++ b/internal/server/httpws/redisCmdAdapter_test.go @@ -1,4 +1,4 @@ -package utils +package httpws import ( "net/http/httptest" diff --git a/internal/server/websocketServer.go b/internal/server/httpws/websocketServer.go similarity index 96% rename from internal/server/websocketServer.go rename to internal/server/httpws/websocketServer.go index 93148614e..bc090ff2b 100644 --- a/internal/server/websocketServer.go +++ b/internal/server/httpws/websocketServer.go @@ -1,4 +1,4 @@ -package server +package httpws import ( "bytes" @@ -14,6 +14,8 @@ import ( "syscall" "time" + "github.com/dicedb/dice/internal/iothread" + "github.com/dicedb/dice/internal/server/abstractserver" "github.com/dicedb/dice/internal/wal" @@ -23,7 +25,6 @@ import ( "github.com/dicedb/dice/internal/comm" diceerrors "github.com/dicedb/dice/internal/errors" "github.com/dicedb/dice/internal/ops" - "github.com/dicedb/dice/internal/server/utils" "github.com/dicedb/dice/internal/shard" "github.com/gorilla/websocket" "golang.org/x/exp/rand" @@ -143,7 +144,7 @@ func (s *WebsocketServer) WebsocketHandler(w http.ResponseWriter, r *http.Reques } // parse message to dice command - diceDBCmd, err := utils.ParseWebsocketMessage(msg) + diceDBCmd, err := ParseWebsocketMessage(msg) if errors.Is(err, diceerrors.ErrEmptyCommand) { continue } else if err != nil { @@ -153,6 +154,13 @@ func (s *WebsocketServer) WebsocketHandler(w http.ResponseWriter, r *http.Reques continue } + if iothread.CommandsMeta[diceDBCmd.Cmd].CmdType == iothread.MultiShard { + if err := WriteResponseWithRetries(conn, []byte("error: unsupported command"), maxRetries); err != nil { + slog.Debug(fmt.Sprintf("Error writing message: %v", err)) + } + continue + } + // TODO - on abort, close client connection instead of closing server? 
if diceDBCmd.Cmd == Abort { close(s.shutdownChan) @@ -271,7 +279,7 @@ func (s *WebsocketServer) processResponse(conn *websocket.Conn, diceDBCmd *cmd.D var responseValue interface{} // Check if the command is migrated, if it is we use EvalResponse values // else we use RESPParser to decode the response - _, ok := CmdMetaMap[diceDBCmd.Cmd] + _, ok := iothread.CommandsMeta[diceDBCmd.Cmd] // TODO: Remove this conditional check and if (true) condition when all commands are migrated if !ok { responseValue, err = DecodeEvalResponse(response.EvalResponse) diff --git a/main.go b/main.go index 09744ae52..0eea883a5 100644 --- a/main.go +++ b/main.go @@ -15,6 +15,8 @@ import ( "syscall" "time" + "github.com/dicedb/dice/internal/server/httpws" + "github.com/dicedb/dice/internal/cli" "github.com/dicedb/dice/internal/logger" "github.com/dicedb/dice/internal/server/abstractserver" @@ -25,7 +27,6 @@ import ( diceerrors "github.com/dicedb/dice/internal/errors" "github.com/dicedb/dice/internal/iothread" "github.com/dicedb/dice/internal/observability" - "github.com/dicedb/dice/internal/server" "github.com/dicedb/dice/internal/server/resp" "github.com/dicedb/dice/internal/shard" dstore "github.com/dicedb/dice/internal/store" @@ -140,13 +141,13 @@ func main() { go runServer(ctx, &serverWg, respServer, serverErrCh) if config.DiceConfig.HTTP.Enabled { - httpServer := server.NewHTTPServer(shardManager, wl) + httpServer := httpws.NewHTTPServer(shardManager, wl) serverWg.Add(1) go runServer(ctx, &serverWg, httpServer, serverErrCh) } if config.DiceConfig.WebSocket.Enabled { - websocketServer := server.NewWebSocketServer(shardManager, config.DiceConfig.WebSocket.Port, wl) + websocketServer := httpws.NewWebSocketServer(shardManager, config.DiceConfig.WebSocket.Port, wl) serverWg.Add(1) go runServer(ctx, &serverWg, websocketServer, serverErrCh) } From df110f61f6fbbee4e8ec8fe82c2b786ff7f49462 Mon Sep 17 00:00:00 2001 From: Ankit Dash Date: Wed, 11 Dec 2024 00:33:14 +0530 Subject: [PATCH 15/17] Add support for command GEOPOS (#1328) Co-authored-by: pshubham --- docs/src/content/docs/commands/GEOPOS.md | 58 ++++++++++++ integration_tests/commands/http/geo_test.go | 56 ++++++++++++ integration_tests/commands/resp/geo_test.go | 60 ++++++++++++ .../commands/websocket/geo_test.go | 60 ++++++++++++ internal/eval/commands.go | 9 ++ internal/eval/eval_test.go | 91 +++++++++++++++++++ internal/eval/store_eval.go | 52 +++++++++++ internal/iothread/cmd_meta.go | 4 + 8 files changed, 390 insertions(+) create mode 100644 docs/src/content/docs/commands/GEOPOS.md diff --git a/docs/src/content/docs/commands/GEOPOS.md b/docs/src/content/docs/commands/GEOPOS.md new file mode 100644 index 000000000..2d2d16ae7 --- /dev/null +++ b/docs/src/content/docs/commands/GEOPOS.md @@ -0,0 +1,58 @@ +--- +title: GEOPOS +description: The `GEOPOS` command in DiceDB is used to return the longitude, latitude to a specified key, as stored in the sorted set. +--- + +The `GEOPOS` command in DiceDB is used to return the longitude, latitude to a specified key which is stored in a sorted set. When elements are added via `GEOADD` then they are stored in 52 bit geohash hence the values returned by `GEOPOS` might have small margins of error. 
+ +## Syntax + +```bash +GEOPOS key [member [member ...]] +``` +## Parameters +| Parameter | Description | Type | Required | +| --------- | --------------------------------------------------------------------------------- | ------ | -------- | +| key | The name of the sorted set key whose member's coordinates are to be returned | string | Yes | +| member | A unique identifier for the location. | string | Yes | +## Return Values +| Condition | Return Value | +| ------------------------------------------------------------ | ----------------------------------------------------------- | +| Coordinates exist for the specified member(s) | Returns an ordered list of coordinates (longitude, latitude) for each specified member | +| Coordinates do not exist for the specified member(s) | Returns `(nil)` for each member without coordinates +| Incorrect Argument Count |`ERR wrong number of arguments for 'geopos' command` | +| Key does not exist in the sorted set |`Error: nil` | +## Behaviour +When the GEOPOS command is issued, DiceDB performs the following steps: +1. It checks if argument count is valid or not. If not an error is thrown. +2. It checks the validity of the key. +3. If the key is invalid then an error is returned. +4. Else it checks the members provided after the key. +5. For each member it checks the coordinates of the member. +6. If the coordinates exist then it is returned in an ordered list of latitude, longitude for the particular member. +7. If the coordinates do not exist then a ``(nil)`` is returned for that member. + +## Errors +1. `Wrong number of arguments for 'GEOPOS' command` + - Error Message: (error) ERR wrong number of arguments for 'geoadd' command. + - Occurs when the command is executed with an incorrect number of arguments. +2. `Wrong key for 'GEOPOS' command` + - Error Message: Error: nil + - Occurs when the command is executed with a key that does not exist in the sorted set. 
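+
+To make these error cases concrete, here is an illustrative session (the key names are hypothetical and the exact error text may differ slightly between DiceDB versions):
+
+```bash
+127.0.0.1:7379> GEOPOS Sicily
+(error) ERR wrong number of arguments for 'geopos' command
+127.0.0.1:7379> GEOPOS NonExistingKey "Palermo"
+(nil)
+```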
+ +## Example Usage + +Here are a few examples demonstrating the usage of the GEOPOS command: +### Example: Fetching the latitude, longitude of an existing member of the set +```bash +127.0.0.1:7379> GEOADD Sicily 13.361389 38.115556 "Palermo" 15.087269 37.502669 "Catania" +2 +127.0.0.1:7379> GEOPOS Sicily "Palermo" +1) 1) 13.361387 +2) 38.115556 +``` +### Example: Fetching the latitude, longitude of a member not in the set +```bash +127.0.0.1:7379> GEOPOS Sicily "Agrigento" +1) (nil) +``` \ No newline at end of file diff --git a/integration_tests/commands/http/geo_test.go b/integration_tests/commands/http/geo_test.go index a8e5c6e93..2943c3fa2 100644 --- a/integration_tests/commands/http/geo_test.go +++ b/integration_tests/commands/http/geo_test.go @@ -84,3 +84,59 @@ func TestGeoDist(t *testing.T) { }) } } + +func TestGeoPos(t *testing.T) { + exec := NewHTTPCommandExecutor() + + testCases := []struct { + name string + commands []HTTPCommand + expected []interface{} + }{ + { + name: "GEOPOS for existing points", + commands: []HTTPCommand{ + {Command: "GEOADD", Body: map[string]interface{}{"key": "index", "values": []interface{}{"13.361389", "38.115556", "Palermo"}}}, + {Command: "GEOPOS", Body: map[string]interface{}{"key": "index", "values": []interface{}{"Palermo"}}}, + }, + expected: []interface{}{ + float64(1), + []interface{}{[]interface{}{float64(13.361387), float64(38.115556)}}, + }, + }, + { + name: "GEOPOS for non-existing points", + commands: []HTTPCommand{ + {Command: "GEOPOS", Body: map[string]interface{}{"key": "index", "values": []interface{}{"NonExisting"}}}, + }, + expected: []interface{}{[]interface{}{nil}}, + }, + { + name: "GEOPOS for non-existing index", + commands: []HTTPCommand{ + {Command: "GEOPOS", Body: map[string]interface{}{"key": "NonExisting", "values": []interface{}{"Palermo"}}}, + }, + expected: []interface{}{nil}, + }, + { + name: "GEOPOS for a key not used for setting geospatial values", + commands: []HTTPCommand{ + {Command: "SET", Body: map[string]interface{}{"key": "k", "value": "v"}}, + {Command: "GEOPOS", Body: map[string]interface{}{"key": "k", "values": []interface{}{"v"}}}, + }, + expected: []interface{}{ + "OK", + "WRONGTYPE Operation against a key holding the wrong kind of value", + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + for i, cmd := range tc.commands { + result, _ := exec.FireCommand(cmd) + assert.Equal(t, tc.expected[i], result, "Value mismatch for cmd %v", cmd) + } + }) + } +} diff --git a/integration_tests/commands/resp/geo_test.go b/integration_tests/commands/resp/geo_test.go index 57bd2656a..b6eefc927 100644 --- a/integration_tests/commands/resp/geo_test.go +++ b/integration_tests/commands/resp/geo_test.go @@ -84,3 +84,63 @@ func TestGeoDist(t *testing.T) { }) } } + +func TestGeoPos(t *testing.T) { + conn := getLocalConnection() + defer conn.Close() + + testCases := []struct { + name string + cmds []string + expect []interface{} + delays []time.Duration + }{ + { + name: "GEOPOS b/w existing points", + cmds: []string{ + "GEOADD index 13.361389 38.115556 Palermo", + "GEOPOS index Palermo", + }, + expect: []interface{}{ + int64(1), + []interface{}{[]interface{}{"13.361387", "38.115556"}}, + }, + }, + { + name: "GEOPOS for non existing points", + cmds: []string{ + "GEOPOS index NonExisting", + }, + expect: []interface{}{ + []interface{}{"(nil)"}, + }, + }, + { + name: "GEOPOS for non existing index", + cmds: []string{ + "GEOPOS NonExisting Palermo", + }, + expect: []interface{}{"(nil)"}, + }, + { 
+ name: "GEOPOS for a key not used for setting geospatial values", + cmds: []string{ + "SET k v", + "GEOPOS k v", + }, + expect: []interface{}{ + "OK", + "WRONGTYPE Operation against a key holding the wrong kind of value", + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + for i, cmd := range tc.cmds { + result := FireCommand(conn, cmd) + assert.Equal(t, tc.expect[i], result, "Value mismatch for cmd %s", cmd) + } + }) + } +} diff --git a/integration_tests/commands/websocket/geo_test.go b/integration_tests/commands/websocket/geo_test.go index e2c5d240b..e7402f7de 100644 --- a/integration_tests/commands/websocket/geo_test.go +++ b/integration_tests/commands/websocket/geo_test.go @@ -85,3 +85,63 @@ func TestGeoDist(t *testing.T) { }) } } + +func TestGeoPos(t *testing.T) { + exec := NewWebsocketCommandExecutor() + conn := exec.ConnectToServer() + defer conn.Close() + + testCases := []struct { + name string + cmds []string + expect []interface{} + }{ + { + name: "GEOPOS b/w existing points", + cmds: []string{ + "GEOADD index 13.361389 38.115556 Palermo", + "GEOPOS index Palermo", + }, + expect: []interface{}{ + float64(1), + []interface{}{[]interface{}{float64(13.361387), float64(38.115556)}}, + }, + }, + { + name: "GEOPOS for non existing points", + cmds: []string{ + "GEOPOS index NonExisting", + }, + expect: []interface{}{[]interface{}{nil}}, + }, + { + name: "GEOPOS for non existing index", + cmds: []string{ + "GEOPOS NonExisting Palermo", + }, + expect: []interface{}{nil}, + }, + { + name: "GEOPOS for a key not used for setting geospatial values", + cmds: []string{ + "SET k v", + "GEOPOS k v", + }, + expect: []interface{}{ + "OK", + "WRONGTYPE Operation against a key holding the wrong kind of value", + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + for i, cmd := range tc.cmds { + result, err := exec.FireCommandAndReadResponse(conn, cmd) + assert.Nil(t, err) + assert.Equal(t, tc.expect[i], result, "Value mismatch for cmd %s", cmd) + } + }) + } +} + diff --git a/internal/eval/commands.go b/internal/eval/commands.go index 8b2d94bb7..358279ade 100644 --- a/internal/eval/commands.go +++ b/internal/eval/commands.go @@ -1213,6 +1213,14 @@ var ( NewEval: evalGEODIST, KeySpecs: KeySpecs{BeginIndex: 1}, } + geoPosCmdMeta = DiceCmdMeta{ + Name: "GEOPOS", + Info: `Returns the latitude and longitude of the members identified by the particular index.`, + Arity: -3, + NewEval: evalGEOPOS, + IsMigrated: true, + KeySpecs: KeySpecs{BeginIndex: 1}, + } jsonstrappendCmdMeta = DiceCmdMeta{ Name: "JSON.STRAPPEND", Info: `JSON.STRAPPEND key [path] value @@ -1354,6 +1362,7 @@ func init() { DiceCmds["FLUSHDB"] = flushdbCmdMeta DiceCmds["GEOADD"] = geoAddCmdMeta DiceCmds["GEODIST"] = geoDistCmdMeta + DiceCmds["GEOPOS"] = geoPosCmdMeta DiceCmds["GET"] = getCmdMeta DiceCmds["GETBIT"] = getBitCmdMeta DiceCmds["GETDEL"] = getDelCmdMeta diff --git a/internal/eval/eval_test.go b/internal/eval/eval_test.go index 41b0088df..284f8431e 100644 --- a/internal/eval/eval_test.go +++ b/internal/eval/eval_test.go @@ -130,6 +130,7 @@ func TestEval(t *testing.T) { testEvalBitFieldRO(t, store) testEvalGEOADD(t, store) testEvalGEODIST(t, store) + testEvalGEOPOS(t, store) testEvalJSONSTRAPPEND(t, store) testEvalINCR(t, store) testEvalINCRBY(t, store) @@ -8247,6 +8248,96 @@ func testEvalGEODIST(t *testing.T, store *dstore.Store) { runMigratedEvalTests(t, tests, evalGEODIST, store) } +func testEvalGEOPOS(t *testing.T, store *dstore.Store) { + tests := 
map[string]evalTestCase{ + "GEOPOS for existing single point": { + setup: func() { + evalGEOADD([]string{"index", "13.361387", "38.115556", "Palermo"}, store) + }, + input: []string{"index", "Palermo"}, + migratedOutput: EvalResponse{ + Result: []interface{}{[]interface{}{float64(13.361387), float64(38.115556)}}, + Error: nil, + }, + }, + "GEOPOS for multiple existing points": { + setup: func() { + evalGEOADD([]string{"points", "13.361387", "38.115556", "Palermo"}, store) + evalGEOADD([]string{"points", "15.087265", "37.502668", "Catania"}, store) + }, + input: []string{"points", "Palermo", "Catania"}, + migratedOutput: EvalResponse{ + Result: []interface{}{ + []interface{}{float64(13.361387), float64(38.115556)}, + []interface{}{float64(15.087265), float64(37.502668)}, + }, + Error: nil, + }, + }, + "GEOPOS for a point that does not exist": { + setup: func() { + evalGEOADD([]string{"index", "13.361387", "38.115556", "Palermo"}, store) + }, + input: []string{"index", "NonExisting"}, + migratedOutput: EvalResponse{ + Result: []interface{}{nil}, + Error: nil, + }, + }, + "GEOPOS for multiple points, one existing and one non-existing": { + setup: func() { + evalGEOADD([]string{"index", "13.361387", "38.115556", "Palermo"}, store) + }, + input: []string{"index", "Palermo", "NonExisting"}, + migratedOutput: EvalResponse{ + Result: []interface{}{ + []interface{}{float64(13.361387), float64(38.115556)}, + nil, + }, + Error: nil, + }, + }, + "GEOPOS for empty index": { + setup: func() { + evalGEOADD([]string{"", "13.361387", "38.115556", "Palermo"}, store) + }, + input: []string{"", "Palermo"}, + migratedOutput: EvalResponse{ + Result: []interface{}{ + []interface{}{float64(13.361387), float64(38.115556)}, + }, + Error: nil, + }, + }, + "GEOPOS with no members in key": { + input: []string{"index", "Palermo"}, + migratedOutput: EvalResponse{ + Result: clientio.NIL, + Error: nil, + }, + }, + "GEOPOS with invalid number of arguments": { + input: []string{"index"}, + migratedOutput: EvalResponse{ + Result: nil, + Error: diceerrors.ErrWrongArgumentCount("GEOPOS"), + }, + }, + "GEOPOS for a key not used for setting geospatial values": { + setup: func() { + evalSET([]string{"k", "v"}, store) + }, + input: []string{"k", "v"}, + migratedOutput: EvalResponse{ + Result: nil, + Error: errors.New("WRONGTYPE Operation against a key holding the wrong kind of value"), + }, + }, + } + + runMigratedEvalTests(t, tests, evalGEOPOS, store) +} + func testEvalJSONSTRAPPEND(t *testing.T, store *dstore.Store) { tests := map[string]evalTestCase{ "append to single field": { diff --git a/internal/eval/store_eval.go b/internal/eval/store_eval.go index 015d20b00..8583cedad 100644 --- a/internal/eval/store_eval.go +++ b/internal/eval/store_eval.go @@ -6222,6 +6222,58 @@ func evalGEODIST(args []string, store *dstore.Store) *EvalResponse { } } +func evalGEOPOS(args []string, store *dstore.Store) *EvalResponse { + if len(args) < 2 { + return &EvalResponse{ + Result: nil, + Error: diceerrors.ErrWrongArgumentCount("GEOPOS"), + } + } + + key := args[0] + obj := store.Get(key) + + if obj == nil { + return &EvalResponse{ + Result: clientio.NIL, + Error: nil, + } + } + + ss, err := sortedset.FromObject(obj) + + if err != nil { + return &EvalResponse{ + Result: nil, + Error: diceerrors.ErrWrongTypeOperation, + } + } + + results := make([]interface{}, len(args)-1) + + for index := 1; index < len(args); index++ { + member := args[index] + hash, ok := ss.Get(member) + + if !ok { + results[index-1] = (nil) + continue + } + + lat, lon := 
geo.DecodeHash(hash) + + latFloat, _ := strconv.ParseFloat(fmt.Sprintf("%f", lat), 64) + lonFloat, _ := strconv.ParseFloat(fmt.Sprintf("%f", lon), 64) + + results[index-1] = []interface{}{lonFloat, latFloat} + } + + return &EvalResponse{ + Result: results, + Error: nil, + } +} + func evalTouch(args []string, store *dstore.Store) *EvalResponse { if len(args) != 1 { return makeEvalError(diceerrors.ErrWrongArgumentCount("TOUCH")) diff --git a/internal/iothread/cmd_meta.go b/internal/iothread/cmd_meta.go index ce7c35b7b..c740bae25 100644 --- a/internal/iothread/cmd_meta.go +++ b/internal/iothread/cmd_meta.go @@ -112,6 +112,7 @@ const ( CmdRestore = "RESTORE" CmdGeoAdd = "GEOADD" CmdGeoDist = "GEODIST" + CmdGeoPos = "GEOPOS" CmdClient = "CLIENT" CmdLatency = "LATENCY" CmdDel = "DEL" @@ -517,6 +518,9 @@ var CommandsMeta = map[string]CmdMeta{ CmdGeoDist: { CmdType: SingleShard, }, + CmdGeoPos: { + CmdType: SingleShard, + }, CmdClient: { CmdType: SingleShard, }, From fe75261eceea16af13f656b054d655d2a67871ce Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Wed, 11 Dec 2024 11:30:44 +0530 Subject: [PATCH 16/17] Make target for pushging binary on a remote machine --- Makefile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Makefile b/Makefile index 9e44d0bad..88b6b78ea 100644 --- a/Makefile +++ b/Makefile @@ -105,3 +105,7 @@ release: ## build and push the Docker image to Docker Hub with the latest tag an docker build --tag dicedb/dicedb:latest --tag dicedb/dicedb:$(VERSION) . docker push dicedb/dicedb:$(VERSION) docker push dicedb/dicedb:latest + +push-binary-remote: + $(MAKE) build + scp -i ${SSH_PEM_PATH} ./dicedb ubuntu@${REMOTE_HOST}:. From f2ab39475551ec142df05627ea5b1976895e7df7 Mon Sep 17 00:00:00 2001 From: Arpit Bhayani Date: Fri, 13 Dec 2024 15:52:59 +0530 Subject: [PATCH 17/17] Community and Team (#1370) --- LICENSE.md => LICENSE | 0 docs/astro.config.mjs | 12 +- docs/public/funding.json | 198 +++++++--------- docs/src/components/Hero.astro | 4 +- docs/src/components/Nav.astro | 13 + docs/src/components/UserSocialHandles.astro | 63 +++++ docs/src/content/config.ts | 20 ++ docs/src/content/docs/commands/DECR.md | 2 +- docs/src/content/docs/commands/EXPIRE.md | 2 +- docs/src/content/docs/commands/EXPIREAT.md | 2 +- docs/src/content/docs/commands/EXPIRETIME.md | 2 +- docs/src/content/docs/commands/GEOPOS.md | 38 ++- docs/src/content/docs/commands/HINCRBY.md | 2 +- .../src/content/docs/commands/HINCRBYFLOAT.md | 2 +- docs/src/content/docs/commands/HRANDFIELD.md | 2 +- docs/src/content/docs/commands/INCR.md | 2 +- docs/src/content/docs/commands/JSON.OBJLEN.md | 18 +- docs/src/content/docs/commands/LINSERT.md | 2 +- docs/src/content/docs/commands/LLEN.md | 2 +- docs/src/content/docs/commands/LPOP.md | 2 +- docs/src/content/docs/commands/LPUSH.md | 2 +- docs/src/content/docs/commands/LRANGE.md | 2 +- docs/src/content/docs/commands/PTTL.md | 2 +- docs/src/content/docs/commands/RPOP.md | 2 +- docs/src/content/docs/commands/RPUSH.md | 2 +- docs/src/content/docs/commands/SETEX.md | 20 +- docs/src/content/docs/commands/SMEMBERS.md | 2 +- docs/src/content/docs/commands/TTL.md | 2 +- docs/src/content/docs/commands/ZRANK.md | 2 +- .../content/docs/get-started/hello-world.mdx | 2 +- .../docs/get-started/realtime-leaderboard.mdx | 2 +- .../content/docs/tutorials/url-shortener.md | 222 ++++++++++-------- docs/src/content/team/apoorv.json | 10 + docs/src/content/team/arpit.json | 10 + docs/src/content/team/ashwin.json | 10 + docs/src/content/team/jyotinder.json | 10 + 
docs/src/content/team/prashant.json | 10 + docs/src/content/team/prateek.json | 10 + docs/src/content/team/soumya.json | 10 + docs/src/content/updates/2024-07-18.md | 14 ++ docs/src/content/updates/2024-08-01.md | 25 ++ docs/src/content/updates/2024-08-08.md | 47 ++++ docs/src/content/updates/2024-08-15.md | 44 ++++ docs/src/content/updates/2024-08-19.md | 29 +++ docs/src/content/updates/2024-08-22.md | 54 +++++ docs/src/content/updates/2024-08-29.md | 40 ++++ docs/src/content/updates/2024-09-05.md | 43 ++++ docs/src/content/updates/2024-09-12.md | 52 ++++ docs/src/content/updates/2024-09-19.md | 64 +++++ docs/src/content/updates/2024-09-26.md | 54 +++++ docs/src/content/updates/2024-10-03.md | 84 +++++++ docs/src/content/updates/2024-10-10.md | 80 +++++++ docs/src/content/updates/2024-10-17.md | 65 +++++ docs/src/content/updates/2024-10-24.md | 59 +++++ docs/src/content/updates/2024-11-07.md | 35 +++ docs/src/content/updates/2024-11-14.md | 29 +++ docs/src/content/updates/2024-11-21.md | 44 ++++ docs/src/content/updates/2024-11-28.md | 48 ++++ docs/src/content/updates/2024-12-05.md | 21 ++ docs/src/layouts/BlogLayout.astro | 77 +++--- docs/src/layouts/Head.astro | 1 - docs/src/pages/community/index.astro | 69 ++++++ docs/src/pages/team.astro | 122 ++++++++++ docs/src/pages/updates/[slug].astro | 20 ++ 64 files changed, 1645 insertions(+), 295 deletions(-) rename LICENSE.md => LICENSE (100%) create mode 100644 docs/src/components/UserSocialHandles.astro create mode 100644 docs/src/content/team/apoorv.json create mode 100644 docs/src/content/team/arpit.json create mode 100644 docs/src/content/team/ashwin.json create mode 100644 docs/src/content/team/jyotinder.json create mode 100644 docs/src/content/team/prashant.json create mode 100644 docs/src/content/team/prateek.json create mode 100644 docs/src/content/team/soumya.json create mode 100644 docs/src/content/updates/2024-07-18.md create mode 100644 docs/src/content/updates/2024-08-01.md create mode 100644 docs/src/content/updates/2024-08-08.md create mode 100644 docs/src/content/updates/2024-08-15.md create mode 100644 docs/src/content/updates/2024-08-19.md create mode 100644 docs/src/content/updates/2024-08-22.md create mode 100644 docs/src/content/updates/2024-08-29.md create mode 100644 docs/src/content/updates/2024-09-05.md create mode 100644 docs/src/content/updates/2024-09-12.md create mode 100644 docs/src/content/updates/2024-09-19.md create mode 100644 docs/src/content/updates/2024-09-26.md create mode 100644 docs/src/content/updates/2024-10-03.md create mode 100644 docs/src/content/updates/2024-10-10.md create mode 100644 docs/src/content/updates/2024-10-17.md create mode 100644 docs/src/content/updates/2024-10-24.md create mode 100644 docs/src/content/updates/2024-11-07.md create mode 100644 docs/src/content/updates/2024-11-14.md create mode 100644 docs/src/content/updates/2024-11-21.md create mode 100644 docs/src/content/updates/2024-11-28.md create mode 100644 docs/src/content/updates/2024-12-05.md create mode 100644 docs/src/pages/community/index.astro create mode 100644 docs/src/pages/team.astro create mode 100644 docs/src/pages/updates/[slug].astro diff --git a/LICENSE.md b/LICENSE similarity index 100% rename from LICENSE.md rename to LICENSE diff --git a/docs/astro.config.mjs b/docs/astro.config.mjs index d3b263be3..90c9a2338 100644 --- a/docs/astro.config.mjs +++ b/docs/astro.config.mjs @@ -15,17 +15,17 @@ export default defineConfig({ // useStarlightDarkModeSwitch: false, favicon: "/favicon.png", editLink: { - baseUrl: 
'https://github.com/DiceDB/dice/edit/master/docs/', + baseUrl: "https://github.com/DiceDB/dice/edit/master/docs/", }, lastUpdated: true, expressiveCode: { textMarkers: true, - themes: ['ayu-dark','light-plus'], + themes: ["ayu-dark", "light-plus"], defaultProps: { wrap: true, }, - styleOverrides: { - borderRadius: '0.2rem' + styleOverrides: { + borderRadius: "0.2rem", }, }, sidebar: [ @@ -42,8 +42,8 @@ export default defineConfig({ autogenerate: { directory: "protocols" }, }, { - label: 'Tutorials', - autogenerate: { directory: 'tutorials' } + label: "Tutorials", + autogenerate: { directory: "tutorials" }, }, { label: "Commands", diff --git a/docs/public/funding.json b/docs/public/funding.json index e26b90e43..703ed0d2f 100644 --- a/docs/public/funding.json +++ b/docs/public/funding.json @@ -1,117 +1,101 @@ { "version": "v1.0.0", "entity": { - "type": "organisation", - "role": "owner", + "type": "organisation", + "role": "owner", + "name": "DiceDB", + "email": "arpit@dicedb.io", + "phone": "", + "description": "DiceDB is a redis-compliant, reactive, scalable, highly available, unified cache optimized for modern hardware.", + "webpageUrl": { + "url": "https://dicedb.io" + } + }, + "projects": [ + { + "guid": "dicedb", "name": "DiceDB", - "email": "arpit@dicedb.io", - "phone": "", "description": "DiceDB is a redis-compliant, reactive, scalable, highly available, unified cache optimized for modern hardware.", "webpageUrl": { - "url": "https://dicedb.io" + "url": "https://dicedb.io" + }, + "repositoryUrl": { + "url": "https://github.com/dicedb/dice" + }, + "licenses": ["BSL"], + "tags": ["database", "high-performance", "key-value-store"] + } + ], + "funding": { + "channels": [ + { + "guid": "other", + "type": "other", + "address": "", + "description": "Yet to setup" } - }, - "projects": [ + ], + "plans": [ + { + "guid": "perf-test", + "status": "active", + "name": "Load and performance testing", + "description": "This will cover the monthly server hosting costs to run load tests, performance tests, and CI for core database engine.", + "amount": 5000, + "currency": "USD", + "frequency": "monthly", + "channels": ["other"] + }, { - "guid": "dicedb", - "name": "DiceDB", - "description": "DiceDB is a redis-compliant, reactive, scalable, highly available, unified cache optimized for modern hardware.", - "webpageUrl": { - "url": "https://dicedb.io" - }, - "repositoryUrl": { - "url": "https://github.com/dicedb/dice" - }, - "licenses": [ - "BSL" - ], - "tags": [ - "database", - "high-performance", - "key-value-store" - ] + "guid": "developer-time", + "status": "active", + "name": "Developer compensation", + "description": "This will cover the cost of seven senior engineers working part-time on building DiceDB.", + "amount": 70000, + "currency": "USD", + "frequency": "monthly", + "channels": ["other"] + }, + { + "guid": "commuunity-contributors-and-events", + "status": "active", + "name": "Community swags, events and hackathons", + "description": "This will cover the cost of 160+ OSS contributors by giving them DiceDB swags and keeping their morale high. Some fraction of the funds will also be used to run hackathons and some community events like Database Paper Reading Sessions. 
We aim to build a thriving community around DiceDB and databases in India.", + "amount": 20000, + "currency": "USD", + "frequency": "monthly", + "channels": ["other"] + }, + { + "guid": "hosting-playground", + "status": "active", + "name": "Hosting Playground", + "description": "This will cover the monthly server hosting costs to run DiceDB and host Playground making it easier for people to try out DiceDB.", + "amount": 4000, + "currency": "USD", + "frequency": "monthly", + "channels": ["other"] + }, + { + "guid": "angel-plan", + "status": "active", + "name": "Goodwill plan", + "description": "Pay anything you wish to show your goodwill for the project.", + "amount": 1000, + "currency": "USD", + "frequency": "one-time", + "channels": ["other"] } - ], - "funding": { - "channels": [ - { - "guid": "other", - "type": "other", - "address": "", - "description": "Yet to setup" - } - ], - "plans": [ - { - "guid": "perf-test", - "status": "active", - "name": "Load and performance testing", - "description": "This will cover the monthly server hosting costs to run load tests, performance tests, and CI for core database engine.", - "amount": 5000, - "currency": "USD", - "frequency": "monthly", - "channels": [ - "other" - ] - }, - { - "guid": "developer-time", - "status": "active", - "name": "Developer compensation", - "description": "This will cover the cost of seven senior engineers working part-time on building DiceDB.", - "amount": 70000, - "currency": "USD", - "frequency": "monthly", - "channels": [ - "other" - ] - }, - { - "guid": "commuunity-contributors-and-events", - "status": "active", - "name": "Community swags, events and hackathons", - "description": "This will cover the cost of 160+ OSS contributors by giving them DiceDB swags and keeping their morale high. Some fraction of the funds will also be used to run hackathons and some community events like Database Paper Reading Sessions. We aim to build a thriving community around DiceDB and databases in India.", - "amount": 20000, - "currency": "USD", - "frequency": "monthly", - "channels": [ - "other" - ] - }, - { - "guid": "hosting-playground", - "status": "active", - "name": "Hosting Playground", - "description": "This will cover the monthly server hosting costs to run DiceDB and host Playground making it easier for people to try out DiceDB.", - "amount": 4000, - "currency": "USD", - "frequency": "monthly", - "channels": [ - "other" - ] - }, - { - "guid": "angel-plan", - "status": "active", - "name": "Goodwill plan", - "description": "Pay anything you wish to show your goodwill for the project.", - "amount": 1000, - "currency": "USD", - "frequency": "one-time", - "channels": [ - "other" - ] - } - ], - "history": [ - { - "year": 2024, - "income": 0, - "expenses": 100, - "taxes": 0, - "currency": "USD", - "description": "Infrastrucuture cost" - } - ] + ], + "history": [ + { + "year": 2024, + "income": 0, + "expenses": 100, + "taxes": 0, + "currency": "USD", + "description": "Infrastrucuture cost" + } + ] } -} \ No newline at end of file +} diff --git a/docs/src/components/Hero.astro b/docs/src/components/Hero.astro index dbe53d9b8..9dd3d46c8 100644 --- a/docs/src/components/Hero.astro +++ b/docs/src/components/Hero.astro @@ -1,7 +1,5 @@ --- -import { Github, MoveRight } from "lucide-astro"; -import Dice from "./Dice.astro"; -import site from "../data/site.json"; +import { Github } from "lucide-astro"; ---
diff --git a/docs/src/components/Nav.astro b/docs/src/components/Nav.astro index 38f5a671e..d153160f9 100644 --- a/docs/src/components/Nav.astro +++ b/docs/src/components/Nav.astro @@ -1,6 +1,7 @@ --- const pathname = new URL(Astro.request.url).pathname; const currentPage = pathname.split("/")[1]; +const currentSubPage = pathname.split("/")[2]; // To identify subpages under "community" import SocialHandlesIcons from "./SocialHandlesIcons.astro"; --- @@ -50,6 +51,18 @@ import SocialHandlesIcons from "./SocialHandlesIcons.astro"; > Blog + + Community + + + Team + diff --git a/docs/src/components/UserSocialHandles.astro b/docs/src/components/UserSocialHandles.astro new file mode 100644 index 000000000..ffc531408 --- /dev/null +++ b/docs/src/components/UserSocialHandles.astro @@ -0,0 +1,63 @@ +--- +import { Twitter, Github, Linkedin, Globe } from "lucide-astro"; +const { x, github, linkedin, website } = Astro.props; +--- + + diff --git a/docs/src/content/config.ts b/docs/src/content/config.ts index 28a0c6e25..7713bcd56 100644 --- a/docs/src/content/config.ts +++ b/docs/src/content/config.ts @@ -31,9 +31,29 @@ const releases = defineCollection({ }), }); +const updates = defineCollection({ + type: "content", + schema: z.object({}), +}); + +const team = defineCollection({ + type: "data", + schema: z.object({ + name: z.string(), + avatar_url: z.string(), + roles: z.array(z.string()), + x: z.string(), + linkedin: z.string(), + github: z.string(), + website: z.string(), + }), +}); + export const collections = { blog, authors, releases, + team, + updates, docs: defineCollection({ schema: docsSchema() }), }; diff --git a/docs/src/content/docs/commands/DECR.md b/docs/src/content/docs/commands/DECR.md index e63ce4549..d813bf206 100644 --- a/docs/src/content/docs/commands/DECR.md +++ b/docs/src/content/docs/commands/DECR.md @@ -100,4 +100,4 @@ OK ## Alternatives -You can also use the [`DECRBY`](/commands/decrby) command to decrement the value of a key by a specified amount. \ No newline at end of file +You can also use the [`DECRBY`](/commands/decrby) command to decrement the value of a key by a specified amount. diff --git a/docs/src/content/docs/commands/EXPIRE.md b/docs/src/content/docs/commands/EXPIRE.md index f31ce0d89..fe4954a5c 100644 --- a/docs/src/content/docs/commands/EXPIRE.md +++ b/docs/src/content/docs/commands/EXPIRE.md @@ -103,4 +103,4 @@ This example shows what happens when trying to set an expiration on a non-existe ## Alternatives -- Use [`EXPIREAT`](/commands/expireat) command for more precise expiration control based on Unix timestamps \ No newline at end of file +- Use [`EXPIREAT`](/commands/expireat) command for more precise expiration control based on Unix timestamps diff --git a/docs/src/content/docs/commands/EXPIREAT.md b/docs/src/content/docs/commands/EXPIREAT.md index 933050530..582d612a6 100644 --- a/docs/src/content/docs/commands/EXPIREAT.md +++ b/docs/src/content/docs/commands/EXPIREAT.md @@ -144,4 +144,4 @@ OK ## Alternatives -- Use [`EXPIRE`](/commands/expire) command for simpler expiration control based on relative time \ No newline at end of file +- Use [`EXPIRE`](/commands/expire) command for simpler expiration control based on relative time diff --git a/docs/src/content/docs/commands/EXPIRETIME.md b/docs/src/content/docs/commands/EXPIRETIME.md index ededacc4c..e235174d5 100644 --- a/docs/src/content/docs/commands/EXPIRETIME.md +++ b/docs/src/content/docs/commands/EXPIRETIME.md @@ -76,4 +76,4 @@ In this example, the key `nonExistentKey` does not exist in the database. 
The `E ## Alternatives - Use [`TTL`](/commands/ttl) to get relative expiration times -- Use [`PTTL`](/commands/pttl) to get relative expiration times in milliseconds \ No newline at end of file +- Use [`PTTL`](/commands/pttl) to get relative expiration times in milliseconds diff --git a/docs/src/content/docs/commands/GEOPOS.md b/docs/src/content/docs/commands/GEOPOS.md index 2d2d16ae7..049908eb9 100644 --- a/docs/src/content/docs/commands/GEOPOS.md +++ b/docs/src/content/docs/commands/GEOPOS.md @@ -1,6 +1,6 @@ --- title: GEOPOS -description: The `GEOPOS` command in DiceDB is used to return the longitude, latitude to a specified key, as stored in the sorted set. +description: The `GEOPOS` command in DiceDB is used to return the longitude, latitude to a specified key, as stored in the sorted set. --- The `GEOPOS` command in DiceDB is used to return the longitude, latitude to a specified key which is stored in a sorted set. When elements are added via `GEOADD` then they are stored in 52 bit geohash hence the values returned by `GEOPOS` might have small margins of error. @@ -10,29 +10,37 @@ The `GEOPOS` command in DiceDB is used to return the longitude, latitude to a sp ```bash GEOPOS key [member [member ...]] ``` + ## Parameters -| Parameter | Description | Type | Required | -| --------- | --------------------------------------------------------------------------------- | ------ | -------- | -| key | The name of the sorted set key whose member's coordinates are to be returned | string | Yes | -| member | A unique identifier for the location. | string | Yes | + +| Parameter | Description | Type | Required | +| --------- | ---------------------------------------------------------------------------- | ------ | -------- | +| key | The name of the sorted set key whose member's coordinates are to be returned | string | Yes | +| member | A unique identifier for the location. | string | Yes | + ## Return Values -| Condition | Return Value | -| ------------------------------------------------------------ | ----------------------------------------------------------- | -| Coordinates exist for the specified member(s) | Returns an ordered list of coordinates (longitude, latitude) for each specified member | -| Coordinates do not exist for the specified member(s) | Returns `(nil)` for each member without coordinates -| Incorrect Argument Count |`ERR wrong number of arguments for 'geopos' command` | -| Key does not exist in the sorted set |`Error: nil` | + +| Condition | Return Value | +| ---------------------------------------------------- | -------------------------------------------------------------------------------------- | +| Coordinates exist for the specified member(s) | Returns an ordered list of coordinates (longitude, latitude) for each specified member | +| Coordinates do not exist for the specified member(s) | Returns `(nil)` for each member without coordinates | +| Incorrect Argument Count | `ERR wrong number of arguments for 'geopos' command` | +| Key does not exist in the sorted set | `Error: nil` | + ## Behaviour + When the GEOPOS command is issued, DiceDB performs the following steps: + 1. It checks if argument count is valid or not. If not an error is thrown. 2. It checks the validity of the key. 3. If the key is invalid then an error is returned. 4. Else it checks the members provided after the key. 5. For each member it checks the coordinates of the member. 6. If the coordinates exist then it is returned in an ordered list of latitude, longitude for the particular member. -7. 
If the coordinates do not exist then a ``(nil)`` is returned for that member. +7. If the coordinates do not exist then a `(nil)` is returned for that member. ## Errors + 1. `Wrong number of arguments for 'GEOPOS' command` - Error Message: (error) ERR wrong number of arguments for 'geoadd' command. - Occurs when the command is executed with an incorrect number of arguments. @@ -43,7 +51,9 @@ When the GEOPOS command is issued, DiceDB performs the following steps: ## Example Usage Here are a few examples demonstrating the usage of the GEOPOS command: + ### Example: Fetching the latitude, longitude of an existing member of the set + ```bash 127.0.0.1:7379> GEOADD Sicily 13.361389 38.115556 "Palermo" 15.087269 37.502669 "Catania" 2 @@ -51,8 +61,10 @@ Here are a few examples demonstrating the usage of the GEOPOS command: 1) 1) 13.361387 2) 38.115556 ``` + ### Example: Fetching the latitude, longitude of a member not in the set + ```bash 127.0.0.1:7379> GEOPOS Sicily "Agrigento" 1) (nil) -``` \ No newline at end of file +``` diff --git a/docs/src/content/docs/commands/HINCRBY.md b/docs/src/content/docs/commands/HINCRBY.md index 1261719da..a975b87d2 100644 --- a/docs/src/content/docs/commands/HINCRBY.md +++ b/docs/src/content/docs/commands/HINCRBY.md @@ -157,4 +157,4 @@ Incrementing the hash value with a very large integer results in an integer over ## Notes -- The `HINCRBY` command is a powerful tool for managing counters and numerical values stored in hash fields, making it essential for applications that rely on incremental updates. \ No newline at end of file +- The `HINCRBY` command is a powerful tool for managing counters and numerical values stored in hash fields, making it essential for applications that rely on incremental updates. diff --git a/docs/src/content/docs/commands/HINCRBYFLOAT.md b/docs/src/content/docs/commands/HINCRBYFLOAT.md index 3f12489f8..1d9d464a2 100644 --- a/docs/src/content/docs/commands/HINCRBYFLOAT.md +++ b/docs/src/content/docs/commands/HINCRBYFLOAT.md @@ -136,4 +136,4 @@ Executing `hincrbyfloat` with a string increment value - The `HINCRBYFLOAT` command is a powerful tool for managing floating-point counters and numerical values stored in hash fields, making it essential for applications that require precision in incremental updates. - The command operates atomically, meaning it will complete without interruption, making it safe to use in concurrent environments where multiple clients may modify the same hash fields. -- `HINCRBYFLOAT` can be beneficial in scenarios such as tracking scores in a game, maintaining balances in accounts, or managing quantities in inventory systems where floating-point values are common. \ No newline at end of file +- `HINCRBYFLOAT` can be beneficial in scenarios such as tracking scores in a game, maintaining balances in accounts, or managing quantities in inventory systems where floating-point values are common. diff --git a/docs/src/content/docs/commands/HRANDFIELD.md b/docs/src/content/docs/commands/HRANDFIELD.md index 4fe6811c6..173643e1b 100644 --- a/docs/src/content/docs/commands/HRANDFIELD.md +++ b/docs/src/content/docs/commands/HRANDFIELD.md @@ -128,4 +128,4 @@ Passing invalid number of arguments to the `hrandfield` command ## Notes - The `HRANDFIELD` command is useful for scenarios where random selection from hash fields is required, such as in games, lotteries, or randomized surveys. -- The command can return multiple fields at once, allowing for efficient random sampling without the need for multiple calls. 
This can be particularly advantageous when working with larger hashes. \ No newline at end of file +- The command can return multiple fields at once, allowing for efficient random sampling without the need for multiple calls. This can be particularly advantageous when working with larger hashes. diff --git a/docs/src/content/docs/commands/INCR.md b/docs/src/content/docs/commands/INCR.md index 208658e4d..15210f268 100644 --- a/docs/src/content/docs/commands/INCR.md +++ b/docs/src/content/docs/commands/INCR.md @@ -94,4 +94,4 @@ Incrementing a key `mykey` with a value that exceeds the maximum integer value: ## Alternatives - You can also use the [`INCRBY`](/commands/incrby) command to increment the value of a key by a specified amount. -- You can also use the [`INCRBYFLOAT`](/commands/incrbyfloat) command to increment the value of a key by a fractional amount. \ No newline at end of file +- You can also use the [`INCRBYFLOAT`](/commands/incrbyfloat) command to increment the value of a key by a fractional amount. diff --git a/docs/src/content/docs/commands/JSON.OBJLEN.md b/docs/src/content/docs/commands/JSON.OBJLEN.md index 5c756348c..a92289c06 100644 --- a/docs/src/content/docs/commands/JSON.OBJLEN.md +++ b/docs/src/content/docs/commands/JSON.OBJLEN.md @@ -22,13 +22,13 @@ JSON.OBJLEN key [path] ## Return values -| Condition | Return Value | -| ------------------------------- | ------------------------------------------------------------------------------------------- | -| Command is successful | `Integer` denoting the number of keys length of the list at the specified key. | -| Wrong number of arguments | Error: `(error) ERR wrong number of arguments for JSON.OBJLEN command` | -| Key does not exist | Error: `(error) ERR could not perform this operation on a key that doesn't exist` | -| Key is not for a JSON object | Error: `(error) ERR WRONGTYPE Operation against a key holding the wrong kind of value` | -| Path malformed or doesn't exist | Error: `(error) ERR Path 'foo' does not exist` | +| Condition | Return Value | +| ------------------------------- | -------------------------------------------------------------------------------------- | +| Command is successful | `Integer` denoting the number of keys length of the list at the specified key. | +| Wrong number of arguments | Error: `(error) ERR wrong number of arguments for JSON.OBJLEN command` | +| Key does not exist | Error: `(error) ERR could not perform this operation on a key that doesn't exist` | +| Key is not for a JSON object | Error: `(error) ERR WRONGTYPE Operation against a key holding the wrong kind of value` | +| Path malformed or doesn't exist | Error: `(error) ERR Path 'foo' does not exist` | ## Behaviour @@ -70,15 +70,16 @@ Get number of keys in the Root JSON Object. You can specify the JSON root using 127.0.0.1:7379> JSON.OBJLEN a $ 1) 3 ``` + It returns 3, because there are three root keys in the root JSON object: `name`, `age`, and `address`. Or, if you don't want to specify a JSON path, it may be omitted. The path defaults to the root, and the result is given as a scalar: + ```bash 127.0.0.1:7379> JSON.OBJLEN a 3 ``` - ### Keys inside nested object To count the number of keys inside a nested object, specify a JSON Path. The root of the JSON object is referred to by the `$` symbol. @@ -89,6 +90,7 @@ To count the number of keys inside a nested object, specify a JSON Path. 
The roo 127.0.0.1:7379> JSON.OBJLEN b $.address 1) 3 ``` + Here, it returns 3 because it's counting the three keys inside the `$.address` JSON object: `city`, `state`, and `zipcode`. ### When path is not a JSON object diff --git a/docs/src/content/docs/commands/LINSERT.md b/docs/src/content/docs/commands/LINSERT.md index c00f4858d..7be8ba0a7 100644 --- a/docs/src/content/docs/commands/LINSERT.md +++ b/docs/src/content/docs/commands/LINSERT.md @@ -98,4 +98,4 @@ OK - Check Key Type: Before using `LINSERT`, ensure that the key is associated with a list to avoid errors. - Handle Non-Existent Keys: Be prepared to handle the case where the key does not exist, as `LINSERT` will return `0` in such scenarios. -- Use in Conjunction with Other List Commands: The `LINSERT` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. \ No newline at end of file +- Use in Conjunction with Other List Commands: The `LINSERT` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. diff --git a/docs/src/content/docs/commands/LLEN.md b/docs/src/content/docs/commands/LLEN.md index 2c70581f1..a0b501419 100644 --- a/docs/src/content/docs/commands/LLEN.md +++ b/docs/src/content/docs/commands/LLEN.md @@ -84,4 +84,4 @@ OK - Check Key Type: Before using `LLEN`, ensure that the key is associated with a list to avoid errors. - Handle Non-Existent Keys: Be prepared to handle the case where the key does not exist, as `LLEN` will return `0` in such scenarios. -- Use in Conjunction with Other List Commands: The `LLEN` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. \ No newline at end of file +- Use in Conjunction with Other List Commands: The `LLEN` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. diff --git a/docs/src/content/docs/commands/LPOP.md b/docs/src/content/docs/commands/LPOP.md index fcf6e9249..54918e521 100644 --- a/docs/src/content/docs/commands/LPOP.md +++ b/docs/src/content/docs/commands/LPOP.md @@ -113,4 +113,4 @@ LPOP mylist secondlist - `Handle Non-Existent Keys`: Be prepared to handle the case where the key does not exist, as `LPOP` will return `nil` in such scenarios. - `Use in Conjunction with Other List Commands`: The `LPOP` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LLEN`](/commands/llen), and [`RPOP`](/commands/rpop) to manage and process lists effectively. -By understanding and using the `LPOP` command effectively, you can manage list data structures in DiceDB efficiently, implementing queue-like behaviors and more. \ No newline at end of file +By understanding and using the `LPOP` command effectively, you can manage list data structures in DiceDB efficiently, implementing queue-like behaviors and more. 
diff --git a/docs/src/content/docs/commands/LPUSH.md b/docs/src/content/docs/commands/LPUSH.md index f5829e860..f882c774f 100644 --- a/docs/src/content/docs/commands/LPUSH.md +++ b/docs/src/content/docs/commands/LPUSH.md @@ -101,4 +101,4 @@ LPUSH mylist - `Use in Conjunction with Other List Commands`: The `LPUSH` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LLEN`](/commands/llen), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. - The `LPUSH` command can be used to implement a stack (LIFO) by always pushing new elements to the head of the list. -By understanding the `LPUSH` command, you can efficiently manage lists in DiceDB, ensuring that elements are added to the head of the list as needed. \ No newline at end of file +By understanding the `LPUSH` command, you can efficiently manage lists in DiceDB, ensuring that elements are added to the head of the list as needed. diff --git a/docs/src/content/docs/commands/LRANGE.md b/docs/src/content/docs/commands/LRANGE.md index 4e4d62381..76023960d 100644 --- a/docs/src/content/docs/commands/LRANGE.md +++ b/docs/src/content/docs/commands/LRANGE.md @@ -111,4 +111,4 @@ OK - Check Key Type: Before using `LRANGE`, ensure that the key is associated with a list to avoid errors. - Handle Non-Existent Keys: Be prepared to handle the case where the key does not exist, as `LRANGE` will return an empty array in such scenarios. -- Use in Conjunction with Other List Commands: The `LRANGE` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. \ No newline at end of file +- Use in Conjunction with Other List Commands: The `LRANGE` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. diff --git a/docs/src/content/docs/commands/PTTL.md b/docs/src/content/docs/commands/PTTL.md index 933466427..43d998eb4 100644 --- a/docs/src/content/docs/commands/PTTL.md +++ b/docs/src/content/docs/commands/PTTL.md @@ -85,4 +85,4 @@ In this example, the `PTTL` command is used with an extra argument. This results ## Alternatives -- [`TTL`](/commands/ttl): Similar to `PTTL` but returns the time-to-live in seconds instead of milliseconds \ No newline at end of file +- [`TTL`](/commands/ttl): Similar to `PTTL` but returns the time-to-live in seconds instead of milliseconds diff --git a/docs/src/content/docs/commands/RPOP.md b/docs/src/content/docs/commands/RPOP.md index 0fb5407cf..941e43165 100644 --- a/docs/src/content/docs/commands/RPOP.md +++ b/docs/src/content/docs/commands/RPOP.md @@ -91,4 +91,4 @@ RPOP mylist secondlist - `Handle Non-Existent Keys`: Be prepared to handle the case where the key does not exist, as `RPOP` will return `nil` in such scenarios. - `Use in Conjunction with Other List Commands`: The `RPOP` command is often used alongside other list commands like [`RPUSH`](/commands/rpush), [`LPUSH`](/commands/lpush), [`LLEN`](/commands/llen), and [`LPOP`](/commands/lpop) to manage and process lists effectively. -By understanding the `RPOP` command, you can effectively manage lists in DiceDB, ensuring that you can retrieve and process elements in a LIFO order. 
\ No newline at end of file +By understanding the `RPOP` command, you can effectively manage lists in DiceDB, ensuring that you can retrieve and process elements in a LIFO order. diff --git a/docs/src/content/docs/commands/RPUSH.md b/docs/src/content/docs/commands/RPUSH.md index b8163e382..f6fce3d65 100644 --- a/docs/src/content/docs/commands/RPUSH.md +++ b/docs/src/content/docs/commands/RPUSH.md @@ -87,4 +87,4 @@ RPUSH mylist ## Best Practices - `Check Key Type`: Before using `RPUSH`, ensure that the key is associated with a list to avoid errors. -- `Use in Conjunction with Other List Commands`: The `RPUSH` command is often used alongside other list commands like [`LLEN`](/commands/llen), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. \ No newline at end of file +- `Use in Conjunction with Other List Commands`: The `RPUSH` command is often used alongside other list commands like [`LLEN`](/commands/llen), [`LPUSH`](/commands/lpush), [`LPOP`](/commands/lpop), and [`RPOP`](/commands/rpop) to manage and process lists effectively. diff --git a/docs/src/content/docs/commands/SETEX.md b/docs/src/content/docs/commands/SETEX.md index 7415a2173..4567d66e5 100644 --- a/docs/src/content/docs/commands/SETEX.md +++ b/docs/src/content/docs/commands/SETEX.md @@ -17,18 +17,18 @@ SETEX key seconds value ## Parameters -| Parameter | Description | Type | Required | -| --------- | ------------------------------------------------------------------------- | ------- | -------- | -| `key` | The name of the key to be set. | String | Yes | -| `seconds` | Expiration time for the key in seconds. | Integer | Yes | -| `value` | The value to be set for the key. | Integer | No | +| Parameter | Description | Type | Required | +| --------- | --------------------------------------- | ------- | -------- | +| `key` | The name of the key to be set. | String | Yes | +| `seconds` | Expiration time for the key in seconds. | Integer | Yes | +| `value` | The value to be set for the key. | Integer | No | ## Return values -| Condition | Return Value | -| ------------------------------------------- | ----------------------------------------------- | -| Command is successful | `OK` | -| Syntax or specified constraints are invalid | error | +| Condition | Return Value | +| ------------------------------------------- | ------------ | +| Command is successful | `OK` | +| Syntax or specified constraints are invalid | error | ## Behaviour @@ -78,10 +78,12 @@ Setting a key with an invalid expiration time will result in an error: ``` Attempting to use the command with missing arguments will result in an error: + ```bash 127.0.0.1:7379> SETEX foo 10 (error) ERROR wrong number of arguments for 'setex' command ``` ### Notes: + `SETEX` can be replaced via [`SET`](/commands/set) with `EX` option. diff --git a/docs/src/content/docs/commands/SMEMBERS.md b/docs/src/content/docs/commands/SMEMBERS.md index 1d7c23157..0207ba100 100644 --- a/docs/src/content/docs/commands/SMEMBERS.md +++ b/docs/src/content/docs/commands/SMEMBERS.md @@ -89,4 +89,4 @@ OK 1. The command returns all members at once, which may impact performance for very large sets 2. The order of returned elements is not guaranteed and may vary between calls 3. Memory usage scales linearly with the size of the set -4. This command has O(N) time complexity where N is the set size \ No newline at end of file +4. 
This command has O(N) time complexity where N is the set size diff --git a/docs/src/content/docs/commands/TTL.md b/docs/src/content/docs/commands/TTL.md index aef736326..8d6779091 100644 --- a/docs/src/content/docs/commands/TTL.md +++ b/docs/src/content/docs/commands/TTL.md @@ -85,4 +85,4 @@ In this example, the `TTL` command is used with an extra argument. This results ## Alternatives -- [`PTTL`](/commands/pttl): Similar to `TTL` but returns the time-to-live in milliseconds instead of seconds \ No newline at end of file +- [`PTTL`](/commands/pttl): Similar to `TTL` but returns the time-to-live in milliseconds instead of seconds diff --git a/docs/src/content/docs/commands/ZRANK.md b/docs/src/content/docs/commands/ZRANK.md index 777dc027f..1ff619d05 100644 --- a/docs/src/content/docs/commands/ZRANK.md +++ b/docs/src/content/docs/commands/ZRANK.md @@ -80,4 +80,4 @@ Retrieve both the rank and the score of `member2` in the sorted set `myzset`: ## Notes -- This command is particularly useful for implementing leaderboards, pagination in ranked lists, and analytics on data distribution. \ No newline at end of file +- This command is particularly useful for implementing leaderboards, pagination in ranked lists, and analytics on data distribution. diff --git a/docs/src/content/docs/get-started/hello-world.mdx b/docs/src/content/docs/get-started/hello-world.mdx index 5cd0621a4..692dc26a3 100644 --- a/docs/src/content/docs/get-started/hello-world.mdx +++ b/docs/src/content/docs/get-started/hello-world.mdx @@ -17,7 +17,7 @@ You can follow the steps mentioned in the [installation](/get-started/installati Once the DiceDB server starts, you will see output similar to this ``` - ██████╗ ██╗ ██████╗███████╗██████╗ ██████╗ + ██████╗ ██╗ ██████╗███████╗██████╗ ██████╗ ██╔══██╗██║██╔════╝██╔════╝██╔══██╗██╔══██╗ ██║ ██║██║██║ █████╗ ██║ ██║██████╔╝ ██║ ██║██║██║ ██╔══╝ ██║ ██║██╔══██╗ diff --git a/docs/src/content/docs/get-started/realtime-leaderboard.mdx b/docs/src/content/docs/get-started/realtime-leaderboard.mdx index b47d0e38b..b59ee1ad9 100644 --- a/docs/src/content/docs/get-started/realtime-leaderboard.mdx +++ b/docs/src/content/docs/get-started/realtime-leaderboard.mdx @@ -32,7 +32,7 @@ docker run -p 7379:7379 dicedb/dicedb --enable-watch Once the DiceDB server starts, you will see output similar to this ``` - ██████╗ ██╗ ██████╗███████╗██████╗ ██████╗ + ██████╗ ██╗ ██████╗███████╗██████╗ ██████╗ ██╔══██╗██║██╔════╝██╔════╝██╔══██╗██╔══██╗ ██║ ██║██║██║ █████╗ ██║ ██║██████╔╝ ██║ ██║██║██║ ██╔══╝ ██║ ██║██╔══██╗ diff --git a/docs/src/content/docs/tutorials/url-shortener.md b/docs/src/content/docs/tutorials/url-shortener.md index fb47fd2d6..40d790e9c 100644 --- a/docs/src/content/docs/tutorials/url-shortener.md +++ b/docs/src/content/docs/tutorials/url-shortener.md @@ -13,15 +13,19 @@ This tutorial guides you through creating a URL shortener using DiceDB, a key-va ## Setup ### 1. Install and Run DiceDB + Start a DiceDB server using Docker: -```bash + +```bash docker run -d -p 7379:7379 dicedb/dicedb ``` - + This command pulls the DiceDB Docker image and runs it, exposing it on port `7379`. ### 2. Initialize a New Go Project + Create a new directory for your project and initialize a Go module: + ```bash mkdir url-shortener cd url-shortener @@ -29,7 +33,9 @@ go mod init url-shortener ``` ### 3. 
Install Required Packages + Install the DiceDB Go SDK and other dependencies: + ```bash go get github.com/dicedb/dicedb-go go get github.com/gin-gonic/gin @@ -37,119 +43,130 @@ go get github.com/google/uuid ``` ## Understanding DiceDB Commands + We'll use the following DiceDB commands: + ### `SET` Command + Stores a key-value pair in DiceDB. + - **Syntax**: `SET key value [expiration]` - `key`: Unique identifier (e.g., short URL code) - `value`: Data to store (e.g., serialized JSON) - `expiration`: Optional; time-to-live in seconds (use `0` for no expiration) + ### `GET` Command + Retrieves the value associated with a key. + - **Syntax**: `GET key` - `key`: Identifier for the data to retrieve ## Writing the Code + Create a file named `main.go` and add the following code: - `main.go`: - ```go - package main - - import ( - "context" - "encoding/json" - "log" - "net/http" - - "github.com/gin-gonic/gin" - "github.com/google/uuid" - "github.com/dicedb/dicedb-go" // DiceDB Go SDK - ) - - type URL struct { - ID string `json:"id"` - LongURL string `json:"long_url"` - ShortURL string `json:"short_url"` - } - - var db *dicedb.Client - - // Initialize DiceDB connection - func init() { - db = dicedb.NewClient(&dicedb.Options{ - Addr: "localhost:7379", - }) - } - - // Creates a short URL from a given long URL - func CreateShortURL(c *gin.Context) { - var requestBody URL - if err := c.ShouldBindJSON(&requestBody); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request"}) - return - } - - // Generate unique short ID and construct the short URL - shortID := uuid.New().String()[:8] - requestBody.ID = shortID - requestBody.ShortURL = "http://localhost:8080/" + shortID - - // Serialize URL struct to JSON and store it in DiceDB - urlData, err := json.Marshal(requestBody) - if err != nil { - c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) - return - } - - if err := db.Set(context.Background(), shortID, urlData, 0).Err(); err != nil { - c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) - return - } - - c.JSON(http.StatusCreated, gin.H{"short_url": requestBody.ShortURL}) - } - - // Redirects to the original URL based on the short URL ID - func RedirectURL(c *gin.Context) { - id := c.Param("id") - - // Retrieve stored URL data from DiceDB - urlData, err := db.Get(context.Background(), id).Result() - if err != nil { - c.JSON(http.StatusNotFound, gin.H{"error": "URL not found"}) - return - } - - // Deserialize JSON data back into URL struct - var url URL - if err := json.Unmarshal([]byte(urlData), &url); err != nil { - c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode URL data"}) - return - } - - // Redirect user to the original long URL - c.Redirect(http.StatusFound, url.LongURL) - } - - func main() { - router := gin.Default() - - // Define endpoints for creating short URLs and redirecting - router.POST("/shorten", CreateShortURL) - router.GET("/:id", RedirectURL) - - // Start the server on port 8080 - if err := router.Run(":8080"); err != nil { - log.Fatal("Failed to start server:", err) - } - } - ``` + + ```go + package main + + import ( + "context" + "encoding/json" + "log" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/dicedb/dicedb-go" // DiceDB Go SDK + ) + + type URL struct { + ID string `json:"id"` + LongURL string `json:"long_url"` + ShortURL string `json:"short_url"` + } + + var db *dicedb.Client + + // Initialize DiceDB connection + func init() { + db = 
dicedb.NewClient(&dicedb.Options{ + Addr: "localhost:7379", + }) + } + + // Creates a short URL from a given long URL + func CreateShortURL(c *gin.Context) { + var requestBody URL + if err := c.ShouldBindJSON(&requestBody); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request"}) + return + } + + // Generate unique short ID and construct the short URL + shortID := uuid.New().String()[:8] + requestBody.ID = shortID + requestBody.ShortURL = "http://localhost:8080/" + shortID + + // Serialize URL struct to JSON and store it in DiceDB + urlData, err := json.Marshal(requestBody) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) + return + } + + if err := db.Set(context.Background(), shortID, urlData, 0).Err(); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to save URL"}) + return + } + + c.JSON(http.StatusCreated, gin.H{"short_url": requestBody.ShortURL}) + } + + // Redirects to the original URL based on the short URL ID + func RedirectURL(c *gin.Context) { + id := c.Param("id") + + // Retrieve stored URL data from DiceDB + urlData, err := db.Get(context.Background(), id).Result() + if err != nil { + c.JSON(http.StatusNotFound, gin.H{"error": "URL not found"}) + return + } + + // Deserialize JSON data back into URL struct + var url URL + if err := json.Unmarshal([]byte(urlData), &url); err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to decode URL data"}) + return + } + + // Redirect user to the original long URL + c.Redirect(http.StatusFound, url.LongURL) + } + + func main() { + router := gin.Default() + + // Define endpoints for creating short URLs and redirecting + router.POST("/shorten", CreateShortURL) + router.GET("/:id", RedirectURL) + + // Start the server on port 8080 + if err := router.Run(":8080"); err != nil { + log.Fatal("Failed to start server:", err) + } + } + ``` ## Explanation ### 1. Initialize the DiceDB Client + We set up the DiceDB client in the `init` function: + ```go db = dicedb.NewClient(&dicedb.Options{ Addr: "localhost:7379", @@ -157,6 +174,7 @@ db = dicedb.NewClient(&dicedb.Options{ ``` ### 2. Create Short URL Endpoint + - **Input Validation**: Ensures the `long_url` field is present. - **Short ID Generation**: Uses `uuid` to create a unique 8-character ID. - **Data Serialization**: Converts the `URL` struct to JSON. @@ -164,50 +182,62 @@ db = dicedb.NewClient(&dicedb.Options{ - **Response**: Returns the generated short URL. ### 3. Redirect to Original URL Endpoint + - **Data Retrieval**: Fetches the URL data from DiceDB using the `Get` command. - **Data Deserialization**: Converts JSON back to the `URL` struct. - **Redirection**: Redirects the user to the `LongURL`. ### 4. Start the Server + The `main` function sets up the routes and starts the server on port `8080`. ## Running the Application ### 1. Start the Go Application + ```bash go run main.go ``` + This will start the application server on port 8080 by default, you should see output similar to + ```bash [GIN-debug] Listening and serving HTTP on :8080 ``` ### 2. Ensure DiceDB is Running + Ensure your DiceDB server is up and running on port `7379`. ## Testing the application ### 1. Shorten URL: + **Using `curl`:** + ```bash curl -X POST -H "Content-Type: application/json" -d '{"long_url": "https://example.com"}' http://localhost:8080/shorten ``` **Response:** + ```json { -"short_url": "http://localhost:8080/" + "short_url": "http://localhost:8080/" } ``` ### 2. 
Redirect to Original URL: + **Using `curl`:** + ```bash curl -L http://localhost:8080/abcd1234 ``` - + **Using a Browser:** Navigate to: + ``` http://localhost:8080/abcd1234 ``` diff --git a/docs/src/content/team/apoorv.json b/docs/src/content/team/apoorv.json new file mode 100644 index 000000000..737063252 --- /dev/null +++ b/docs/src/content/team/apoorv.json @@ -0,0 +1,10 @@ +{ + "name": "Apoorv Yadav", + "avatar_url": "https://avatars.githubusercontent.com/u/32174554?v=4", + "url": "", + "x": "", + "linkedin": "https://www.linkedin.com/in/yadavapoorv/", + "github": "https://github.com/apoorvyadav1111", + "website": "", + "roles": ["committer"] +} diff --git a/docs/src/content/team/arpit.json b/docs/src/content/team/arpit.json new file mode 100644 index 000000000..cc1a4c814 --- /dev/null +++ b/docs/src/content/team/arpit.json @@ -0,0 +1,10 @@ +{ + "name": "Arpit Bhayani", + "avatar_url": "https://edge.arpitbhayani.me/img/arpit-2.png", + "url": "https://arpitbhayani.me/", + "x": "https://x.com/arpit_bhayani", + "linkedin": "https://linkedin.com/in/arpitbhayani", + "github": "https://github.com/arpitbbhayani", + "website": "https://arpitbhayani.me/", + "roles": ["pmc"] +} diff --git a/docs/src/content/team/ashwin.json b/docs/src/content/team/ashwin.json new file mode 100644 index 000000000..bfc98426b --- /dev/null +++ b/docs/src/content/team/ashwin.json @@ -0,0 +1,10 @@ +{ + "name": "Ashwin Kulkarni", + "avatar_url": "https://avatars.githubusercontent.com/u/19169648?v=4", + "url": "", + "x": "https://x.com/ashwinkulkarni4", + "linkedin": "https://www.linkedin.com/in/iashwin28/", + "github": "https://github.com/AshwinKul28", + "website": "", + "roles": ["pmc"] +} diff --git a/docs/src/content/team/jyotinder.json b/docs/src/content/team/jyotinder.json new file mode 100644 index 000000000..b6ad4984b --- /dev/null +++ b/docs/src/content/team/jyotinder.json @@ -0,0 +1,10 @@ +{ + "name": "Jyotinder Singh", + "avatar_url": "https://avatars.githubusercontent.com/u/33001894?v=4", + "url": "https://jyotindersingh.com/", + "x": "https://x.com/Jyotinder_Singh", + "linkedin": "https://linkedin.com/in/jyotinder-singh", + "github": "https://github.com/JyotinderSingh", + "website": "https://jyotindersingh.com/", + "roles": ["pmc"] +} diff --git a/docs/src/content/team/prashant.json b/docs/src/content/team/prashant.json new file mode 100644 index 000000000..561ce56ec --- /dev/null +++ b/docs/src/content/team/prashant.json @@ -0,0 +1,10 @@ +{ + "name": "Prashant Shubham", + "avatar_url": "https://avatars.githubusercontent.com/u/13472823?v=4", + "url": "https://prasha.me", + "x": "https://x.com/prashacr7", + "linkedin": "https://www.linkedin.com/in/prashant-shubham07", + "github": "https://github.com/lucifercr07", + "website": "https://prasha.me", + "roles": ["pmc"] +} diff --git a/docs/src/content/team/prateek.json b/docs/src/content/team/prateek.json new file mode 100644 index 000000000..c62fc8bf0 --- /dev/null +++ b/docs/src/content/team/prateek.json @@ -0,0 +1,10 @@ +{ + "name": "Prateek Rathore", + "avatar_url": "https://avatars.githubusercontent.com/u/25244718?v=4", + "url": "", + "x": "https://twitter.com/psrvere", + "linkedin": "https://www.linkedin.com/in/prateek-singh-rathore/", + "github": "https://github.com/psrvere", + "website": "", + "roles": ["committer"] +} diff --git a/docs/src/content/team/soumya.json b/docs/src/content/team/soumya.json new file mode 100644 index 000000000..584d42e0a --- /dev/null +++ b/docs/src/content/team/soumya.json @@ -0,0 +1,10 @@ +{ + "name": "Soumya Panigrahi", + 
"avatar_url": "https://avatars.githubusercontent.com/u/151079203?v=4", + "url": "", + "x": "https://x.com/soumyapanigrahi", + "linkedin": "https://www.linkedin.com/in/soumya-panigrahi-8038118/", + "github": "https://github.com/soumya-codes", + "website": "", + "roles": ["pmc"] +} diff --git a/docs/src/content/updates/2024-07-18.md b/docs/src/content/updates/2024-07-18.md new file mode 100644 index 000000000..db0b615c6 --- /dev/null +++ b/docs/src/content/updates/2024-07-18.md @@ -0,0 +1,14 @@ +--- +--- + +Agenda: + +- First community call 🎉 +- The documentation website is up: [https://dicedb-docs.netlify.app/](https://dicedb-docs.netlify.app/) +- Plan to keep DiceDB a drop-in replacement of Redis, however, redis feature-set completion won’t be a blocker for launch. +- Aim to support a basic, foundational feature set +- Plans for real-time reactivity: +- Support for JSON +- Filtering on JSON fields +- Optimizations for DSQL query executor: +- Instead of re.match use a specific regex matcher for better performance. diff --git a/docs/src/content/updates/2024-08-01.md b/docs/src/content/updates/2024-08-01.md new file mode 100644 index 000000000..5c5b8a28a --- /dev/null +++ b/docs/src/content/updates/2024-08-01.md @@ -0,0 +1,25 @@ +--- +--- + +Agenda: + +- Updates from Arpit +- Design discussion around multi-threaded architecture for Dice being implemented by Yash +- JSON integration with QWATCH +- Add JSON support to the executor to filter on JSON fields using JSONPath syntax. +- JSON-to-JSON comparisons in WHERE clause? +- Research from Cockroach labs on Json libs \- [https://www.cockroachlabs.com/blog/high-performance-json-parsing/](https://www.cockroachlabs.com/blog/high-performance-json-parsing/) + +## Jul 25, 2024 | [DiceDB Weekly Community Call](https://www.google.com/calendar/event?eid=M2Judmhnb2E0YnFsY2d0NWUwMDBhdnZjbmUgYXJwaXQubWFzdGVyY2xhc3NAbQ) + +Agenda: + +- Commands for supporting JSON as native Data Type \- do we agree on [this](https://redis.io/docs/latest/develop/data-types/json/). I see the issue has already been created, we just need an alignment. +- Jyotinder to check for absolute essentials for Realtime Reactivity +- Crips documentation around the DSQL capabilities as of today +- JSON support for JSONPath and partial updates +- Yash to discuss multi-threaded implementation +- ByteArray divergence \- BITX commands for ByteArray type +- String and ByteArray implementation will have different performance +- Make sure this is documented +- Arpit to write documentation around \`SET\` and \`GET\` command in docs repository. diff --git a/docs/src/content/updates/2024-08-08.md b/docs/src/content/updates/2024-08-08.md new file mode 100644 index 000000000..1ea195eb6 --- /dev/null +++ b/docs/src/content/updates/2024-08-08.md @@ -0,0 +1,47 @@ +--- +--- + +Shipped 🚀 + +- Command Enhancements: +- KEYS command implementation +- SET command enhancements +- COMMAND GETKEYS command added +- MSET command implementation +- JSON Support: +- JSON.SET and JSON.GET commands +- JSONPath support for querying JSON data +- AUTH command +- BIT commands +- Performance and Stability: +- Fixed server reconnection issues +- Key Expiry fixes +- Executor Benchmarks added +- Thread safety improvements for dencoding +- Data Handling: +- RESP parsing fix for handling RESP_NIL in BulkString +- Signed integer dencoding support +- Development and Testing: +- Test improvements +- Live Reloading on dev server + +Agenda: + +- soumya: Do we need to support range queries? 
+- gaurav897: _Do we have a good monitoring/metrics/logging story around DiceDB? Given that we are early in the project, should we start proactively on this?_ +- Prometheus +- opentelemetry +- Executor performance +- [https://discord.com/channels/1034342738960855120/1264145943884992595/1269314643298488370](https://discord.com/channels/1034342738960855120/1264145943884992595/1269314643298488370) +- Integration test setup? + +Action Items + +- Memtier +- SQL equivalence +- JSON where clause support +- dice-cli stability with QWATCH +- Python-sdk +- Java-sdk +- Js-sdk +- Stripped locking diff --git a/docs/src/content/updates/2024-08-15.md b/docs/src/content/updates/2024-08-15.md new file mode 100644 index 000000000..c56fce2c0 --- /dev/null +++ b/docs/src/content/updates/2024-08-15.md @@ -0,0 +1,44 @@ +--- +--- + +Shipped + +- Commands: +- TOUCH and OBJECT IDLETIME +- KEEPTTL option for SET command +- COPY command +- PTTL command +- GETEX command +- GETDEL command +- RENAME command +- MGET command +- DECRBY command +- EXISTS command +- DECR command +- Fixes +- Fixed atomicity issue in GetAll() function +- Improved atomicity of Get() function +- Simplified AUTH flow +- Fixed failing TestJSONSetWithInvalidJSON +- Fixed issue with emitting watchEvent from DelByPointer +- Testing and code quality: +- Added script to plot executor benchmark results +- Added various new test cases: +- For DEL command +- For DISCARD command +- For expired keys in StackRef.Pop() method +- Additional unit tests for eval.go file +- Introduced golangci linter to improve code quality +- Improved queueref benchmark + +## Discussion + +- Consensus on consistency - Do we need locks, readers-writers block, etc. +- Blocking operations +- Gaurav and Soumya to explain their reasoning + +## Action Items + +- New In memory shard for serving watch queries. +- Make target for linting +- Sleep Timer stub diff --git a/docs/src/content/updates/2024-08-19.md b/docs/src/content/updates/2024-08-19.md new file mode 100644 index 000000000..a74b13181 --- /dev/null +++ b/docs/src/content/updates/2024-08-19.md @@ -0,0 +1,29 @@ +--- +--- + +Discussed: + +Separation of Concerns: + +- Commands like MGET/MSET - Key used across shards, +- Similar for Qwatch - when we are watching a pattern, the pattern can exist across multiple shards. +- Transactional commands like MGET/MSET - We need an abstraction layer outside shards that can manage the transaction across the shards. + +IOLayer - Should it wait for all shards to complete the tasks and proceed only after all shards have responded? + +Keep store dumb. All ops on store as atomic as possible. + Add complexity to IOLayer / Coordinator in the future if optimisations are needed. + +Next step task + +- Remove locks and provide atomic operations. +- Address review comments and merge Store abstraction refactor Yash +- Watch to move out of store / Or implement a scan operator in store Jyotinder +- Limit Store operations to Put / Get / Delete & Scan Pratik +- We are using unsafe pointer, should we move to empty struct or interface. Move to generics. Ashwin Kulkarni +- Removing all Mutex from Store after the shards and channels are created for multi threads. soumya +- Skeleton for multithreaded mode Yash +- Extract Coordinator from IO Tasks. Gaurav +- Qwatch: each io thread launches a qwatch command, this fans out to every shard. Each shard now maintains records for which io threads are listening to which queries.
Jyotinder +- moar thoughts - the watchlist can be maintained at the said coordinator level +- Fanout commands that require consistency soumya diff --git a/docs/src/content/updates/2024-08-22.md b/docs/src/content/updates/2024-08-22.md new file mode 100644 index 000000000..0c2ea6cdb --- /dev/null +++ b/docs/src/content/updates/2024-08-22.md @@ -0,0 +1,54 @@ +--- +--- + +## Changelog + +### New Commands + +- Support for JSON.TYPE command +- Support for EXPIREAT command +- Support for DBSIZE command +- Support for EXPIRETIME command +- Deque implementation for LPUSH, RPUSH, LPOP, and RPOP commands +- JSON support in WHERE clause for queries + +### Performance + +- Improved performance of JSON WHERE clause evaluation +- Refactored store abstraction as a prerequisite for multi-threading + +### Bug Fixes + +- Type deduction logic for integers and related fixes +- RENAME command now fetches the key before renaming +- Fixed QWATCH response format and executor type inference logic +- Fixed type casting logic for SET and GET commands +- Corrected INCR/DECR logic and added tests +- Fixed incorrect benchmark iterations for QueueRef +- Corrected the JSON.SET command Arity +- Standardized error messages across codebase +- Updated AUTH error message to be consistent with Redis +- Keep JSON.TYPE consistent with Redis +- Fixed flaky tests due to JSON ordering + +### Testing + +- Added unit and integration tests for EXPIRE and EXPIRETIME commands +- Added stubbed time for tests +- Run complete test suite on PRs + +### Docs + +- Created document on inconsistencies between DiceDB and Redis + +### Developer Workflow + +- Added make target for linter +- Updated linter.yml + +## Discussions + +- Compare and contrast the list of Object Types and Encoding types supported by REDIS and DiceDB. Attempt to arrive at the next set of action items. +- [https://github.com/DiceDB/dice/issues/386\#issuecomment-2303968575](https://github.com/DiceDB/dice/issues/386#issuecomment-2303968575) +- Discuss on the current implementation of Keypool and its necessity. +- [https://github.com/DiceDB/dice/issues/386](https://github.com/DiceDB/dice/issues/386) diff --git a/docs/src/content/updates/2024-08-29.md b/docs/src/content/updates/2024-08-29.md new file mode 100644 index 000000000..37c0d6f15 --- /dev/null +++ b/docs/src/content/updates/2024-08-29.md @@ -0,0 +1,40 @@ +--- +--- + +## Changelog + +### New Commands + +- Implement \`HSET\` command +- Add support for options for EXPIRE and EXPIREAT +- Add support for command \`JSON.CLEAR\` +- Added \`SADD\`, \`SREM\`, \`MEMBERS\`, \`SCARD\`, \`SINTER\`, \`SDIFF\` commands +- Implement \`GETSET\` command +- Added support for \`FLUSHDB\` command +- Add support for \`BITPOS\` command +- Added support for \`QUNWATCH\` command + +### Bug Fixes + +- Fix: \`STACKREFLEN\` and \`QUEUEREFLEN\` are not aware of the deletion of key + +### Testing + +- Add asserts checking non-set commands: addresses \#411 +- Adds memtier benchmark and load tests preset + +### General Improvements + +- Move QWATCH logic to QueryWatcher +- Reorganize async server code +- Moved from unsafe pointer to string type for the store + +## Action Items + +- Hacktoberfest focus +- Docs: Inaccuracies, fixes. 
+- Testing +- Dice-cli +- Language-specific SDKs +- Windows Support +- Report command inconsistencies diff --git a/docs/src/content/updates/2024-09-05.md b/docs/src/content/updates/2024-09-05.md new file mode 100644 index 000000000..0a36df74f --- /dev/null +++ b/docs/src/content/updates/2024-09-05.md @@ -0,0 +1,43 @@ +--- +--- + +## Changelog + +### New Commands + +1. Added support for JSON.DEL command +2. Implemented PFADD/PFCOUNT commands +3. Added COMMAND LIST command +4. Added support for HGETALL command + +### Features + +1. Implemented caching for QWATCH queries to improve performance +2. Added JSON support to ORDER BY clause for DSQL queries +3. Added ShardThread and ShardManager for improved concurrency +4. Switched the store to use SwissTable instead of HashTable for better performance +5. Migrated set data structure from built-in go map to swissTable + +### Bug Fixes + +1. Fixed integer type handling inconsistencies in SQL executor +2. Fixed SETBIT command for encoding +3. Fixed benchmark issue in BenchmarkEvalMSET by resetting store for each iteration + +### Testing + +1. Added integration tests for JSON.CLEAR and JSON.DEL commands +2. Temporarily reverted TCL tests integration in Dice + +### Other + +1. Removed query-watcher dependency from store +2. Updated tests to use new Map implementation +3. Fixed linting errors in SwissTable implementation + +## Action Items + +- Allow people to run queries without using cache +- Move set back to hashmap +- Set up a temporary issue for the qwatch leaderboard impl. +- Improve qwatch testing, chaos testing, try to break it. diff --git a/docs/src/content/updates/2024-09-12.md b/docs/src/content/updates/2024-09-12.md new file mode 100644 index 000000000..098de5a64 --- /dev/null +++ b/docs/src/content/updates/2024-09-12.md @@ -0,0 +1,52 @@ +--- +--- + +## Changelog + +### New Commands + +1. Added support for `JSON.ARRLEN` command +2. Implemented `HGET` command +3. Added support for `COMMAND` command + +### Features + +1. Improved `ORDER BY` clause handling in SQL parser +2. Added support for `LIKE` and `NOT LIKE` to the SQL Executor +3. Added retry with exponential backoff for QWATCH writes +4. Added support for common cache per fingerprint in QWATCH +5. Deprecated `FROM` clause in DSQL and moved key filtering to `WHERE` +6. Added realtime leaderboard demo using QWATCH + +### Bug Fixes + +1. Fixed hyperloglog type assertion check +2. Fixed inconsistent `EXPIRE` with conflicting options: `LT` `GT` +3. Fixed data race occurring when emitting a WatchEvent +4. Fixed inconsistent `EXPIRE` when ran with flags `XX`, `LT`\-`XX` and `GT` on a key without ttl +5. Fixed bitmap commands deviation from Redis implementation +6. Optimized type error return + +### Testing + +1. Added TestEnvEnabled config and avoiding AOF dump for test env +2. Added unit and integration tests for `HGET` command + +### Other + +1. Version bump +2. Notifies Query Manager of key changes asynchronously +3. Removed locking structures for store +4. Refactored constants from constants package +5. Refactored the codebase and moved packages to internal +6. Removed keypool and related structures from the store +7. Reverted set data structure to map +8. 
Updated README with Leaderboard example + +## Discussion + +- Stability \+ latencies +- Redis exporter, node exporter, prometheus +- TCL test documentation +- Multi-threading progress +- New query fingerprinting logic diff --git a/docs/src/content/updates/2024-09-19.md b/docs/src/content/updates/2024-09-19.md new file mode 100644 index 000000000..aa1d3db44 --- /dev/null +++ b/docs/src/content/updates/2024-09-19.md @@ -0,0 +1,64 @@ +--- +--- + +## Changelog + +### New Commands + +1. Added `JSON.INGEST` support for HTTP +2. Added `COMMAND HELP` +3. Added `JSON.ARRPOP` command +4. Implemented `JSON.NUMINCRBY` command +5. Added support for `JSON.MGET` +6. Added support for `JSON.TOGGLE` +7. Implemented `JSON.ARRAPPEND` command +8. Added support for `JSON.DEBUG` +9. Added support for `LLEN` command +10. Added support for `SELECT` command +11. Added `HLEN` support +12. Added support for `JSON.OBJLEN` +13. Added support for `JSON.STRLEN` +14. Implemented `PFMERGE` command +15. Implemented `JSON.FORGET` command + +### Features + +1. Added support for arm64 architecture +2. Implemented LFU cache with approximate counting +3. Added config file support +4. Added HTTP support +5. SQL Executor Performance Improvements +6. Added Darwin arm64 compatibility +7. Bumped Go SDK to 1.23 +8. Added minimum and maximum Expiry time + +### Bug Fixes + +1. Fixed auth command line flags to be respected during server startup +2. Fixed `MSET` command consistency issue with Redis +3. Fixed flaky unit test for `JSON` comparisons +4. Fixed flaky `JSON` tests +5. Fixed `GETEX` with `EX` for big integers +6. Fixed TestParseHTTPRequest +7. Fixed `GETKEYS` commands failure for wrong number of arguments + +### Testing + +1. Enhanced `INCR` command test suite with comprehensive edge cases +2. Improved `QWATCH` tests post fingerprinting +3. Enhanced `GETDEL` to handle Set, JSON, ByteArray, String, Integer and expanded test coverage +4. Improved test folder structure +5. Added test for abort command + +### Other + +1. `PFCOUNT` benchmark and caching implementation analysis +2. Updated CONTRIBUTING.md +3. ted report_command_bug.md +4. Reverted change to launch new goroutine for each mutate notification + +# Action Items + +1. Return query fingerprint to users subscribing to QWATCH. +2. Allow unsubscriptions using QWATCH by passing the query fingerprint. +3. Add documentation for QUNWATCH diff --git a/docs/src/content/updates/2024-09-26.md b/docs/src/content/updates/2024-09-26.md new file mode 100644 index 000000000..201a51cdc --- /dev/null +++ b/docs/src/content/updates/2024-09-26.md @@ -0,0 +1,54 @@ +--- +--- + +## Changelog + +New Commands: + +1. Added support for `QWATCH` Command with HTTP (Prashant Shubham) +2. Implemented `HSETNX` command (shreyas23sk) +3. Added support for `INCRBY` command (Saubhik Kumar) +4. Added support for `JSON.OBJKEYS` (Saubhik Kumar) +5. Added support for `JSON.ARRINSERT` (jujiale) +6. Implemented `HDEL` command (manishrw) +7. Added support for `COMMAND INFO` command (meetghodasara) +8. Added support for `HSTRLEN` command (c-harish) +9. Added support for `INCRBYFLOAT` command (hash-f) +10. Added support for `SETEX` command (AshikBN) +11. Added support for `GETRANGE` command (KaviiSuri) +12. Added support for `JSON.NUMMULTBY` (c-harish) +13. Added `ECHO` command (hgupta12) + +Features: + +1. Added multi-threading capability to DiceDB (soumya-codes) +2. Enhanced support for commands over HTTP (Pratik Pandey) +3. Added flag to enable/disable multi-threading on server (Prashant Shubham) +4. 
Setup slog with zerolog (KaviiSuri) + +Bug Fixes: + +1. Fixed `EXPIRE` with empty string as `TTL` (Abh-ay) +2. Fixed `BITCOUNT` when no key provided (Shardul Silswal) +3. Fixed startup statement (Ashwin Kulkarni) +4. Fixed inconsistent `BITOP` with non-string source key (Karan Dixit) +5. Fixed bitcount operations (Ved Chitnis) +6. Fixed handling of multiple failure/error scenarios during socket reads (ayushsatyam146) +7. Fixed inconsistency in the `INCR` command (Maveric-k07) +8. Fixed keyspacestats and `FLUSHDB` (apoorvyadav1111) +9. Fixed `GETEX` command returning value for JSON-based object (Kapish Malik) +10. Fixed incorrect logging when default port is already in use (Shardul Silswal) + +Testing and Documentation: + +1. Updated README for Multithreading (Prashant Shubham) +2. Added more tests for HTTP support (Pratik Pandey) +3. Added integration tests to validate args for `BITCOUNT` (Shardul Silswal) +4. Added integration tests for HTTP, `ABORT` support for HTTP (Pratik Pandey) + +Other: + +1. Reverted dice binary checked in and added it to .gitignore (Prashant Shubham) +2. Updated README.md (Arpit Bhayani) +3. Eval refactoring for multithreading (Ashwin Kulkarni) +4. Version patch and bump to 0.0.4 (Arpit Bhayani, Jyotinder Singh) diff --git a/docs/src/content/updates/2024-10-03.md b/docs/src/content/updates/2024-10-03.md new file mode 100644 index 000000000..f51b92498 --- /dev/null +++ b/docs/src/content/updates/2024-10-03.md @@ -0,0 +1,84 @@ +--- +--- + +## Changelog + +### New Commands and Features + +1. Added support for `DUMP` command (TheRanomial) +2. Added support for `HKEYS` command (swarajrb7) +3. Added support for `HEXISTS` command (c-harish) +4. Added `JSON.RESP` Command (prateek.singh.rathore) +5. Added support for `JSON.ARRTRIM` (jujiale) +6. Added `HINCRBYFLOAT` command (ayushsatyam146) +7. Added `ZADD` and `ZRANGE` commands (Jyotinder Singh) +8. Added `HINCRBY` Command (ankkyprasad) +9. Added `HVALS` command (bijuli74) +10. Added `HRANDFIELD` command (Indranil0603) +11. Added `APPEND` Command (Aditya-Bhalerao) + +### Improvements and Refactoring + +1. Abstracted out Table implementation and supporting Swiss and Go maps (Arpit Bhayani) +2. Eval Refactor for better compatibility with all protocols and multithreading (Ashwin Kulkarni) +3. Made DiceDB build work on different architectures (swaingotnochill) +4. Added Websocket support (prateek.singh.rathore) +5. Improved code structure for evalHVALS (Jyotinder Singh) + +### Bug Fixes + +1. Fixed `BITPOS` command (Bhavya Jain) +2. Fixed `TYPE` command return encoding (apoorvyadav1111) +3. Fixed Child Process not working for `BGREWRITEAOF` on MacOS (KaviiSuri) + +### Testing and Documentation + +1. Added integration tests for `KEYS`, `MGET` , `MSET`, `JSON` commands using HTTP (Shubh1815) +2. Added HTTP Integration Tests for various commands (Karan Dixit, HARSH VARDHAN SINGH, apoorvyadav1111) +3. Added Websocket integration tests for `SET` and `GET` (prateek.singh.rathore) +4. Updated documentation for various commands (multiple contributors) +5. Improved consistency in documentation for multiple commands (multiple contributors) + +### Other + +1. Made keystat localized to store instance (soumya-codes) +2. Added check for controlling the max TCP connections (Ayush Saluja) +3. Updated README with various improvements (Arpit Bhayani, Dhroov Gupta) +4. 
Version patch and updates (Arpit Bhayani) + +## Discussion + +Points to discuss \[from Arpit\] + +- Playground mono +- Let’s use the word DiceDB and not Dice, every where, there are variable and config places where the word Dice is used +- The config “REQUEST_LIMIT” should be “REQUEST_LIMIT_PER_MIN” +- The config “REQUEST_WINDOW” should be “REQUEST_WINDOW_SEC” +- What is the purpose of the package internals/cmds? +- Why is there a package called db that just is a wrapper over DiceDB commands? Supporting a new command in Playground now means making a change in both backend and frontend. Also, these functions are essentially useless now. +- Also, A better way would be for SDK to expose a generic command that takes command and argos and executes the. What do you think? +- With this, the need for diced.go vanishes +- Instead of exposing the endpoint as \`/cli\`, let’s call it \`/shell/exec\` +- We need all errors to be standardized w.r.t casing. I propose everything to be lowercase. +- server/httpServer.go \-\> server/http.go, why again the word “server” +- “errorResponse” function should do JSON marshal and not string format +- I see places where log and slog both being used, let’s just use slog everywhere +- Need to understand pkg/util/helpers.go, neither the name not the function made much sense. +- Playground Web +- Have a .env file that can take playground mono URL which points to API URL +- Do not call it \`CLI_COMMAND_URL\`, a bad name. Call it \`PLAYGROUND_MONO_URL\` +- Should we not check in package-lock.json? When I installed the file, it got modified +- README lacks node version requirement +- Does this output static site or does it require a server to run? I see the command \`npm run start\` to start the server in production. Ideally, we want a static website for the playground web. No servers. +- Why is Dockerfile_Backend file even present in web? Also, very bad name. We are calling it Playground Mono. Also, the name is neither snake nor camel case. +- “CLI” is a bad name, let’s call it Shell. +- “ // comment this to hide footer” is such a useless comment. +- Comments are supposed to be \`// comment\` and not \`//comment\` +- “DiceDB PlayGround” as name should be “DiceDB Logo” “Playground” +- “Cleanup in :” and “Command left:” have inconsistent styling. Also, it should be \`Commands left\`. Add a tool tip (i) button, next to both that tells what these limits are all about. +- Move commands from command.ts file to using DiceDB commands command +- Delete mock data file +- In api.ts, propagate the error to the use instead of just logging it to console. Show user the error that was emitted from the backend/mono. +- Constants file may not be required rightnow, given we should use Playground Mono URL from .env +- Instead of calling “blacklist” let’s use the word “blocklist” +- Why cliUtils has get and set and delete, do we not support any other command? Also, given this file is present in utils folder it should be called \`utils/cli.ts\` and not \`utils/cliUtils.ts\` diff --git a/docs/src/content/updates/2024-10-10.md b/docs/src/content/updates/2024-10-10.md new file mode 100644 index 000000000..ab1e8d80c --- /dev/null +++ b/docs/src/content/updates/2024-10-10.md @@ -0,0 +1,80 @@ +--- +--- + +### DB + +New Commands and Features: + +1. Added support for ZRANGE.WATCH (Jyotinder Singh) +2. Implemented GET.WATCH command support (Jyotinder Singh) +3. Added support for HMSET (Raghav Babbar) +4. Implemented GEOADD and GEODIST commands (KaviiSuri) +5. 
Added support for BITFIELD command with all subcommands (apoorvyadav1111) +6. Implemented HMGET command (vishnuchandrashekar) +7. Added HSCAN command (manish wadhwani) + +Improvements and Refactoring: + +1. Improved Watch Manager and Store API compatibility (Jyotinder Singh) +2. Refactored to handle Watch command type in the generic flow of command handlers (Ashwin Kulkarni) +3. Added query fingerprinting feature (kakdeykaushik) +4. Allowed users to customize the KeysLimit value at server start (vpsinghg) +5. Renamed all appropriate variables from Redis to DiceDB (Ashwin Kulkarni) + +Bug Fixes: + +1. Fixed return value in evalPersist function (shashi-sah2003) +2. Fixed Deque-push-output issue (bhima2001) +3. Fixed DELETE command with 0 arguments (Vansh Chopra) +4. Added Hashmap incrementFloatValue overflow check (dograprabhav) +5. Fixed SINTER to support single key parameter (arushi-08) + +Testing and Documentation: + +1. Added GET.WATCH integration test using SDK (Jyotinder Singh) +2. Added Integration Tests for QWATCH Where Clause (prateek.singh.rathore) +3. Added HTTP integration tests for various commands (Harshit Gupta) +4. Updated documentation for multiple commands (various contributors) +5. Added default config file to be used when dice server starts (Prashant Shubham) + +Performance and Optimization: + +1. Updated JSON.ARRLEN to match RedisCLI outputs (Surya Teja) +2. Marshalling the value in SET command for HTTP (lovish2525) + +Other: + +1. Updated benchmark numbers as of 10th Oct 2024 (Arpit Bhayani) +2. Upgraded vulnerable packages (Progyan) +3. Added support for getting adhocReqChan buffer from config (Rohan Chavan) +4. Changed Github workflow to not run actions for docs related PRs (suryavirkapur) + +### Playground + +#### Playground-web contributions + +1. Render commands with newline char (Tarun) +2. Add tooltip component (Rishabh Gupta) +3. Refactor: monorepo alloy (KaviiSuri) +4. fix: code organisation and rendering (KaviiSuri) +5. Update env vars for Playground Mono and add webservice (Tarun Kantiwal) +6. Cleanup & Adjustment to support static site generation (N3XT) +7. \[Dx\] added pre-commit hooks (Shubham raj) +8. Adding support for generic command execution (Prashant Shubham) +9. Refactor repo for consistency (RishabhC-137) +10. UX Refactor \- 2 (Sarthak Kimtani) +11. Search box scroll added (Aryan Nagbanshi)Updated the Playground mono url endpoint (rishav vajpayee) +12. Enhance command history in CLI (Prathamesh Koshti) +13. Jest testing setup (Prathamesh Koshti) +14. Add prettier for lint and Github workflow (samanyu) + +#### Playground mono contributions + +1. Integration Test for Hash commands (Anish Koulgi) +2. Adding rate limiting headers to responses (ayushsatyam146) +3. Disable list of commands from playground repositories \#897 \- Commands Blacklisted (Yash Budhia) +4. Adding support for generic command execution (Prashant Shubham) +5. Refactored repo for consistency (rishav vajpayee) +6. Add trailing slash middleware to prevent unexpected API crash (Tarun Kantiwal) +7. Integration and stress tests for ratelimiter +8. Added CORS support to server (rishav vajpayee) diff --git a/docs/src/content/updates/2024-10-17.md b/docs/src/content/updates/2024-10-17.md new file mode 100644 index 000000000..240469454 --- /dev/null +++ b/docs/src/content/updates/2024-10-17.md @@ -0,0 +1,65 @@ +--- +--- + +## DB + +### New Features and Commands + +1. Renamed `QWATCH` to `Q.WATCH` for consistency (Jyotinder Singh) +2. Added `BITFIELD_RO` command (Saubhik Kumar) +3. 
Added support for `OBJECT ENCODING` (Saish Pawar) +4. Added support for command `JSON.STRAPPEND` (Karan Dixit) +5. Migrated `PFADD`, `PFCOUNT`, `PFMERGE` to Multi-threaded architecture (Surya Teja) +6. Migrated `ZADD` and `ZRANGE` commands to multi-threaded architecture (Benjamin Mishra) + +### Improvements and Refactoring + +1. Added profiling for DiceDB (Soumya Panigrahi) +2. Improved config with HTTP and websocket properties (Vinit Parekh) +3. Fixed `.WATCH` fingerprinting to generate consistent fingerprints (Jyotinder Singh) +4. Updated SDK and added tests for `.WATCH` command using new APIs (Jyotinder Singh) +5. Migrated `JSON.CLEAR`, `JSON.STRLRN`, `JSON.OBJLEN` commands (jujiale) + +### Bug Fixes + +1. Fixed `JSON.STRLEN` for no path and type error cases (bhima2001) +2. Fixed handling of multiple failure/error scenarios during socket writes (ayushsatyam146) +3. Fixed HTTP JSON response to use JSON null (Vinit Parekh) +4. Fixed null value in removing array element (Dhroov Gupta) +5. Fixed `JSON.GET` to return error when JSON path does not exist (Samarpan Harit) +6. Refactored evalAPPEND to handle leading zeros in value (Shashi sah) + +### Testing and Documentation + +1. Added more tests for SET command (prateek.singh.rathore) +2. Added HTTP Integration tests for `COMMAND` and `COMMAND/COUNT` (svkaizoku) +3. ted documentation for multiple commands (various contributors) +4. Added common spelling fixes (Abhijeet Kasurde) +5. Optimized README for clipboard copy action (Aditya Poddar) + +### Other + +1. Added Ping telemetry and streamlined Version (Arpit Bhayani) +2. Made error statements similar to Redis in migrated errors (Ashwin Kulkarni) +3. Updated stars on docs (Apoorv Yadav) +4. Switched logging mode to dev by default (KaviiSuri) + +## playground-mono + +1. Add integration tests for SET commands (Dhroov Gupta) +2. Expose ratelimit headers (aasifkhan7) +3. Add Dice command integration tests (EXISTS, EXPIRE, EXPIREAT, EXPIRETIME) and package for validation (Rishabh Gupta) +4. Adding support for pretty response (pshubham) +5. Integration Test for Hash commands (anishkoulgi) +6. Adding support for rendering list based responses (tarun-kavipurapu) + +## playground-web + +1. Adding support for JSON.SET command (pshubham) +2. Unit tests for playground-web repository (ViragJain) +3. fix: docker compose contexts to ensure docker compose runs (Kavii) +4. fix: docker compose contexts to ensure docker compose runs (Kavii) +5. \[DiceDB Console\] Setup NextJS with Electron (Ajay Poshak) +6. Setup NextJS with Electron (Ajay Poshak) +7. Add external links to footer items (pshubham) +8. Fixed the styling of get started button (Aryan Nagbanshi) diff --git a/docs/src/content/updates/2024-10-24.md b/docs/src/content/updates/2024-10-24.md new file mode 100644 index 000000000..917eeae43 --- /dev/null +++ b/docs/src/content/updates/2024-10-24.md @@ -0,0 +1,59 @@ +--- +--- + +## DB + +### New Features + +1. Migrated commands `HLEN`, `HSTRLEN`, and `HSCAN` with integration tests for HTTP, WebSocket, and RESP. (@c-harish, @ashwin-kulkarni128) (\#1024) +2. Added support for `ZPOPMIN` command. (@teja8551) (\#1128) +3. Implemented multi-shard commands: `RENAME`, `COPY` , `MSET`, and `MGET`. (@ashwin-kulkarni128) (\#1059) +4. Bloom Filter Commands: Migrated commands such as `bf.add`, `bf.reserve`, `bf.exists`, and `bf.info` to store_eval. (@apoorvyadav1111) (\#1031) +5. Added support for command `ZRANK`. (@SyedMa3) (\#1126) + +### Enhancements + +1. 
Migrated INCR commands and added integration tests for WebSocket. (@pg30) (\#1016) +2. Migrated commands `HINCRBY`, `HINCRBYFLOAT`, and `HRANDFIELD` with refactored cmdmeta and eval logic. (@saisaipawar, @ashwin-kulkarni128) (\#1081) +3. Moved supported protocols under Protocols. (@arpitbhayani) +4. Added CLI example to WebSocket documentation. (@JP-sDEV) (\#1113) + +### Bug Fixes + +1. Fixed issues in specific use cases of `ZPOPMIN` command. (@teja8551) (\#1164) +2. Handled root path '.' correctly in `JSON.OBJLEN` command. (@saubhikpandey) (\#603) +3. Fixed `ZRANK` command to return score as string with `WITHSCORE`. (@shashi-sah2003) (\#1165) +4. Fixed bugs in WebSocket integration tests. (@psr) (\#1142) + +### Documentation Updates + +1. Audited and enhanced documentation for the `COPY` command. (@onlybond) (\#816) +2. Added Hello, World examples for Simple and Reactive protocols. (@arpitbhayani) +3. Added documentation for Reactive Hello, World\! (@arpitbhayani) +4. Enhanced documentation for the sleep command. (@kdivyansh268, @pshubham) (\#832) +5. Audited `Q.WATCH` documentation for consistency. (@shashank-priyadarshi) (\#781) +6. Updated `INCR` command documentation for consistency with Redis. (@pg30) +7. Audited documentation for the `JSON.TYPE` command. (@onlybond) (\#827) +8. Added cURL examples to HTTP documentation. (@vanshavenger) (\#1113) + +### Refactoring + +1. Refactored HyperLogLog command responses and removed logger references. (@prashant1996cr07) (\#1183) +2. Refactored HTTP response structure. (@prashant1996cr07) (\#1150) + +### Logging and Configuration + +1. Made logging consistent across start and shutdown sequences. (@arpitbhayani) (\#1172) +2. Added an option to set default log level to info. (@arpitbhayani) (\#1172) +3. Configured the number of shards as a configuration, defaulting to the number of cores. (@arpitbhayani) (\#1172) +4. Set connection close log level to debug. (@soumya-codes) (\#1139) + +### Miscellaneous + +1. Updated README.md with new CLI instructions. (@arpitbhayani) +2. Linked and documented best practices for contributions and logging. (@arpitbhayani) +3. Added DiceDB ASCII art for CLI. (@arpitbhayani) +4. Added a template for command documentation. (@apoorvyadav1111) (\#1147) +5. Removed unnecessary log references. (@prashant1996cr07) +6. Removed multi-threading experimental mode from the README. (@arpitbhayani) +7. Version bump to 0.0.5. (@arpitbhayani) diff --git a/docs/src/content/updates/2024-11-07.md b/docs/src/content/updates/2024-11-07.md new file mode 100644 index 000000000..a921f7c6a --- /dev/null +++ b/docs/src/content/updates/2024-11-07.md @@ -0,0 +1,35 @@ +--- +--- + +## DB + +### New Features + +- Migrate commands `SADD`, `SREM`, `SCARD`, `SMEMBERS` to store_eval. (@sahoss) (\#1020) +- Add support for commands `LINSERT` and `LRANGE`. (@ParvBudh28) (\#659) +- Add integration tests for commands `HSET`, `HGET`, `HDEL`. (@c-harish) (\#1021) +- Migrate `GETEX` and `GETDEL` commands. (@Ehijoe) (\#1061) +- Migrated commands `EXPIRE`, `EXPIREAT`, `EXPIRETIME`, `TTL`, `PTTL`. (@SyedMa3) (\#1149) +- Added `GET.UNWATCH` command support and fixed related issues. (@psr) (\#998) + +### Enhancements + +- ZPOPMIN command tests enhanced to check outputs with ZCOUNT command. (@teja8551) (\#1225) +- Moved HExists, HKeys, HVals commands in commands folder. (@apoorvyadav1111) (\#1229) + +### Fixes + +- Fix typo in documentation. (@vinitparekh1742) (\#1240) +- Fix `validateCmdMeta` to handle Unwatch commands. 
(@jyotindrsingh) + +### Documentation Updates + +- Revamped documentation with separate sections for Blog, Benchmarks, and Roadmap. (@arpitbhayani) (\#1236) +- Add support for command SELECT documentation. (@vanshavenger) (\#820) +- Add documentation for `LINSERT` and `LRANGE` commands. (@ParvBudh28) (\#1040) +- Release page added with blogs and release notes on the homepage. (@arpitbhayani) +- Lint across docs codebase. (@arpitbhayani) +- Removed memtier benchmark from Docs. (@arpitbhayani) +- Updated benchmark numbers and Redis compatibility page. (@arpitbhayani) +- Made landing page simpler and denser. (@arpitbhayani) +- Code documentation fixes. (@aadi-1024) (\#1232) diff --git a/docs/src/content/updates/2024-11-14.md b/docs/src/content/updates/2024-11-14.md new file mode 100644 index 000000000..ec3c925e2 --- /dev/null +++ b/docs/src/content/updates/2024-11-14.md @@ -0,0 +1,29 @@ +--- +--- + +## DB + +### New Features + +- Migrated JSON.RESP & JSON.DEBUG commands. (@c-harish) (\#1030) +- Migrated LPUSH, RPUSH, LPOP, RPOP, LLEN commands. (@Aditya-Chowdhary) (\#1019) +- Implemented ZADD options: XX, NX, CH, INCR, LT, GT according to Redis source. (@rushabhk04) (\#761) +- Migrate commands SETBIT, GETBIT, BITCOUNT, BITPOS, BITFIELD, BITFIELD_RO. (@vishnuchandrashekar) (\#1017) +- Added support for PFCOUNT.WATCH command. (@saubhikpandey) (\#1133) +- Command migration for single shard, multi-shard, and custom commands. (@ashwin-kulkarni128, @apoorvyadav1111) (\#1276) +- Refactored eviction framework with Basic LRU-based batch eviction. (@soumya-codes) (\#1268) +- Default WAL set to Null with WAL implementation added. (@arpitbhayani) + +### Bug Fixes + +- Fixed RESP parser to parse strings with multiple `\r`. (@c-harish) (\#1245) +- Fix LPOP to support multiple arguments. (@tren03) (\#1100) +- Added command metadata for LRANGE. (@shashi-sah2003) (\#1272) +- Enhanced GETRANGE to support byte array. (@c-harish) (\#1194) +- Added type check for string compatibility in evalAPPEND function. (@shashi-sah2003) (\#1193) + +### Documentation Updates + +- Reactive value proposition added to documentation. (@arpitbhayani) (\#1256) +- Releases page with blogs and release notes on the homepage. (@arpitbhayani) +- Roadmap and Redis compatibility page updated. (@arpitbhayani) diff --git a/docs/src/content/updates/2024-11-21.md b/docs/src/content/updates/2024-11-21.md new file mode 100644 index 000000000..9ab560688 --- /dev/null +++ b/docs/src/content/updates/2024-11-21.md @@ -0,0 +1,44 @@ +--- +--- + +## DB + +### New Features + +- Add support for `PFCOUNT.UNWATCH` command and cleanup reactivity tests. (@jyotindrsingh) (\#1030) +- Preserve `TTL` after `APPEND` command. (@dhanrajkotian3) (\#1036) +- Command migration for multishard commands: `TOUCH`, `DBSIZE`, `KEYS`. (@ashwin-kulkarni128, @jyotindrsingh) (\#1301) +- Migrated `GEOADD` and `GEODIST` commands. (@ygosain44) (\#1033) +- Add support for byte array/bitmap to APPEND command. (@c-harish) (\#1286) + +### Enhancements + +- Fix enable-multithreading flag and `PING` support. (@jyotindrsingh) +- Added labels for `WATCH` commands. (@psr) (\#1267) +- Refactored configuration management to use a `.conf` file. (@vinitparekh1742) (\#1292) +- Refactored `IOHandler` code. (@soumya-codes) (\#1306) +- Added `ZADD` documentation and improved error messages. (@vanshavenger) (\#1283) + +### Bug Fixes + +- Fixed goroutine leaks and deadlocks in worker and RESP integration tests. (@psr) (\#1298) +- Fix typo in `benchmarks.md`. 
(@vinitparekh1742) +- Ensure CI runs only when source code has changes. (@bhima2001) (\#1260) + +### Documentation Updates + +- Updated package-lock. (@apoorvyadav1111) (\#1311) +- Added documentation for new watch/unwatch commands. (@apoorvyadav1111) (\#1295) +- Changes in README for new configuration. (@vinitparekh1742) (\#1309) +- Added type documentation. (@vanshavenger) (\#1280) +- Reactivity page and quick write-up added. (@arpitbhayani) (\#1308) + +### Refactoring + +- Refactored Makefile. (@vinitparekh1742) (\#1288) +- Fixed CI and linting issues across codebase. (@arpitbhayani, @jyotindrsingh) + +### Miscellaneous + +- Consistent blog card styling on the website. (@realChakrawarti) (\#1300) +- Set up a `/docs` redirect. (@arpitbhayani) diff --git a/docs/src/content/updates/2024-11-28.md b/docs/src/content/updates/2024-11-28.md new file mode 100644 index 000000000..920c8e64c --- /dev/null +++ b/docs/src/content/updates/2024-11-28.md @@ -0,0 +1,48 @@ +--- +--- + +## DB + +### New Features + +- Command Migration: Migrated `DEL`, `EXISTS`, `PERSIST`, and `TYPE` commands.\*\* (@mohitnagaraj20, @jyotindrsingh, @apoorvyadav1111) (\#1015) +- Added support for PFCOUNT.UNWATCH command. (@jyotindrsingh) (\#1335) +- Integration tests added for COMMAND DOCS command. (@sa-k-shore) (\#1323) + +### Enhancements + +- Preserve TTL after APPEND command. (@dhanrajkotian3) (\#1036) +- Refactored configuration management to improve flag handling and introduce early returns for errors. (@jyotindrsingh) +- Rounded borders, word wrap, and badges added for improved UI. (@apoorvyadav1111) (\#1327) +- Reorganized IOThread code and added IOThread interface to BaseIOThread. (@jyotindrsingh) (\#1331) +- Cleaned up startup logs. (@jyotindrsingh) (\#1316) +- Removed async server. (@ashwin-kulkarni128, @jyotindrsingh, @apoorvyadav1111) (\#1318) +- Fixed Redis CLI connection error. (@prashant1996cr07) (\#1207) + +### Bug Fixes + +- Fixed configuration flags for better compatibility. (@jyotindrsingh) (\#1331) +- Fix Redis CLI connect error. (@prashant1996cr07) (\#1207) +- Fix to ensure fingerprints are retained until all clients unsubscribe. (@prashant1996cr07) (\#1335) +- Fixed linter issues across multiple files. (@jyotindrsingh, @apoorvyadav1111) (\#1314, \#1333) +- Removed unused utilities and dead code. (@jyotindrsingh) (\#1331) +- Fix for `GETRANGE` markdown file extension detection. (@Arijit6258) (\#1329) + +### Documentation Updates + +- Added links to examples in documentation. (@apoorvyadav1111) (\#1333) +- Temporarily removed `Q.WATCH` documentation from the website. (@jyotindrsingh) (\#1334) +- Updated documentation with improved clarity and layout. (@vinitparekh1742, @jyotindrsingh) (\#1332) +- Added hyperlinks for commands. (@pankajshadev) (\#1325) + +### Refactoring + +- Makefile updates for consistency. (@vinitparekh1742) (\#1310) +- Removed cache from linter for better performance. (@apoorvyadav1111) (\#1319) +- Reorganized and renamed Worker to IOThread. (@jyotindrsingh) (\#1330) + +### Miscellaneous + +- Version bump. (@jyotindrsingh) (\#1316) +- Dependency upgrade: Bumped `golang.org/x/net` from `0.21.0` to `0.23.0`.\*\* (@dependabot\[bot\]) (\#1316) +- Deleted outdated `GETRANGE` documentation. 
(@c-harish) (\#1338) diff --git a/docs/src/content/updates/2024-12-05.md b/docs/src/content/updates/2024-12-05.md new file mode 100644 index 000000000..68f23b897 --- /dev/null +++ b/docs/src/content/updates/2024-12-05.md @@ -0,0 +1,21 @@ +--- +--- + +### New Features + +- Bloom Filter and Type Standardization: This PR introduces bloom filter and type standardization to improve performance (@arpitbbhayani) (\#1357) +- Add documentation for SETEX command: Added documentation for the SETEX command, making it easier for users to understand its usage (@tarun-29) (\#1350) + +### Bug Fixes + +- Fixed build error in bytearray.go: This PR fixes a build error that occurred when building the bytearray package (@piyushhhxyz) (\#1351) +- fix: modify the config default value keep consistency: This PR standardizes the default values of configuration options across the codebase, ensuring consistency and preventing potential issues (@jujiale) (\#1352) + +### Documentation + +- Add documentation for JSON.OBJLEN command: Created a new page in the documentation for the JSON.OBJLEN command, while also fixing typos on the JSON.OBJKEYS page (@paulwalrath) (\#1345) +- Remove \--enable-multithreading flag usage in README: Removed the reference to the deprecated \`--enable-multithreading\` flag from the README, as it is no longer supported (@rahul-mallick-15) (\#1349) + +### General Enhancements + +- Remove encoding and supporting only Type: This PR removes unused encoding code and supports only type (@arpitbbhayani) (\#1341) diff --git a/docs/src/layouts/BlogLayout.astro b/docs/src/layouts/BlogLayout.astro index f2222fb0c..5644e69aa 100644 --- a/docs/src/layouts/BlogLayout.astro +++ b/docs/src/layouts/BlogLayout.astro @@ -4,7 +4,11 @@ import Layout from "./Layout.astro"; import AboutMeVertical from "../components/AboutMeVertical.astro"; const { blog, type } = Astro.props; -const author = await getEntry("authors", blog.data.author.id); +let author = null; + +if (blog?.data?.author?.id) { + author = await getEntry("authors", blog.data.author.id); +} const JSONLD = { "@context": "https://schema.org/", @@ -18,8 +22,8 @@ const JSONLD = { dateModified: blog.data.published_at, author: { "@type": "Person", - name: author.data.name, - image: author.data.avatar_url, + name: author?.data?.name, + image: author?.data?.avatar_url, }, url: Astro.url, isPartOf: { @@ -28,8 +32,8 @@ const JSONLD = { name: "DiceDB Blog", publisher: { "@type": "Person", - name: author.data.name, - image: author.data.avatar_url, + name: author?.data?.name, + image: author?.data?.avatar_url, }, }, }; @@ -51,6 +55,13 @@ const JSONLD = {
  • Releases
  • + ) : type === "update" ? ( +
  • + Weekly Updates +
  • +
  • + {blog.slug} +
  • ) : (
  • Blog @@ -68,35 +79,39 @@ const JSONLD = { > {blog.data.title} -
    -
    -
    - -
    -
    -
    -

    - {author.data.name} -

    -

    - {author.data.bio} -

    -
    -
    + { + author && ( +
    +
    +
    + +
    +
    +
    +

    + + {author.data.name} + +

    +

    {author.data.bio}

    +
    +
    + ) + }

  • -

    - { - blog.data.published_at.toLocaleString("en-us", { - day: "2-digit", - month: "short", - year: "numeric", - }) - } -

    + { + type !== "update" && ( +

    + {blog.data.published_at.toLocaleString("en-us", { + day: "2-digit", + month: "short", + year: "numeric", + })} +

    + ) + }
diff --git a/docs/src/layouts/Head.astro b/docs/src/layouts/Head.astro
index 0c4c7263f..c379a8c71 100644
--- a/docs/src/layouts/Head.astro
+++ b/docs/src/layouts/Head.astro
@@ -14,6 +14,5 @@ const { title, description, img, blog, video, session, JSONLD } = Astro.props;
   {title}
-
diff --git a/docs/src/pages/community/index.astro b/docs/src/pages/community/index.astro
new file mode 100644
index 000000000..588b386c5
--- /dev/null
+++ b/docs/src/pages/community/index.astro
@@ -0,0 +1,69 @@
+---
+import Layout from "../../layouts/Layout.astro";
+import { getCollection } from "astro:content";
+import { Video } from "lucide-astro";
+
+const updates = await getCollection("updates");
+updates.reverse();
+
+const title = "Community";
+const description = "";
+const callLink = "https://meet.google.com/qdf-pfkb-ckm";
+---
+
+
    +
    +

    Community

    +

    + DiceDB is a community-driven project, and every individual who has + contributed their time, expertise, and patches has played a crucial role + in shaping its success. If you're interested in contributing to DiceDB, + you can always +

    + The community is always open to new ideas and contributions, so feel free + to drop a note on our Discord server. +

    +
    +

    Weekly calls

    +

+        We meet every Thursday at 19:00 IST to discuss what we did and what we
+        plan to do. The meeting is open to everyone, and you can join the call
+        by clicking the button below.
+

    + +
    +

    Here are the notes from our past weekly calls

    +
    + +
    +
    +
    +
diff --git a/docs/src/pages/team.astro b/docs/src/pages/team.astro
new file mode 100644
index 000000000..a2df83d4e
--- /dev/null
+++ b/docs/src/pages/team.astro
@@ -0,0 +1,122 @@
+---
+import Layout from "../layouts/Layout.astro";
+import { getCollection } from "astro:content";
+import UserSocialHandles from "../components/UserSocialHandles.astro";
+const team = await getCollection("team");
+
+const pmc = team.filter((member) => member.data.roles.includes("pmc"));
+const committers = team.filter((member) =>
+  member.data.roles.includes("committer"),
+);
+
+const title = "Team";
+const description =
+  "DiceDB is a community-driven project, and every individual who has contributed their time, expertise, and patches has played a crucial role in shaping its success.";
+---
+
+
    +
    +

    Team

    +

    + DiceDB is a community-driven project, and every individual who has + contributed their time, expertise, and patches has played a crucial role + in shaping its success. Our team is organized into three key groups: +

      +
    • Project Management Committee (PMC)
    • +
    • Committers
    • +
    • Contributors
    • +
    +

    +
    +

    Project Management Committee

    +

    + Our Project Management Committee (PMC) steers the vision, ensuring + DiceDB stays aligned with its mission of building a reactive, scalable, + highly available, unified cache optimized for modern hardware. +

    +
    +
    + { + pmc.map((member) => ( +
    + {member.data.name} +

    + {member.data.name.split(" ").map((char) => ( +
    {char}
    + ))} +

    + +
    + )) + } +
    +
    +

    Committers

    +

+        The committers, with their deep expertise, safeguard the stability and
+        quality of the codebase, while also working on critical features and
+        improvements.
+

    +
    + { + committers.map((member) => ( +
    + {member.data.name} +

    {member.data.name}

    + +
    + )) + } +
    +
    +

    Contributors

    +

+        The contributors, with their fresh ideas and energy, push the boundaries
+        of what's possible. Each project has its own contributors, and you can
+        find them below. We regularly evaluate contributions and promote
+        contributors to committers.
+

    +

    +
    +
    +
+
+
diff --git a/docs/src/pages/updates/[slug].astro b/docs/src/pages/updates/[slug].astro
new file mode 100644
index 000000000..7fb5498b4
--- /dev/null
+++ b/docs/src/pages/updates/[slug].astro
@@ -0,0 +1,20 @@
+---
+import { getCollection } from "astro:content";
+import BlogLayout from "../../layouts/BlogLayout.astro";
+export async function getStaticPaths() {
+  const updates = (await getCollection("updates")).sort(
+    (a, b) => new Date(b.slug).getTime() - new Date(a.slug).getTime(),
+  );
+  return updates.map((update) => ({
+    params: { slug: update.slug },
+    props: { update },
+  }));
+}
+
+const { update } = Astro.props;
+const { Content } = await update.render();
+---
+
+
+
+
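A minimal, hypothetical sketch of how the new `updates` collection could be surfaced on an index page, for readers unfamiliar with Astro content collections. It is not part of the patch; it only assumes what the diffs above already show: update slugs are ISO dates (for example `2024-11-28`), each update is served at `/updates/<slug>` via `[slug].astro`, and the shared `Layout` component accepts `title` and `description` props.

```astro
---
// Hypothetical file: docs/src/pages/updates/index.astro (not included in this patch).
// Lists the "updates" content collection newest-first, assuming slugs are ISO dates.
import Layout from "../../layouts/Layout.astro";
import { getCollection } from "astro:content";

const updates = (await getCollection("updates")).sort(
  (a, b) => new Date(b.slug).getTime() - new Date(a.slug).getTime(),
);

const title = "Weekly Updates";
const description = "Notes from the DiceDB weekly community calls.";
---

<Layout title={title} description={description}>
  <ul>
    {
      updates.map((update) => (
        <li>
          <a href={`/updates/${update.slug}`}>{update.slug}</a>
        </li>
      ))
    }
  </ul>
</Layout>
```

Sorting by `new Date(slug)` mirrors the ordering used in `[slug].astro`, so the most recent weekly update appears first.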