diff --git a/.github/workflows/services.yml b/.github/workflows/services.yml index da03a2badfe..07e7803171e 100644 --- a/.github/workflows/services.yml +++ b/.github/workflows/services.yml @@ -87,7 +87,7 @@ jobs: strategy: matrix: ## TODO: add more modules - module: [database, pay, account, minio, launchpad, exceptionmonitor] + module: [database, pay, account, minio, launchpad, exceptionmonitor, aiproxy] steps: - name: Checkout uses: actions/checkout@v3 @@ -182,7 +182,7 @@ jobs: strategy: matrix: ## TODO: add more modules - module: [database, pay, account, minio, launchpad, exceptionmonitor] + module: [database, pay, account, minio, launchpad, exceptionmonitor, aiproxy] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/service/aiproxy/Dockerfile b/service/aiproxy/Dockerfile new file mode 100644 index 00000000000..f078274204e --- /dev/null +++ b/service/aiproxy/Dockerfile @@ -0,0 +1,7 @@ +FROM gcr.io/distroless/static:nonroot +ARG TARGETARCH +COPY bin/service-aiproxy-$TARGETARCH /manager +EXPOSE 3000 +USER 65532:65532 + +ENTRYPOINT ["/manager"] \ No newline at end of file diff --git a/service/aiproxy/Makefile b/service/aiproxy/Makefile new file mode 100644 index 00000000000..fae59539d48 --- /dev/null +++ b/service/aiproxy/Makefile @@ -0,0 +1,53 @@ +IMG ?= ghcr.io/labring/sealos-aiproxy-service:latest + +# Get the currently used golang install path (in GOPATH/bin, unless GOBIN is set) +ifeq (,$(shell go env GOBIN)) +GOBIN=$(shell go env GOPATH)/bin +else +GOBIN=$(shell go env GOBIN) +endif + +# only support linux, non cgo +PLATFORMS ?= linux_arm64 linux_amd64 +GOOS=linux +GOARCH=$(shell go env GOARCH) + +.PHONY: all +all: build + +##@ General + +# The help target prints out all targets with their descriptions organized +# beneath their categories. The categories are represented by '##@' and the +# target descriptions by '##'. 
The awk command is responsible for reading the +# entire set of makefiles included in this invocation, looking for lines of the +# file as xyz: ## something, and then pretty-format the target and help. Then, +# if there's a line with ##@ something, that gets pretty-printed as a category. +# More info on the usage of ANSI control characters for terminal formatting: +# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters +# More info on the awk command: +# http://linuxcommand.org/lc3_adv_awk.php + +.PHONY: help +help: ## Display this help. + @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) + +##@ Build + +.PHONY: clean +clean: + rm -f $(SERVICE_NAME) + +.PHONY: build +build: ## Build service-aiproxy binary. + LD_FLAGS="-s -w -extldflags '-static'"; \ + CGO_ENABLED=0 GOOS=linux go build -tags "jsoniter" -ldflags "$${LD_FLAGS}" -trimpath -o bin/manager main.go + +.PHONY: docker-build +docker-build: build + mv bin/manager bin/service-aiproxy-${TARGETARCH} + docker build -t $(IMG) . 
+ +.PHONY: docker-push +docker-push: + docker push $(IMG) diff --git a/service/aiproxy/common/balance/balance.go b/service/aiproxy/common/balance/balance.go new file mode 100644 index 00000000000..b1eac62231a --- /dev/null +++ b/service/aiproxy/common/balance/balance.go @@ -0,0 +1,14 @@ +package balance + +import "context" + +type GroupBalance interface { + GetGroupRemainBalance(ctx context.Context, group string) (float64, PostGroupConsumer, error) +} + +type PostGroupConsumer interface { + PostGroupConsume(ctx context.Context, tokenName string, usage float64) (float64, error) + GetBalance(ctx context.Context) (float64, error) +} + +var Default GroupBalance = NewMockGroupBalance() diff --git a/service/aiproxy/common/balance/mock.go b/service/aiproxy/common/balance/mock.go new file mode 100644 index 00000000000..8cb2ddc7e86 --- /dev/null +++ b/service/aiproxy/common/balance/mock.go @@ -0,0 +1,27 @@ +package balance + +import "context" + +var _ GroupBalance = (*MockGroupBalance)(nil) + +const ( + mockBalance = 10000000 +) + +type MockGroupBalance struct{} + +func NewMockGroupBalance() *MockGroupBalance { + return &MockGroupBalance{} +} + +func (q *MockGroupBalance) GetGroupRemainBalance(_ context.Context, _ string) (float64, PostGroupConsumer, error) { + return mockBalance, q, nil +} + +func (q *MockGroupBalance) PostGroupConsume(_ context.Context, _ string, usage float64) (float64, error) { + return usage, nil +} + +func (q *MockGroupBalance) GetBalance(_ context.Context) (float64, error) { + return mockBalance, nil +} diff --git a/service/aiproxy/common/balance/sealos.go b/service/aiproxy/common/balance/sealos.go new file mode 100644 index 00000000000..777fe3c222f --- /dev/null +++ b/service/aiproxy/common/balance/sealos.go @@ -0,0 +1,279 @@ +package balance + +import ( + "bytes" + "context" + "errors" + "fmt" + "math/rand/v2" + "net/http" + "time" + + "github.com/golang-jwt/jwt/v5" + json "github.com/json-iterator/go" + 
"github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/env" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/redis/go-redis/v9" + "github.com/shopspring/decimal" +) + +const ( + defaultAccountURL = "http://account-service.account-system.svc.cluster.local:2333" + balancePrecision = 1000000 + appType = "LLM-TOKEN" + sealosRequester = "sealos-admin" + sealosGroupBalanceKey = "sealos:balance:%s" +) + +var ( + _ GroupBalance = (*Sealos)(nil) + sealosHTTPClient = &http.Client{} + decimalBalancePrecision = decimal.NewFromInt(balancePrecision) + minConsumeAmount = decimal.NewFromInt(1) + jwtToken string + sealosRedisCacheEnable = env.Bool("BALANCE_SEALOS_REDIS_CACHE_ENABLE", true) + sealosCacheExpire = 15 * time.Second +) + +type Sealos struct { + accountURL string +} + +// FIXME: 如果获取余额能成功,但是消费永远失败,需要加一个失败次数限制,如果失败次数超过一定阈值,暂停服务 +func InitSealos(jwtKey string, accountURL string) error { + token, err := newSealosToken(jwtKey) + if err != nil { + return fmt.Errorf("failed to generate sealos jwt token: %w", err) + } + jwtToken = token + Default = NewSealos(accountURL) + return nil +} + +func NewSealos(accountURL string) *Sealos { + if accountURL == "" { + accountURL = defaultAccountURL + } + return &Sealos{accountURL: accountURL} +} + +type sealosClaims struct { + Requester string `json:"requester"` + jwt.RegisteredClaims +} + +func newSealosToken(key string) (string, error) { + claims := &sealosClaims{ + Requester: sealosRequester, + RegisteredClaims: jwt.RegisteredClaims{ + NotBefore: jwt.NewNumericDate(time.Now()), + }, + } + return jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(conv.StringToBytes(key)) +} + +type sealosGetGroupBalanceResp struct { + UserUID string `json:"userUID"` + Error string `json:"error"` + Balance int64 `json:"balance"` +} + +type sealosPostGroupConsumeReq struct { + Namespace string `json:"namespace"` + 
AppType string `json:"appType"` + AppName string `json:"appName"` + UserUID string `json:"userUID"` + Amount int64 `json:"amount"` +} + +type sealosPostGroupConsumeResp struct { + Error string `json:"error"` +} + +type sealosCache struct { + UserUID string `redis:"u"` + Balance int64 `redis:"b"` +} + +func cacheSetGroupBalance(ctx context.Context, group string, balance int64, userUID string) error { + if !common.RedisEnabled || !sealosRedisCacheEnable { + return nil + } + pipe := common.RDB.Pipeline() + pipe.HSet(ctx, fmt.Sprintf(sealosGroupBalanceKey, group), sealosCache{ + Balance: balance, + UserUID: userUID, + }) + expireTime := sealosCacheExpire + time.Duration(rand.Int64N(10)-5)*time.Second + pipe.Expire(ctx, fmt.Sprintf(sealosGroupBalanceKey, group), expireTime) + _, err := pipe.Exec(ctx) + return err +} + +func cacheGetGroupBalance(ctx context.Context, group string) (*sealosCache, error) { + if !common.RedisEnabled || !sealosRedisCacheEnable { + return nil, redis.Nil + } + var cache sealosCache + if err := common.RDB.HGetAll(ctx, fmt.Sprintf(sealosGroupBalanceKey, group)).Scan(&cache); err != nil { + return nil, err + } + return &cache, nil +} + +var decreaseGroupBalanceScript = redis.NewScript(` + local balance = redis.call("HGet", KEYS[1], "balance") + if balance == false then + return redis.status_reply("ok") + end + redis.call("HSet", KEYS[1], "balance", balance - ARGV[1]) + return redis.status_reply("ok") +`) + +func cacheDecreaseGroupBalance(ctx context.Context, group string, amount int64) error { + if !common.RedisEnabled || !sealosRedisCacheEnable { + return nil + } + return decreaseGroupBalanceScript.Run(ctx, common.RDB, []string{fmt.Sprintf(sealosGroupBalanceKey, group)}, amount).Err() +} + +// GroupBalance interface implementation +func (s *Sealos) GetGroupRemainBalance(ctx context.Context, group string) (float64, PostGroupConsumer, error) { + if cache, err := cacheGetGroupBalance(ctx, group); err == nil && cache.UserUID != "" { + return 
decimal.NewFromInt(cache.Balance).Div(decimalBalancePrecision).InexactFloat64(), + newSealosPostGroupConsumer(s.accountURL, group, cache.UserUID, cache.Balance), nil + } else if err != nil && !errors.Is(err, redis.Nil) { + logger.Errorf(ctx, "get group (%s) balance cache failed: %s", group, err) + } + + ctx, cancel := context.WithTimeout(ctx, 5*time.Second) + defer cancel() + + balance, userUID, err := s.fetchBalanceFromAPI(ctx, group) + if err != nil { + return 0, nil, err + } + + if err := cacheSetGroupBalance(ctx, group, balance, userUID); err != nil { + logger.Errorf(ctx, "set group (%s) balance cache failed: %s", group, err) + } + + return decimal.NewFromInt(balance).Div(decimalBalancePrecision).InexactFloat64(), + newSealosPostGroupConsumer(s.accountURL, group, userUID, balance), nil +} + +func (s *Sealos) fetchBalanceFromAPI(ctx context.Context, group string) (balance int64, userUID string, err error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, + fmt.Sprintf("%s/admin/v1alpha1/account-with-workspace?namespace=%s", s.accountURL, group), nil) + if err != nil { + return 0, "", err + } + + req.Header.Set("Authorization", "Bearer "+jwtToken) + resp, err := sealosHTTPClient.Do(req) + if err != nil { + return 0, "", err + } + defer resp.Body.Close() + + var sealosResp sealosGetGroupBalanceResp + if err := json.NewDecoder(resp.Body).Decode(&sealosResp); err != nil { + return 0, "", err + } + + if sealosResp.Error != "" { + logger.Errorf(ctx, "get group (%s) balance failed: %s", group, sealosResp.Error) + return 0, "", fmt.Errorf("get group (%s) balance failed", group) + } + + if resp.StatusCode != http.StatusOK { + return 0, "", fmt.Errorf("get group (%s) balance failed with status code %d", group, resp.StatusCode) + } + + return sealosResp.Balance, sealosResp.UserUID, nil +} + +type SealosPostGroupConsumer struct { + accountURL string + group string + uid string + balance int64 +} + +func newSealosPostGroupConsumer(accountURL, group, uid string, 
balance int64) *SealosPostGroupConsumer { + return &SealosPostGroupConsumer{ + accountURL: accountURL, + group: group, + uid: uid, + balance: balance, + } +} + +func (s *SealosPostGroupConsumer) GetBalance(_ context.Context) (float64, error) { + return decimal.NewFromInt(s.balance).Div(decimalBalancePrecision).InexactFloat64(), nil +} + +func (s *SealosPostGroupConsumer) PostGroupConsume(ctx context.Context, tokenName string, usage float64) (float64, error) { + amount := s.calculateAmount(usage) + + if err := cacheDecreaseGroupBalance(ctx, s.group, amount.IntPart()); err != nil { + logger.Errorf(ctx, "decrease group (%s) balance cache failed: %s", s.group, err) + } + + if err := s.postConsume(ctx, amount.IntPart(), tokenName); err != nil { + return 0, err + } + + return amount.Div(decimalBalancePrecision).InexactFloat64(), nil +} + +func (s *SealosPostGroupConsumer) calculateAmount(usage float64) decimal.Decimal { + amount := decimal.NewFromFloat(usage).Mul(decimalBalancePrecision).Ceil() + if amount.LessThan(minConsumeAmount) { + amount = minConsumeAmount + } + return amount +} + +func (s *SealosPostGroupConsumer) postConsume(ctx context.Context, amount int64, tokenName string) error { + reqBody, err := json.Marshal(sealosPostGroupConsumeReq{ + Namespace: s.group, + Amount: amount, + AppType: appType, + AppName: tokenName, + UserUID: s.uid, + }) + if err != nil { + return err + } + + req, err := http.NewRequestWithContext(ctx, + http.MethodPost, + s.accountURL+"/admin/v1alpha1/charge-billing", + bytes.NewBuffer(reqBody)) + if err != nil { + return err + } + + req.Header.Set("Authorization", "Bearer "+jwtToken) + resp, err := sealosHTTPClient.Do(req) + if err != nil { + return fmt.Errorf("post group (%s) consume failed: %w", s.group, err) + } + defer resp.Body.Close() + + var sealosResp sealosPostGroupConsumeResp + if err := json.NewDecoder(resp.Body).Decode(&sealosResp); err != nil { + return fmt.Errorf("post group (%s) consume failed: %w", s.group, err) + } + + 
if resp.StatusCode != http.StatusOK { + logger.Errorf(ctx, "group (%s) consume failed with status code %d: %s", + s.group, resp.StatusCode, sealosResp.Error) + return fmt.Errorf("group (%s) consume failed with status code %d", s.group, resp.StatusCode) + } + + return nil +} diff --git a/service/aiproxy/common/client/init.go b/service/aiproxy/common/client/init.go new file mode 100644 index 00000000000..37b040c441f --- /dev/null +++ b/service/aiproxy/common/client/init.go @@ -0,0 +1,63 @@ +package client + +import ( + "fmt" + "net/http" + "net/url" + "time" + + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/logger" +) + +var ( + HTTPClient *http.Client + ImpatientHTTPClient *http.Client + UserContentRequestHTTPClient *http.Client +) + +func Init() { + if config.UserContentRequestProxy != "" { + logger.SysLog(fmt.Sprintf("using %s as proxy to fetch user content", config.UserContentRequestProxy)) + proxyURL, err := url.Parse(config.UserContentRequestProxy) + if err != nil { + logger.FatalLog("USER_CONTENT_REQUEST_PROXY set but invalid: " + config.UserContentRequestProxy) + } + transport := &http.Transport{ + Proxy: http.ProxyURL(proxyURL), + } + UserContentRequestHTTPClient = &http.Client{ + Transport: transport, + Timeout: time.Second * time.Duration(config.UserContentRequestTimeout), + } + } else { + UserContentRequestHTTPClient = &http.Client{} + } + var transport http.RoundTripper + if config.RelayProxy != "" { + logger.SysLog(fmt.Sprintf("using %s as api relay proxy", config.RelayProxy)) + proxyURL, err := url.Parse(config.RelayProxy) + if err != nil { + logger.FatalLog("RELAY_PROXY set but invalid: " + config.RelayProxy) + } + transport = &http.Transport{ + Proxy: http.ProxyURL(proxyURL), + } + } + + if config.RelayTimeout == 0 { + HTTPClient = &http.Client{ + Transport: transport, + } + } else { + HTTPClient = &http.Client{ + Timeout: time.Duration(config.RelayTimeout) * 
time.Second, + Transport: transport, + } + } + + ImpatientHTTPClient = &http.Client{ + Timeout: 5 * time.Second, + Transport: transport, + } +} diff --git a/service/aiproxy/common/config/config.go b/service/aiproxy/common/config/config.go new file mode 100644 index 00000000000..94991567cd6 --- /dev/null +++ b/service/aiproxy/common/config/config.go @@ -0,0 +1,188 @@ +package config + +import ( + "os" + "strconv" + "sync" + "sync/atomic" + "time" + + "github.com/labring/sealos/service/aiproxy/common/env" +) + +var ( + OptionMap map[string]string + OptionMapRWMutex sync.RWMutex +) + +var ( + DebugEnabled, _ = strconv.ParseBool(os.Getenv("DEBUG")) + DebugSQLEnabled, _ = strconv.ParseBool(os.Getenv("DEBUG_SQL")) +) + +var ( + // 当测试或请求的时候发生错误是否自动禁用渠道 + automaticDisableChannelEnabled atomic.Bool + // 当测试成功是否自动启用渠道 + automaticEnableChannelWhenTestSucceedEnabled atomic.Bool + // 是否近似计算token + approximateTokenEnabled atomic.Bool + // 重试次数 + retryTimes atomic.Int64 + // 暂停服务 + disableServe atomic.Bool +) + +func GetDisableServe() bool { + return disableServe.Load() +} + +func SetDisableServe(disabled bool) { + disableServe.Store(disabled) +} + +func GetAutomaticDisableChannelEnabled() bool { + return automaticDisableChannelEnabled.Load() +} + +func SetAutomaticDisableChannelEnabled(enabled bool) { + automaticDisableChannelEnabled.Store(enabled) +} + +func GetAutomaticEnableChannelWhenTestSucceedEnabled() bool { + return automaticEnableChannelWhenTestSucceedEnabled.Load() +} + +func SetAutomaticEnableChannelWhenTestSucceedEnabled(enabled bool) { + automaticEnableChannelWhenTestSucceedEnabled.Store(enabled) +} + +func GetApproximateTokenEnabled() bool { + return approximateTokenEnabled.Load() +} + +func SetApproximateTokenEnabled(enabled bool) { + approximateTokenEnabled.Store(enabled) +} + +func GetRetryTimes() int64 { + return retryTimes.Load() +} + +func SetRetryTimes(times int64) { + retryTimes.Store(times) +} + +var DisableAutoMigrateDB = 
os.Getenv("DISABLE_AUTO_MIGRATE_DB") == "true" + +var RelayTimeout = env.Int("RELAY_TIMEOUT", 0) // unit is second + +var RateLimitKeyExpirationDuration = 20 * time.Minute + +var ( + // 是否根据请求成功率禁用渠道,默认不开启 + EnableMetric = env.Bool("ENABLE_METRIC", false) + // 指标队列大小 + MetricQueueSize = env.Int("METRIC_QUEUE_SIZE", 10) + // 请求成功率阈值,默认80% + MetricSuccessRateThreshold = env.Float64("METRIC_SUCCESS_RATE_THRESHOLD", 0.8) + // 请求成功率指标队列大小 + MetricSuccessChanSize = env.Int("METRIC_SUCCESS_CHAN_SIZE", 1024) + // 请求失败率指标队列大小 + MetricFailChanSize = env.Int("METRIC_FAIL_CHAN_SIZE", 128) +) + +var OnlyOneLogFile = env.Bool("ONLY_ONE_LOG_FILE", false) + +var ( + // 代理地址 + RelayProxy = env.String("RELAY_PROXY", "") + // 用户内容请求代理地址 + UserContentRequestProxy = env.String("USER_CONTENT_REQUEST_PROXY", "") + // 用户内容请求超时时间,单位为秒 + UserContentRequestTimeout = env.Int("USER_CONTENT_REQUEST_TIMEOUT", 30) +) + +var AdminKey = env.String("ADMIN_KEY", "") + +var ( + globalAPIRateLimitNum atomic.Int64 + defaultChannelModels atomic.Value + defaultChannelModelMapping atomic.Value + defaultGroupQPM atomic.Int64 + groupMaxTokenNum atomic.Int32 +) + +func init() { + defaultChannelModels.Store(make(map[int][]string)) + defaultChannelModelMapping.Store(make(map[int]map[string]string)) +} + +// 全局qpm,不是根据ip限制,而是所有请求共享一个qpm +func GetGlobalAPIRateLimitNum() int64 { + return globalAPIRateLimitNum.Load() +} + +func SetGlobalAPIRateLimitNum(num int64) { + globalAPIRateLimitNum.Store(num) +} + +// group默认qpm,如果group没有设置qpm,则使用该qpm +func GetDefaultGroupQPM() int64 { + return defaultGroupQPM.Load() +} + +func SetDefaultGroupQPM(qpm int64) { + defaultGroupQPM.Store(qpm) +} + +func GetDefaultChannelModels() map[int][]string { + return defaultChannelModels.Load().(map[int][]string) +} + +func SetDefaultChannelModels(models map[int][]string) { + defaultChannelModels.Store(models) +} + +func GetDefaultChannelModelMapping() map[int]map[string]string { + return 
defaultChannelModelMapping.Load().(map[int]map[string]string) +} + +func SetDefaultChannelModelMapping(mapping map[int]map[string]string) { + defaultChannelModelMapping.Store(mapping) +} + +// 那个group最多可创建的token数量,0表示不限制 +func GetGroupMaxTokenNum() int32 { + return groupMaxTokenNum.Load() +} + +func SetGroupMaxTokenNum(num int32) { + groupMaxTokenNum.Store(num) +} + +var ( + geminiSafetySetting atomic.Value + geminiVersion atomic.Value +) + +func init() { + geminiSafetySetting.Store("BLOCK_NONE") + geminiVersion.Store("v1") +} + +func GetGeminiSafetySetting() string { + return geminiSafetySetting.Load().(string) +} + +func SetGeminiSafetySetting(setting string) { + geminiSafetySetting.Store(setting) +} + +func GetGeminiVersion() string { + return geminiVersion.Load().(string) +} + +func SetGeminiVersion(version string) { + geminiVersion.Store(version) +} diff --git a/service/aiproxy/common/constants.go b/service/aiproxy/common/constants.go new file mode 100644 index 00000000000..65d61413e40 --- /dev/null +++ b/service/aiproxy/common/constants.go @@ -0,0 +1,5 @@ +package common + +import "time" + +var StartTime = time.Now().UnixMilli() // unit: millisecond diff --git a/service/aiproxy/common/conv/any.go b/service/aiproxy/common/conv/any.go new file mode 100644 index 00000000000..ed6de0d1c12 --- /dev/null +++ b/service/aiproxy/common/conv/any.go @@ -0,0 +1,23 @@ +package conv + +import "unsafe" + +func AsString(v any) string { + str, _ := v.(string) + return str +} + +// The change of bytes will cause the change of string synchronously +func BytesToString(b []byte) string { + return *(*string)(unsafe.Pointer(&b)) +} + +// If string is readonly, modifying bytes will cause panic +func StringToBytes(s string) []byte { + return *(*[]byte)(unsafe.Pointer( + &struct { + string + Cap int + }{s, len(s)}, + )) +} diff --git a/service/aiproxy/common/ctxkey/key.go b/service/aiproxy/common/ctxkey/key.go new file mode 100644 index 00000000000..c2adec1a4e6 --- /dev/null +++ 
b/service/aiproxy/common/ctxkey/key.go @@ -0,0 +1,24 @@ +package ctxkey + +const ( + Config = "config" + Status = "status" + Channel = "channel" + ChannelID = "channel_id" + APIKey = "api_key" + SpecificChannelID = "specific_channel_id" + RequestModel = "request_model" + ConvertedRequest = "converted_request" + OriginalModel = "original_model" + Group = "group" + GroupQPM = "group_qpm" + ModelMapping = "model_mapping" + ChannelName = "channel_name" + TokenID = "token_id" + TokenName = "token_name" + TokenUsedAmount = "token_used_amount" + TokenQuota = "token_quota" + BaseURL = "base_url" + AvailableModels = "available_models" + KeyRequestBody = "key_request_body" +) diff --git a/service/aiproxy/common/custom-event.go b/service/aiproxy/common/custom-event.go new file mode 100644 index 00000000000..a7a76219fb9 --- /dev/null +++ b/service/aiproxy/common/custom-event.go @@ -0,0 +1,67 @@ +// Copyright 2014 Manu Martinez-Almeida. All rights reserved. +// Use of this source code is governed by a MIT style +// license that can be found in the LICENSE file. 
+ +package common + +import ( + "io" + "net/http" + "strings" + + "github.com/labring/sealos/service/aiproxy/common/conv" +) + +// Server-Sent Events +// W3C Working Draft 29 October 2009 +// http://www.w3.org/TR/2009/WD-eventsource-20091029/ + +var ( + contentType = []string{"text/event-stream"} + noCache = []string{"no-cache"} +) + +var dataReplacer = strings.NewReplacer( + "\n", "\ndata:", + "\r", "\\r") + +type CustomEvent struct { + Data string + Event string + ID string + Retry uint +} + +func encode(writer io.Writer, event CustomEvent) error { + return writeData(writer, event.Data) +} + +const nn = "\n\n" + +var nnBytes = conv.StringToBytes(nn) + +func writeData(w io.Writer, data string) error { + _, err := dataReplacer.WriteString(w, data) + if err != nil { + return err + } + if strings.HasPrefix(data, "data") { + _, err := w.Write(nnBytes) + return err + } + return nil +} + +func (r CustomEvent) Render(w http.ResponseWriter) error { + r.WriteContentType(w) + return encode(w, r) +} + +func (r CustomEvent) WriteContentType(w http.ResponseWriter) { + header := w.Header() + header["Content-Type"] = contentType + + if _, exist := header["Cache-Control"]; !exist { + header["Cache-Control"] = noCache + } +} diff --git a/service/aiproxy/common/database.go b/service/aiproxy/common/database.go new file mode 100644 index 00000000000..a164266c27a --- /dev/null +++ b/service/aiproxy/common/database.go @@ -0,0 +1,16 @@ +package common + +import ( + "github.com/labring/sealos/service/aiproxy/common/env" +) + +var ( + UsingSQLite = false + UsingPostgreSQL = false + UsingMySQL = false +) + +var ( + SQLitePath = "aiproxy.db" + SQLiteBusyTimeout = env.Int("SQLITE_BUSY_TIMEOUT", 3000) +) diff --git a/service/aiproxy/common/env/helper.go b/service/aiproxy/common/env/helper.go new file mode 100644 index 00000000000..fdb9f827ac2 --- /dev/null +++ b/service/aiproxy/common/env/helper.go @@ -0,0 +1,42 @@ +package env + +import ( + "os" + "strconv" +) + +func Bool(env string, 
defaultValue bool) bool { + if env == "" || os.Getenv(env) == "" { + return defaultValue + } + return os.Getenv(env) == "true" +} + +func Int(env string, defaultValue int) int { + if env == "" || os.Getenv(env) == "" { + return defaultValue + } + num, err := strconv.Atoi(os.Getenv(env)) + if err != nil { + return defaultValue + } + return num +} + +func Float64(env string, defaultValue float64) float64 { + if env == "" || os.Getenv(env) == "" { + return defaultValue + } + num, err := strconv.ParseFloat(os.Getenv(env), 64) + if err != nil { + return defaultValue + } + return num +} + +func String(env string, defaultValue string) string { + if env == "" || os.Getenv(env) == "" { + return defaultValue + } + return os.Getenv(env) +} diff --git a/service/aiproxy/common/fastJSONSerializer/fastJSONSerializer.go b/service/aiproxy/common/fastJSONSerializer/fastJSONSerializer.go new file mode 100644 index 00000000000..98a55ae32cf --- /dev/null +++ b/service/aiproxy/common/fastJSONSerializer/fastJSONSerializer.go @@ -0,0 +1,43 @@ +package fastjsonserializer + +import ( + "context" + "fmt" + "reflect" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + + "gorm.io/gorm/schema" +) + +type JSONSerializer struct{} + +func (*JSONSerializer) Scan(ctx context.Context, field *schema.Field, dst reflect.Value, dbValue any) (err error) { + fieldValue := reflect.New(field.FieldType) + + if dbValue != nil { + var bytes []byte + switch v := dbValue.(type) { + case []byte: + bytes = v + case string: + bytes = conv.StringToBytes(v) + default: + return fmt.Errorf("failed to unmarshal JSONB value: %#v", dbValue) + } + + err = json.Unmarshal(bytes, fieldValue.Interface()) + } + + field.ReflectValueOf(ctx, dst).Set(fieldValue.Elem()) + return +} + +func (*JSONSerializer) Value(_ context.Context, _ *schema.Field, _ reflect.Value, fieldValue any) (any, error) { + return json.Marshal(fieldValue) +} + +func init() { + 
schema.RegisterSerializer("fastjson", new(JSONSerializer)) +} diff --git a/service/aiproxy/common/gin.go b/service/aiproxy/common/gin.go new file mode 100644 index 00000000000..f1027137533 --- /dev/null +++ b/service/aiproxy/common/gin.go @@ -0,0 +1,53 @@ +package common + +import ( + "bytes" + "fmt" + "io" + + "github.com/gin-gonic/gin" + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" +) + +func GetRequestBody(c *gin.Context) ([]byte, error) { + requestBody, ok := c.Get(ctxkey.KeyRequestBody) + if ok { + return requestBody.([]byte), nil + } + var buf []byte + var err error + defer func() { + c.Request.Body.Close() + if err == nil { + c.Request.Body = io.NopCloser(bytes.NewBuffer(buf)) + } + }() + if c.Request.ContentLength <= 0 || c.Request.Header.Get("Content-Type") != "application/json" { + buf, err = io.ReadAll(c.Request.Body) + } else { + buf = make([]byte, c.Request.ContentLength) + _, err = io.ReadFull(c.Request.Body, buf) + } + if err != nil { + return nil, fmt.Errorf("request body read failed: %w", err) + } + c.Set(ctxkey.KeyRequestBody, buf) + return buf, nil +} + +func UnmarshalBodyReusable(c *gin.Context, v any) error { + requestBody, err := GetRequestBody(c) + if err != nil { + return err + } + return json.Unmarshal(requestBody, &v) +} + +func SetEventStreamHeaders(c *gin.Context) { + c.Writer.Header().Set("Content-Type", "text/event-stream") + c.Writer.Header().Set("Cache-Control", "no-cache") + c.Writer.Header().Set("Connection", "keep-alive") + c.Writer.Header().Set("Transfer-Encoding", "chunked") + c.Writer.Header().Set("X-Accel-Buffering", "no") +} diff --git a/service/aiproxy/common/helper/helper.go b/service/aiproxy/common/helper/helper.go new file mode 100644 index 00000000000..3a8f55e58a5 --- /dev/null +++ b/service/aiproxy/common/helper/helper.go @@ -0,0 +1,40 @@ +package helper + +import ( + "fmt" + "strconv" + + "github.com/gin-gonic/gin" + 
"github.com/labring/sealos/service/aiproxy/common/random" +) + +func GenRequestID() string { + return GetTimeString() + random.GetRandomNumberString(8) +} + +func GetResponseID(c *gin.Context) string { + logID := c.GetString(string(RequestIDKey)) + return "chatcmpl-" + logID +} + +func AssignOrDefault(value string, defaultValue string) string { + if len(value) != 0 { + return value + } + return defaultValue +} + +func MessageWithRequestID(message string, id string) string { + return fmt.Sprintf("%s (request id: %s)", message, id) +} + +func String2Int(keyword string) int { + if keyword == "" { + return 0 + } + i, err := strconv.Atoi(keyword) + if err != nil { + return 0 + } + return i +} diff --git a/service/aiproxy/common/helper/key.go b/service/aiproxy/common/helper/key.go new file mode 100644 index 00000000000..bc9c949eb9c --- /dev/null +++ b/service/aiproxy/common/helper/key.go @@ -0,0 +1,7 @@ +package helper + +type Key string + +const ( + RequestIDKey Key = "X-Request-Id" +) diff --git a/service/aiproxy/common/helper/time.go b/service/aiproxy/common/helper/time.go new file mode 100644 index 00000000000..302746dbff9 --- /dev/null +++ b/service/aiproxy/common/helper/time.go @@ -0,0 +1,15 @@ +package helper + +import ( + "fmt" + "time" +) + +func GetTimestamp() int64 { + return time.Now().Unix() +} + +func GetTimeString() string { + now := time.Now() + return fmt.Sprintf("%s%d", now.Format("20060102150405"), now.UnixNano()%1e9) +} diff --git a/service/aiproxy/common/image/image.go b/service/aiproxy/common/image/image.go new file mode 100644 index 00000000000..4a768cef980 --- /dev/null +++ b/service/aiproxy/common/image/image.go @@ -0,0 +1,119 @@ +package image + +import ( + "bytes" + "context" + "encoding/base64" + "errors" + "fmt" + "image" + + // import gif decoder + _ "image/gif" + // import jpeg decoder + _ "image/jpeg" + // import png decoder + _ "image/png" + "io" + "net/http" + "regexp" + "strings" + + // import webp decoder + _ "golang.org/x/image/webp" 
+ + "github.com/labring/sealos/service/aiproxy/common/client" +) + +// Regex to match data URL pattern +var dataURLPattern = regexp.MustCompile(`data:image/([^;]+);base64,(.*)`) + +func IsImageURL(resp *http.Response) bool { + return strings.HasPrefix(resp.Header.Get("Content-Type"), "image/") +} + +func GetImageSizeFromURL(url string) (width int, height int, err error) { + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, url, nil) + if err != nil { + return 0, 0, err + } + resp, err := client.UserContentRequestHTTPClient.Do(req) + if err != nil { + return + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return 0, 0, fmt.Errorf("status code: %d", resp.StatusCode) + } + + isImage := IsImageURL(resp) + if !isImage { + return + } + img, _, err := image.DecodeConfig(resp.Body) + if err != nil { + return + } + return img.Width, img.Height, nil +} + +func GetImageFromURL(url string) (string, string, error) { + // Check if the URL is a data URL + matches := dataURLPattern.FindStringSubmatch(url) + if len(matches) == 3 { + // URL is a data URL + return "image/" + matches[1], matches[2], nil + } + + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, url, nil) + if err != nil { + return "", "", err + } + resp, err := client.UserContentRequestHTTPClient.Do(req) + if err != nil { + return "", "", err + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return "", "", fmt.Errorf("status code: %d", resp.StatusCode) + } + var buf []byte + if resp.ContentLength <= 0 { + buf, err = io.ReadAll(resp.Body) + } else { + buf = make([]byte, resp.ContentLength) + _, err = io.ReadFull(resp.Body, buf) + } + if err != nil { + return "", "", err + } + isImage := IsImageURL(resp) + if !isImage { + return "", "", errors.New("not an image") + } + return resp.Header.Get("Content-Type"), base64.StdEncoding.EncodeToString(buf), nil +} + +var reg = regexp.MustCompile(`data:image/([^;]+);base64,`) + 
+func GetImageSizeFromBase64(encoded string) (width int, height int, err error) { + decoded, err := base64.StdEncoding.DecodeString(reg.ReplaceAllString(encoded, "")) + if err != nil { + return 0, 0, err + } + + img, _, err := image.DecodeConfig(bytes.NewReader(decoded)) + if err != nil { + return 0, 0, err + } + + return img.Width, img.Height, nil +} + +func GetImageSize(image string) (width int, height int, err error) { + if strings.HasPrefix(image, "data:image/") { + return GetImageSizeFromBase64(image) + } + return GetImageSizeFromURL(image) +} diff --git a/service/aiproxy/common/image/image_test.go b/service/aiproxy/common/image/image_test.go new file mode 100644 index 00000000000..7dad94a0c5a --- /dev/null +++ b/service/aiproxy/common/image/image_test.go @@ -0,0 +1,176 @@ +package image_test + +import ( + "encoding/base64" + "image" + _ "image/gif" + _ "image/jpeg" + _ "image/png" + "io" + "net/http" + "strconv" + "strings" + "testing" + + "github.com/labring/sealos/service/aiproxy/common/client" + + img "github.com/labring/sealos/service/aiproxy/common/image" + + "github.com/stretchr/testify/assert" + _ "golang.org/x/image/webp" +) + +type CountingReader struct { + reader io.Reader + BytesRead int +} + +func (r *CountingReader) Read(p []byte) (n int, err error) { + n, err = r.reader.Read(p) + r.BytesRead += n + return n, err +} + +var cases = []struct { + url string + format string + width int + height int +}{ + {"https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", "jpeg", 2560, 1669}, + {"https://upload.wikimedia.org/wikipedia/commons/9/97/Basshunter_live_performances.png", "png", 4500, 2592}, + {"https://upload.wikimedia.org/wikipedia/commons/c/c6/TO_THE_ONE_SOMETHINGNESS.webp", "webp", 984, 985}, + {"https://upload.wikimedia.org/wikipedia/commons/d/d0/01_Das_Sandberg-Modell.gif", "gif", 1917, 1533}, + 
{"https://upload.wikimedia.org/wikipedia/commons/6/62/102Cervus.jpg", "jpeg", 270, 230}, +} + +func TestMain(m *testing.M) { + client.Init() + m.Run() +} + +func TestDecode(t *testing.T) { + // Bytes read: varies sometimes + // jpeg: 1063892 + // png: 294462 + // webp: 99529 + // gif: 956153 + // jpeg#01: 32805 + for _, c := range cases { + t.Run("Decode:"+c.format, func(t *testing.T) { + resp, err := http.Get(c.url) + assert.NoError(t, err) + defer resp.Body.Close() + reader := &CountingReader{reader: resp.Body} + img, format, err := image.Decode(reader) + assert.NoError(t, err) + size := img.Bounds().Size() + assert.Equal(t, c.format, format) + assert.Equal(t, c.width, size.X) + assert.Equal(t, c.height, size.Y) + t.Logf("Bytes read: %d", reader.BytesRead) + }) + } + + // Bytes read: + // jpeg: 4096 + // png: 4096 + // webp: 4096 + // gif: 4096 + // jpeg#01: 4096 + for _, c := range cases { + t.Run("DecodeConfig:"+c.format, func(t *testing.T) { + resp, err := http.Get(c.url) + assert.NoError(t, err) + defer resp.Body.Close() + reader := &CountingReader{reader: resp.Body} + config, format, err := image.DecodeConfig(reader) + assert.NoError(t, err) + assert.Equal(t, c.format, format) + assert.Equal(t, c.width, config.Width) + assert.Equal(t, c.height, config.Height) + t.Logf("Bytes read: %d", reader.BytesRead) + }) + } +} + +func TestBase64(t *testing.T) { + // Bytes read: + // jpeg: 1063892 + // png: 294462 + // webp: 99072 + // gif: 953856 + // jpeg#01: 32805 + for _, c := range cases { + t.Run("Decode:"+c.format, func(t *testing.T) { + resp, err := http.Get(c.url) + assert.NoError(t, err) + defer resp.Body.Close() + data, err := io.ReadAll(resp.Body) + assert.NoError(t, err) + encoded := base64.StdEncoding.EncodeToString(data) + body := base64.NewDecoder(base64.StdEncoding, strings.NewReader(encoded)) + reader := &CountingReader{reader: body} + img, format, err := image.Decode(reader) + assert.NoError(t, err) + size := img.Bounds().Size() + assert.Equal(t, 
c.format, format) + assert.Equal(t, c.width, size.X) + assert.Equal(t, c.height, size.Y) + t.Logf("Bytes read: %d", reader.BytesRead) + }) + } + + // Bytes read: + // jpeg: 1536 + // png: 768 + // webp: 768 + // gif: 1536 + // jpeg#01: 3840 + for _, c := range cases { + t.Run("DecodeConfig:"+c.format, func(t *testing.T) { + resp, err := http.Get(c.url) + assert.NoError(t, err) + defer resp.Body.Close() + data, err := io.ReadAll(resp.Body) + assert.NoError(t, err) + encoded := base64.StdEncoding.EncodeToString(data) + body := base64.NewDecoder(base64.StdEncoding, strings.NewReader(encoded)) + reader := &CountingReader{reader: body} + config, format, err := image.DecodeConfig(reader) + assert.NoError(t, err) + assert.Equal(t, c.format, format) + assert.Equal(t, c.width, config.Width) + assert.Equal(t, c.height, config.Height) + t.Logf("Bytes read: %d", reader.BytesRead) + }) + } +} + +func TestGetImageSize(t *testing.T) { + for i, c := range cases { + t.Run("Decode:"+strconv.Itoa(i), func(t *testing.T) { + width, height, err := img.GetImageSize(c.url) + assert.NoError(t, err) + assert.Equal(t, c.width, width) + assert.Equal(t, c.height, height) + }) + } +} + +func TestGetImageSizeFromBase64(t *testing.T) { + for i, c := range cases { + t.Run("Decode:"+strconv.Itoa(i), func(t *testing.T) { + resp, err := http.Get(c.url) + assert.NoError(t, err) + defer resp.Body.Close() + data, err := io.ReadAll(resp.Body) + assert.NoError(t, err) + encoded := base64.StdEncoding.EncodeToString(data) + width, height, err := img.GetImageSizeFromBase64(encoded) + assert.NoError(t, err) + assert.Equal(t, c.width, width) + assert.Equal(t, c.height, height) + }) + } +} diff --git a/service/aiproxy/common/init.go b/service/aiproxy/common/init.go new file mode 100644 index 00000000000..7d26db36d6c --- /dev/null +++ b/service/aiproxy/common/init.go @@ -0,0 +1,37 @@ +package common + +import ( + "flag" + "log" + "os" + "path/filepath" + + 
"github.com/labring/sealos/service/aiproxy/common/logger" +) + +var ( + Port = flag.Int("port", 3000, "the listening port") + LogDir = flag.String("log-dir", "", "specify the log directory") +) + +func Init() { + flag.Parse() + + if os.Getenv("SQLITE_PATH") != "" { + SQLitePath = os.Getenv("SQLITE_PATH") + } + if *LogDir != "" { + var err error + *LogDir, err = filepath.Abs(*LogDir) + if err != nil { + log.Fatal(err) + } + if _, err := os.Stat(*LogDir); os.IsNotExist(err) { + err = os.Mkdir(*LogDir, 0o777) + if err != nil { + log.Fatal(err) + } + } + logger.LogDir = *LogDir + } +} diff --git a/service/aiproxy/common/logger/constants.go b/service/aiproxy/common/logger/constants.go new file mode 100644 index 00000000000..49df31ec715 --- /dev/null +++ b/service/aiproxy/common/logger/constants.go @@ -0,0 +1,3 @@ +package logger + +var LogDir string diff --git a/service/aiproxy/common/logger/logger.go b/service/aiproxy/common/logger/logger.go new file mode 100644 index 00000000000..ae777610f94 --- /dev/null +++ b/service/aiproxy/common/logger/logger.go @@ -0,0 +1,128 @@ +package logger + +import ( + "context" + "fmt" + "io" + "log" + "os" + "path/filepath" + "sync" + "time" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/helper" +) + +const ( + loggerDEBUG = "DEBUG" + loggerINFO = "INFO" + loggerWarn = "WARN" + loggerError = "ERR" +) + +var setupLogOnce sync.Once + +func SetupLogger() { + setupLogOnce.Do(func() { + if LogDir != "" { + var logPath string + if config.OnlyOneLogFile { + logPath = filepath.Join(LogDir, "aiproxy.log") + } else { + logPath = filepath.Join(LogDir, fmt.Sprintf("aiproxy-%s.log", time.Now().Format("20060102"))) + } + fd, err := os.OpenFile(logPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644) + if err != nil { + log.Fatal("failed to open log file") + } + gin.DefaultWriter = io.MultiWriter(os.Stdout, fd) + gin.DefaultErrorWriter = 
io.MultiWriter(os.Stderr, fd) + } + }) +} + +func SysLog(s string) { + t := time.Now() + _, _ = fmt.Fprintf(gin.DefaultWriter, "[SYS] %v | %s \n", t.Format("2006/01/02 - 15:04:05"), s) +} + +func SysLogf(format string, a ...any) { + SysLog(fmt.Sprintf(format, a...)) +} + +func SysDebug(s string) { + if config.DebugEnabled { + SysLog(s) + } +} + +func SysDebugf(format string, a ...any) { + if config.DebugEnabled { + SysLogf(format, a...) + } +} + +func SysError(s string) { + t := time.Now() + _, _ = fmt.Fprintf(gin.DefaultErrorWriter, "[SYS] %v | %s \n", t.Format("2006/01/02 - 15:04:05"), s) +} + +func SysErrorf(format string, a ...any) { + SysError(fmt.Sprintf(format, a...)) +} + +func Debug(ctx context.Context, msg string) { + if config.DebugEnabled { + logHelper(ctx, loggerDEBUG, msg) + } +} + +func Info(ctx context.Context, msg string) { + logHelper(ctx, loggerINFO, msg) +} + +func Warn(ctx context.Context, msg string) { + logHelper(ctx, loggerWarn, msg) +} + +func Error(ctx context.Context, msg string) { + logHelper(ctx, loggerError, msg) +} + +func Debugf(ctx context.Context, format string, a ...any) { + Debug(ctx, fmt.Sprintf(format, a...)) +} + +func Infof(ctx context.Context, format string, a ...any) { + Info(ctx, fmt.Sprintf(format, a...)) +} + +func Warnf(ctx context.Context, format string, a ...any) { + Warn(ctx, fmt.Sprintf(format, a...)) +} + +func Errorf(ctx context.Context, format string, a ...any) { + Error(ctx, fmt.Sprintf(format, a...)) +} + +func logHelper(ctx context.Context, level string, msg string) { + writer := gin.DefaultErrorWriter + if level == loggerINFO { + writer = gin.DefaultWriter + } + id := ctx.Value(helper.RequestIDKey) + if id == nil { + id = helper.GenRequestID() + } + now := time.Now() + _, _ = fmt.Fprintf(writer, "[%s] %v | %s | %s \n", level, now.Format("2006/01/02 - 15:04:05"), id, msg) + SetupLogger() +} + +func FatalLog(v ...any) { + t := time.Now() + _, _ = fmt.Fprintf(gin.DefaultErrorWriter, "[FATAL] %v | %v \n", 
t.Format("2006/01/02 - 15:04:05"), v) + os.Exit(1) +} diff --git a/service/aiproxy/common/network/ip.go b/service/aiproxy/common/network/ip.go new file mode 100644 index 00000000000..cb335ad642a --- /dev/null +++ b/service/aiproxy/common/network/ip.go @@ -0,0 +1,53 @@ +package network + +import ( + "context" + "fmt" + "net" + "strings" + + "github.com/labring/sealos/service/aiproxy/common/logger" +) + +func splitSubnets(subnets string) []string { + res := strings.Split(subnets, ",") + for i := 0; i < len(res); i++ { + res[i] = strings.TrimSpace(res[i]) + } + return res +} + +func isValidSubnet(subnet string) error { + _, _, err := net.ParseCIDR(subnet) + if err != nil { + return fmt.Errorf("failed to parse subnet: %w", err) + } + return nil +} + +func isIPInSubnet(ctx context.Context, ip string, subnet string) bool { + _, ipNet, err := net.ParseCIDR(subnet) + if err != nil { + logger.Errorf(ctx, "failed to parse subnet: %s", err.Error()) + return false + } + return ipNet.Contains(net.ParseIP(ip)) +} + +func IsValidSubnets(subnets string) error { + for _, subnet := range splitSubnets(subnets) { + if err := isValidSubnet(subnet); err != nil { + return err + } + } + return nil +} + +func IsIPInSubnets(ctx context.Context, ip string, subnets string) bool { + for _, subnet := range splitSubnets(subnets) { + if isIPInSubnet(ctx, ip, subnet) { + return true + } + } + return false +} diff --git a/service/aiproxy/common/network/ip_test.go b/service/aiproxy/common/network/ip_test.go new file mode 100644 index 00000000000..24a92d74f38 --- /dev/null +++ b/service/aiproxy/common/network/ip_test.go @@ -0,0 +1,19 @@ +package network + +import ( + "context" + "testing" + + "github.com/smartystreets/goconvey/convey" +) + +func TestIsIpInSubnet(t *testing.T) { + ctx := context.Background() + ip1 := "192.168.0.5" + ip2 := "125.216.250.89" + subnet := "192.168.0.0/24" + convey.Convey("TestIsIpInSubnet", t, func() { + convey.So(isIPInSubnet(ctx, ip1, subnet), convey.ShouldBeTrue) + 
convey.So(isIPInSubnet(ctx, ip2, subnet), convey.ShouldBeFalse) + }) +} diff --git a/service/aiproxy/common/random/main.go b/service/aiproxy/common/random/main.go new file mode 100644 index 00000000000..79ba35e39a7 --- /dev/null +++ b/service/aiproxy/common/random/main.go @@ -0,0 +1,57 @@ +package random + +import ( + "math/rand/v2" + "strings" + + "github.com/google/uuid" + "github.com/labring/sealos/service/aiproxy/common/conv" +) + +func GetUUID() string { + code := uuid.New().String() + code = strings.Replace(code, "-", "", -1) + return code +} + +const ( + keyChars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + keyNumbers = "0123456789" +) + +func GenerateKey() string { + key := make([]byte, 48) + for i := 0; i < 16; i++ { + key[i] = keyChars[rand.IntN(len(keyChars))] + } + uuid := GetUUID() + for i := 0; i < 32; i++ { + c := uuid[i] + if i%2 == 0 && c >= 'a' && c <= 'z' { + c = c - 'a' + 'A' + } + key[i+16] = c + } + return conv.BytesToString(key) +} + +func GetRandomString(length int) string { + key := make([]byte, length) + for i := 0; i < length; i++ { + key[i] = keyChars[rand.IntN(len(keyChars))] + } + return conv.BytesToString(key) +} + +func GetRandomNumberString(length int) string { + key := make([]byte, length) + for i := 0; i < length; i++ { + key[i] = keyNumbers[rand.IntN(len(keyNumbers))] + } + return conv.BytesToString(key) +} + +// RandRange returns a random number between min and max (max is not included) +func RandRange(_min, _max int) int { + return _min + rand.IntN(_max-_min) +} diff --git a/service/aiproxy/common/rate-limit.go b/service/aiproxy/common/rate-limit.go new file mode 100644 index 00000000000..a94b6496fc2 --- /dev/null +++ b/service/aiproxy/common/rate-limit.go @@ -0,0 +1,93 @@ +package common + +import ( + "sync" + "time" +) + +type InMemoryRateLimiter struct { + store map[string]*RateLimitWindow + mutex sync.RWMutex + expirationDuration time.Duration +} + +type RateLimitWindow struct { + timestamps []int64 
+ lastAccess int64 +} + +func (l *InMemoryRateLimiter) Init(expirationDuration time.Duration) { + if l.store == nil { + l.mutex.Lock() + if l.store == nil { + l.store = make(map[string]*RateLimitWindow) + l.expirationDuration = expirationDuration + if expirationDuration > 0 { + go l.clearExpiredItems() + } + } + l.mutex.Unlock() + } +} + +func (l *InMemoryRateLimiter) clearExpiredItems() { + ticker := time.NewTicker(l.expirationDuration) + defer ticker.Stop() + + for range ticker.C { + l.mutex.Lock() + now := time.Now().Unix() + for key, window := range l.store { + if now-window.lastAccess > int64(l.expirationDuration.Seconds()) { + delete(l.store, key) + } + } + l.mutex.Unlock() + } +} + +// Request parameter duration's unit is seconds +func (l *InMemoryRateLimiter) Request(key string, maxRequestNum int, duration time.Duration) bool { + now := time.Now().Unix() + cutoff := now - int64(duration.Seconds()) + + l.mutex.RLock() + window, exists := l.store[key] + l.mutex.RUnlock() + + if !exists { + l.mutex.Lock() + window = &RateLimitWindow{ + timestamps: make([]int64, 0, maxRequestNum), + lastAccess: now, + } + l.store[key] = window + window.timestamps = append(window.timestamps, now) + l.mutex.Unlock() + return true + } + + l.mutex.Lock() + defer l.mutex.Unlock() + + // Update last access time + window.lastAccess = now + + // Remove expired timestamps + idx := 0 + for i, ts := range window.timestamps { + if ts > cutoff { + idx = i + break + } + } + window.timestamps = window.timestamps[idx:] + + // Check if we can add a new request + if len(window.timestamps) < maxRequestNum { + window.timestamps = append(window.timestamps, now) + return true + } + + return false +} diff --git a/service/aiproxy/common/redis.go b/service/aiproxy/common/redis.go new file mode 100644 index 00000000000..074fc84f452 --- /dev/null +++ b/service/aiproxy/common/redis.go @@ -0,0 +1,54 @@ +package common + +import ( + "context" + "os" + "time" + + 
"github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/redis/go-redis/v9" +) + +var ( + RDB *redis.Client + RedisEnabled = false +) + +// InitRedisClient This function is called after init() +func InitRedisClient() (err error) { + if os.Getenv("REDIS_CONN_STRING") == "" { + logger.SysLog("REDIS_CONN_STRING not set, Redis is not enabled") + return nil + } + RedisEnabled = true + logger.SysLog("Redis is enabled") + opt, err := redis.ParseURL(os.Getenv("REDIS_CONN_STRING")) + if err != nil { + logger.FatalLog("failed to parse Redis connection string: " + err.Error()) + } + RDB = redis.NewClient(opt) + + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + + _, err = RDB.Ping(ctx).Result() + if err != nil { + logger.FatalLog("Redis ping test failed: " + err.Error()) + } + return err +} + +func RedisSet(key string, value string, expiration time.Duration) error { + ctx := context.Background() + return RDB.Set(ctx, key, value, expiration).Err() +} + +func RedisGet(key string) (string, error) { + ctx := context.Background() + return RDB.Get(ctx, key).Result() +} + +func RedisDel(key string) error { + ctx := context.Background() + return RDB.Del(ctx, key).Err() +} diff --git a/service/aiproxy/common/render/render.go b/service/aiproxy/common/render/render.go new file mode 100644 index 00000000000..9a7d0fe0d68 --- /dev/null +++ b/service/aiproxy/common/render/render.go @@ -0,0 +1,33 @@ +package render + +import ( + "fmt" + "strings" + + "github.com/gin-gonic/gin" + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/conv" +) + +func StringData(c *gin.Context, str string) { + str = strings.TrimPrefix(str, "data:") + // str = strings.TrimSuffix(str, "\r") + c.Render(-1, common.CustomEvent{Data: "data: " + strings.TrimSpace(str)}) + c.Writer.Flush() +} + +func ObjectData(c *gin.Context, object any) error { + jsonData, err := 
json.Marshal(object) + if err != nil { + return fmt.Errorf("error marshalling object: %w", err) + } + StringData(c, conv.BytesToString(jsonData)) + return nil +} + +const DONE = "[DONE]" + +func Done(c *gin.Context) { + StringData(c, DONE) +} diff --git a/service/aiproxy/controller/channel-billing.go b/service/aiproxy/controller/channel-billing.go new file mode 100644 index 00000000000..dcfd836b1f7 --- /dev/null +++ b/service/aiproxy/controller/channel-billing.go @@ -0,0 +1,412 @@ +package controller + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "strconv" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/balance" + "github.com/labring/sealos/service/aiproxy/common/client" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + + "github.com/gin-gonic/gin" +) + +// https://github.com/labring/sealos/service/aiproxy/issues/79 + +type OpenAISubscriptionResponse struct { + Object string `json:"object"` + HasPaymentMethod bool `json:"has_payment_method"` + SoftLimitUSD float64 `json:"soft_limit_usd"` + HardLimitUSD float64 `json:"hard_limit_usd"` + SystemHardLimitUSD float64 `json:"system_hard_limit_usd"` + AccessUntil int64 `json:"access_until"` +} + +type OpenAIUsageDailyCost struct { + LineItems []struct { + Name string `json:"name"` + Cost float64 `json:"cost"` + } + Timestamp float64 `json:"timestamp"` +} + +type OpenAICreditGrants struct { + Object string `json:"object"` + TotalGranted float64 `json:"total_granted"` + TotalUsed float64 `json:"total_used"` + TotalAvailable float64 `json:"total_available"` +} + +type OpenAIUsageResponse struct { + Object string `json:"object"` + // DailyCosts []OpenAIUsageDailyCost `json:"daily_costs"` + TotalUsage float64 `json:"total_usage"` // unit: 0.01 dollar +} + +type 
OpenAISBUsageResponse struct { + Data *struct { + Credit string `json:"credit"` + } `json:"data"` + Msg string `json:"msg"` +} + +type AIProxyUserOverviewResponse struct { + Message string `json:"message"` + ErrorCode int `json:"error_code"` + Data struct { + TotalPoints float64 `json:"totalPoints"` + } `json:"data"` + Success bool `json:"success"` +} + +type API2GPTUsageResponse struct { + Object string `json:"object"` + TotalGranted float64 `json:"total_granted"` + TotalUsed float64 `json:"total_used"` + TotalRemaining float64 `json:"total_remaining"` +} + +type APGC2DGPTUsageResponse struct { + // Grants interface{} `json:"grants"` + Object string `json:"object"` + TotalAvailable float64 `json:"total_available"` + TotalGranted float64 `json:"total_granted"` + TotalUsed float64 `json:"total_used"` +} + +type SiliconFlowUsageResponse struct { + Message string `json:"message"` + Data struct { + ID string `json:"id"` + Name string `json:"name"` + Image string `json:"image"` + Email string `json:"email"` + Balance string `json:"balance"` + Status string `json:"status"` + Introduction string `json:"introduction"` + Role string `json:"role"` + ChargeBalance string `json:"chargeBalance"` + TotalBalance string `json:"totalBalance"` + Category string `json:"category"` + IsAdmin bool `json:"isAdmin"` + } `json:"data"` + Code int `json:"code"` + Status bool `json:"status"` +} + +// GetAuthHeader get auth header +func GetAuthHeader(token string) http.Header { + h := http.Header{} + h.Add("Authorization", "Bearer "+token) + return h +} + +func GetResponseBody(method, url string, _ *model.Channel, headers http.Header) ([]byte, error) { + req, err := http.NewRequestWithContext(context.Background(), method, url, nil) + if err != nil { + return nil, err + } + for k := range headers { + req.Header.Add(k, headers.Get(k)) + } + res, err := client.HTTPClient.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, 
fmt.Errorf("status code: %d", res.StatusCode) + } + body, err := io.ReadAll(res.Body) + if err != nil { + return nil, err + } + return body, nil +} + +func updateChannelCloseAIBalance(channel *model.Channel) (float64, error) { + url := channel.BaseURL + "/dashboard/billing/credit_grants" + body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + response := OpenAICreditGrants{} + err = json.Unmarshal(body, &response) + if err != nil { + return 0, err + } + channel.UpdateBalance(response.TotalAvailable) + return response.TotalAvailable, nil +} + +func updateChannelOpenAISBBalance(channel *model.Channel) (float64, error) { + url := "https://api.openai-sb.com/sb-api/user/status?api_key=" + channel.Key + body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + response := OpenAISBUsageResponse{} + err = json.Unmarshal(body, &response) + if err != nil { + return 0, err + } + if response.Data == nil { + return 0, errors.New(response.Msg) + } + balance, err := strconv.ParseFloat(response.Data.Credit, 64) + if err != nil { + return 0, err + } + channel.UpdateBalance(balance) + return balance, nil +} + +func updateChannelAIProxyBalance(channel *model.Channel) (float64, error) { + url := "https://aiproxy.io/api/report/getUserOverview" + headers := http.Header{} + headers.Add("Api-Key", channel.Key) + body, err := GetResponseBody("GET", url, channel, headers) + if err != nil { + return 0, err + } + response := AIProxyUserOverviewResponse{} + err = json.Unmarshal(body, &response) + if err != nil { + return 0, err + } + if !response.Success { + return 0, fmt.Errorf("code: %d, message: %s", response.ErrorCode, response.Message) + } + channel.UpdateBalance(response.Data.TotalPoints) + return response.Data.TotalPoints, nil +} + +func updateChannelAPI2GPTBalance(channel *model.Channel) (float64, error) { + url := 
"https://api.api2gpt.com/dashboard/billing/credit_grants" + body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + response := API2GPTUsageResponse{} + err = json.Unmarshal(body, &response) + if err != nil { + return 0, err + } + channel.UpdateBalance(response.TotalRemaining) + return response.TotalRemaining, nil +} + +func updateChannelAIGC2DBalance(channel *model.Channel) (float64, error) { + url := "https://api.aigc2d.com/dashboard/billing/credit_grants" + body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + response := APGC2DGPTUsageResponse{} + err = json.Unmarshal(body, &response) + if err != nil { + return 0, err + } + channel.UpdateBalance(response.TotalAvailable) + return response.TotalAvailable, nil +} + +func updateChannelSiliconFlowBalance(channel *model.Channel) (float64, error) { + url := "https://api.siliconflow.cn/v1/user/info" + body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + response := SiliconFlowUsageResponse{} + err = json.Unmarshal(body, &response) + if err != nil { + return 0, err + } + if response.Code != 20000 { + return 0, fmt.Errorf("code: %d, message: %s", response.Code, response.Message) + } + balance, err := strconv.ParseFloat(response.Data.Balance, 64) + if err != nil { + return 0, err + } + channel.UpdateBalance(balance) + return balance, nil +} + +func updateChannelBalance(channel *model.Channel) (float64, error) { + baseURL := channeltype.ChannelBaseURLs[channel.Type] + if channel.BaseURL == "" { + channel.BaseURL = baseURL + } + switch channel.Type { + case channeltype.OpenAI: + baseURL = channel.BaseURL + case channeltype.Azure: + return 0, errors.New("尚未实现") + case channeltype.Custom: + baseURL = channel.BaseURL + case channeltype.CloseAI: + return updateChannelCloseAIBalance(channel) + case channeltype.OpenAISB: + return 
updateChannelOpenAISBBalance(channel) + case channeltype.AIProxy: + return updateChannelAIProxyBalance(channel) + case channeltype.API2GPT: + return updateChannelAPI2GPTBalance(channel) + case channeltype.AIGC2D: + return updateChannelAIGC2DBalance(channel) + case channeltype.SiliconFlow: + return updateChannelSiliconFlowBalance(channel) + default: + return 0, errors.New("尚未实现") + } + url := baseURL + "/v1/dashboard/billing/subscription" + + body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + subscription := OpenAISubscriptionResponse{} + err = json.Unmarshal(body, &subscription) + if err != nil { + return 0, err + } + now := time.Now() + startDate := now.Format("2006-01") + "-01" + endDate := now.Format("2006-01-02") + if !subscription.HasPaymentMethod { + startDate = now.AddDate(0, 0, -100).Format("2006-01-02") + } + url = baseURL + "/v1/dashboard/billing/usage?start_date=" + startDate + "&end_date=" + endDate + body, err = GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) + if err != nil { + return 0, err + } + usage := OpenAIUsageResponse{} + err = json.Unmarshal(body, &usage) + if err != nil { + return 0, err + } + balance := subscription.HardLimitUSD - usage.TotalUsage/100 + channel.UpdateBalance(balance) + return balance, nil +} + +func UpdateChannelBalance(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + channel, err := model.GetChannelByID(id, false) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + balance, err := updateChannelBalance(channel) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "balance": balance, + }) +} + +func updateAllChannelsBalance() error 
{ + channels, err := model.GetAllChannels(false, false) + if err != nil { + return err + } + for _, channel := range channels { + if channel.Status != model.ChannelStatusEnabled { + continue + } + // TODO: support Azure + if channel.Type != channeltype.OpenAI && channel.Type != channeltype.Custom { + continue + } + balance, err := updateChannelBalance(channel) + if err != nil { + continue + } + // err is nil & balance <= 0 means quota is used up + if balance <= 0 { + _ = model.DisableChannelByID(channel.ID) + } + time.Sleep(time.Second) + } + return nil +} + +func UpdateAllChannelsBalance(c *gin.Context) { + // err := updateAllChannelsBalance() + // if err != nil { + // c.JSON(http.StatusOK, gin.H{ + // "success": false, + // "message": err.Error(), + // }) + // return + // } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func AutomaticallyUpdateChannels(frequency int) { + for { + time.Sleep(time.Duration(frequency) * time.Minute) + logger.SysLog("updating all channels") + _ = updateAllChannelsBalance() + logger.SysLog("channels update done") + } +} + +// subscription +func GetSubscription(c *gin.Context) { + group := c.GetString(ctxkey.Group) + b, _, err := balance.Default.GetGroupRemainBalance(c, group) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + quota := c.GetFloat64(ctxkey.TokenQuota) + if quota <= 0 { + quota = b + } + c.JSON(http.StatusOK, OpenAISubscriptionResponse{ + HardLimitUSD: quota / 7, + SoftLimitUSD: b / 7, + SystemHardLimitUSD: quota / 7, + }) +} + +func GetUsage(c *gin.Context) { + usedAmount := c.GetFloat64(ctxkey.TokenUsedAmount) + c.JSON(http.StatusOK, OpenAIUsageResponse{TotalUsage: usedAmount / 7 * 100}) +} diff --git a/service/aiproxy/controller/channel-test.go b/service/aiproxy/controller/channel-test.go new file mode 100644 index 00000000000..c3505010191 --- /dev/null +++ b/service/aiproxy/controller/channel-test.go @@ -0,0 +1,236 @@ 
+package controller + +import ( + "bytes" + "errors" + "fmt" + "io" + "net/http" + "net/http/httptest" + "net/url" + "slices" + "strconv" + "sync" + "time" + + json "github.com/json-iterator/go" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/middleware" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/monitor" + relay "github.com/labring/sealos/service/aiproxy/relay" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/controller" + "github.com/labring/sealos/service/aiproxy/relay/meta" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +func buildTestRequest(model string) *relaymodel.GeneralOpenAIRequest { + if model == "" { + model = "gpt-3.5-turbo" + } + testRequest := &relaymodel.GeneralOpenAIRequest{ + MaxTokens: 2, + Model: model, + } + testMessage := relaymodel.Message{ + Role: "user", + Content: "hi", + } + testRequest.Messages = append(testRequest.Messages, testMessage) + return testRequest +} + +func testChannel(channel *model.Channel, request *relaymodel.GeneralOpenAIRequest) (openaiErr *relaymodel.Error, err error) { + if len(channel.Models) == 0 { + channel.Models = config.GetDefaultChannelModels()[channel.Type] + if len(channel.Models) == 0 { + return nil, errors.New("no models") + } + } + modelName := request.Model + if modelName == "" { + modelName = channel.Models[0] + } else if !slices.Contains(channel.Models, modelName) { + return nil, fmt.Errorf("model %s not supported", modelName) + } + if v, ok := channel.ModelMapping[modelName]; ok { + modelName = v + } + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + c.Request = &http.Request{ 
+ Method: http.MethodPost, + URL: &url.URL{Path: "/v1/chat/completions"}, + Body: nil, + Header: make(http.Header), + } + c.Request.Header.Set("Authorization", "Bearer "+channel.Key) + c.Request.Header.Set("Content-Type", "application/json") + c.Set(ctxkey.Channel, channel.Type) + c.Set(ctxkey.BaseURL, channel.BaseURL) + c.Set(ctxkey.Config, channel.Config) + middleware.SetupContextForSelectedChannel(c, channel, "") + meta := meta.GetByContext(c) + apiType := channeltype.ToAPIType(channel.Type) + adaptor := relay.GetAdaptor(apiType) + if adaptor == nil { + return nil, fmt.Errorf("invalid api type: %d, adaptor is nil", apiType) + } + adaptor.Init(meta) + meta.OriginModelName, meta.ActualModelName = request.Model, modelName + request.Model = modelName + convertedRequest, err := adaptor.ConvertRequest(c, relaymode.ChatCompletions, request) + if err != nil { + return nil, err + } + jsonData, err := json.Marshal(convertedRequest) + if err != nil { + return nil, err + } + logger.SysLogf("testing channel #%d, request: \n%s", channel.ID, jsonData) + requestBody := bytes.NewBuffer(jsonData) + c.Request.Body = io.NopCloser(requestBody) + resp, err := adaptor.DoRequest(c, meta, requestBody) + if err != nil { + return nil, err + } + if resp != nil && resp.StatusCode != http.StatusOK { + err := controller.RelayErrorHandler(resp) + return &err.Error, errors.New(err.Error.Message) + } + usage, respErr := adaptor.DoResponse(c, resp, meta) + if respErr != nil { + return &respErr.Error, errors.New(respErr.Error.Message) + } + if usage == nil { + return nil, errors.New("usage is nil") + } + result := w.Result() + // print result.Body + respBody, err := io.ReadAll(result.Body) + if err != nil { + return nil, err + } + logger.SysLogf("testing channel #%d, response: \n%s", channel.ID, respBody) + return nil, nil +} + +func TestChannel(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), 
+ }) + return + } + channel, err := model.GetChannelByID(id, false) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + model := c.Query("model") + testRequest := buildTestRequest(model) + tik := time.Now() + _, err = testChannel(channel, testRequest) + tok := time.Now() + milliseconds := tok.Sub(tik).Milliseconds() + if err != nil { + milliseconds = 0 + } + go channel.UpdateResponseTime(milliseconds) + consumedTime := float64(milliseconds) / 1000.0 + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + "time": consumedTime, + "model": model, + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "time": consumedTime, + "model": model, + }) +} + +var ( + testAllChannelsLock sync.Mutex + testAllChannelsRunning = false +) + +func testChannels(onlyDisabled bool) error { + testAllChannelsLock.Lock() + if testAllChannelsRunning { + testAllChannelsLock.Unlock() + return errors.New("测试已在运行中") + } + testAllChannelsRunning = true + testAllChannelsLock.Unlock() + channels, err := model.GetAllChannels(onlyDisabled, false) + if err != nil { + return err + } + go func() { + for _, channel := range channels { + isChannelEnabled := channel.Status == model.ChannelStatusEnabled + tik := time.Now() + testRequest := buildTestRequest("") + openaiErr, err := testChannel(channel, testRequest) + tok := time.Now() + milliseconds := tok.Sub(tik).Milliseconds() + if isChannelEnabled && monitor.ShouldDisableChannel(openaiErr, -1) { + _ = model.DisableChannelByID(channel.ID) + } + if !isChannelEnabled && monitor.ShouldEnableChannel(err, openaiErr) { + _ = model.EnableChannelByID(channel.ID) + } + channel.UpdateResponseTime(milliseconds) + time.Sleep(time.Second * 1) + } + testAllChannelsLock.Lock() + testAllChannelsRunning = false + testAllChannelsLock.Unlock() + }() + return nil +} + +func TestChannels(c *gin.Context) { + onlyDisabled := 
c.Query("only_disabled") == "true" + err := testChannels(onlyDisabled) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func AutomaticallyTestChannels(frequency int) { + for { + time.Sleep(time.Duration(frequency) * time.Minute) + logger.SysLog("testing all channels") + err := testChannels(false) + if err != nil { + logger.SysLog("testing all channels failed: " + err.Error()) + } + logger.SysLog("channel test finished") + } +} diff --git a/service/aiproxy/controller/channel.go b/service/aiproxy/controller/channel.go new file mode 100644 index 00000000000..dff792e721d --- /dev/null +++ b/service/aiproxy/controller/channel.go @@ -0,0 +1,314 @@ +package controller + +import ( + "maps" + "net/http" + "slices" + "strconv" + "strings" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/model" +) + +func GetChannels(c *gin.Context) { + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + id, _ := strconv.Atoi(c.Query("id")) + name := c.Query("name") + key := c.Query("key") + channelType, _ := strconv.Atoi(c.Query("channel_type")) + baseURL := c.Query("base_url") + order := c.Query("order") + channels, total, err := model.GetChannels(p*perPage, perPage, false, false, id, name, key, channelType, baseURL, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "channels": channels, + "total": total, + }, + }) +} + +func GetAllChannels(c *gin.Context) { + channels, err := model.GetAllChannels(false, false) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + 
c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": channels, + }) +} + +func AddChannels(c *gin.Context) { + channels := make([]*AddChannelRequest, 0) + err := c.ShouldBindJSON(&channels) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + _channels := make([]*model.Channel, 0, len(channels)) + for _, channel := range channels { + _channels = append(_channels, channel.ToChannels()...) + } + err = model.BatchInsertChannels(_channels) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func SearchChannels(c *gin.Context) { + keyword := c.Query("keyword") + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + id, _ := strconv.Atoi(c.Query("id")) + name := c.Query("name") + key := c.Query("key") + channelType, _ := strconv.Atoi(c.Query("channel_type")) + baseURL := c.Query("base_url") + order := c.Query("order") + channels, total, err := model.SearchChannels(keyword, p*perPage, perPage, false, false, id, name, key, channelType, baseURL, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "channels": channels, + "total": total, + }, + }) +} + +func GetChannel(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + channel, err := model.GetChannelByID(id, false) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": 
"", + "data": channel, + }) +} + +type AddChannelRequest struct { + ModelMapping map[string]string `json:"model_mapping"` + Config model.ChannelConfig `json:"config"` + Name string `json:"name"` + Key string `json:"key"` + BaseURL string `json:"base_url"` + Other string `json:"other"` + Models []string `json:"models"` + Type int `json:"type"` + Priority int32 `json:"priority"` + Status int `json:"status"` +} + +func (r *AddChannelRequest) ToChannel() *model.Channel { + return &model.Channel{ + Type: r.Type, + Name: r.Name, + Key: r.Key, + BaseURL: r.BaseURL, + Other: r.Other, + Models: slices.Clone(r.Models), + ModelMapping: maps.Clone(r.ModelMapping), + Config: r.Config, + Priority: r.Priority, + Status: r.Status, + } +} + +func (r *AddChannelRequest) ToChannels() []*model.Channel { + keys := strings.Split(r.Key, "\n") + channels := make([]*model.Channel, 0, len(keys)) + for _, key := range keys { + if key == "" { + continue + } + c := r.ToChannel() + c.Key = key + channels = append(channels, c) + } + return channels +} + +func AddChannel(c *gin.Context) { + channel := AddChannelRequest{} + err := c.ShouldBindJSON(&channel) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = model.BatchInsertChannels(channel.ToChannels()) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func DeleteChannel(c *gin.Context) { + id, _ := strconv.Atoi(c.Param("id")) + err := model.DeleteChannelByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +type UpdateChannelRequest struct { + AddChannelRequest + ID int `json:"id"` +} + +func (r *UpdateChannelRequest) ToChannel() *model.Channel { + c := r.AddChannelRequest.ToChannel() + c.ID = 
r.ID + return c +} + +func UpdateChannel(c *gin.Context) { + channel := UpdateChannelRequest{} + err := c.ShouldBindJSON(&channel) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + ch := channel.ToChannel() + err = model.UpdateChannel(ch) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": UpdateChannelRequest{ + ID: ch.ID, + AddChannelRequest: AddChannelRequest{ + Type: ch.Type, + Name: ch.Name, + Key: ch.Key, + BaseURL: ch.BaseURL, + Other: ch.Other, + Models: ch.Models, + ModelMapping: ch.ModelMapping, + Priority: ch.Priority, + Config: ch.Config, + }, + }, + }) +} + +type UpdateChannelStatusRequest struct { + Status int `json:"status"` +} + +func UpdateChannelStatus(c *gin.Context) { + id, _ := strconv.Atoi(c.Param("id")) + status := UpdateChannelStatusRequest{} + err := c.ShouldBindJSON(&status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = model.UpdateChannelStatusByID(id, status.Status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} diff --git a/service/aiproxy/controller/group.go b/service/aiproxy/controller/group.go new file mode 100644 index 00000000000..142e4f96333 --- /dev/null +++ b/service/aiproxy/controller/group.go @@ -0,0 +1,237 @@ +package controller + +import ( + "net/http" + "strconv" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/model" + + "github.com/gin-gonic/gin" +) + +func GetGroups(c *gin.Context) { + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + 
perPage = 100 + } + + order := c.DefaultQuery("order", "") + groups, total, err := model.GetGroups(p*perPage, perPage, order, false) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "groups": groups, + "total": total, + }, + }) +} + +func SearchGroups(c *gin.Context) { + keyword := c.Query("keyword") + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + order := c.DefaultQuery("order", "") + status, _ := strconv.Atoi(c.Query("status")) + groups, total, err := model.SearchGroup(keyword, p*perPage, perPage, order, status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "groups": groups, + "total": total, + }, + }) +} + +func GetGroup(c *gin.Context) { + id := c.Param("id") + if id == "" { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "group id is empty", + }) + return + } + group, err := model.GetGroupByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": group, + }) +} + +func GetGroupDashboard(c *gin.Context) { + id := c.Param("id") + now := time.Now() + startOfDay := now.Truncate(24*time.Hour).AddDate(0, 0, -6).Unix() + endOfDay := now.Truncate(24 * time.Hour).Add(24*time.Hour - time.Second).Unix() + + dashboards, err := model.SearchLogsByDayAndModel(id, time.Unix(startOfDay, 0), time.Unix(endOfDay, 0)) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "failed to get statistics", + "data": nil, + }) + return + } + 
c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": dashboards, + }) +} + +type UpdateGroupQPMRequest struct { + ID string `json:"id"` + QPM int64 `json:"qpm"` +} + +func UpdateGroupQPM(c *gin.Context) { + req := UpdateGroupQPMRequest{} + err := json.NewDecoder(c.Request.Body).Decode(&req) + if err != nil || req.ID == "" { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "invalid parameter", + }) + return + } + err = model.UpdateGroupQPM(req.ID, req.QPM) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +type UpdateGroupStatusRequest struct { + ID string `json:"id"` + Status int `json:"status"` +} + +func UpdateGroupStatus(c *gin.Context) { + req := UpdateGroupStatusRequest{} + err := json.NewDecoder(c.Request.Body).Decode(&req) + if err != nil || req.ID == "" { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "invalid parameter", + }) + return + } + err = model.UpdateGroupStatus(req.ID, req.Status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func DeleteGroup(c *gin.Context) { + id := c.Param("id") + if id == "" { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "invalid parameter", + }) + return + } + err := model.DeleteGroupByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +type CreateGroupRequest struct { + ID string `json:"id"` + QPM int64 `json:"qpm"` +} + +func CreateGroup(c *gin.Context) { + var group CreateGroupRequest + err := json.NewDecoder(c.Request.Body).Decode(&group) + if err != nil || group.ID == "" { + c.JSON(http.StatusOK, gin.H{ + "success": false, + 
"message": "invalid parameter", + }) + return + } + if err := model.CreateGroup(&model.Group{ + ID: group.ID, + QPM: group.QPM, + }); err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} diff --git a/service/aiproxy/controller/log.go b/service/aiproxy/controller/log.go new file mode 100644 index 00000000000..94279e66410 --- /dev/null +++ b/service/aiproxy/controller/log.go @@ -0,0 +1,325 @@ +package controller + +import ( + "net/http" + "strconv" + "time" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/model" +) + +func GetLogs(c *gin.Context) { + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + code, _ := strconv.Atoi(c.Query("code")) + startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64) + endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64) + var startTimestampTime time.Time + if startTimestamp != 0 { + startTimestampTime = time.UnixMilli(startTimestamp) + } + var endTimestampTime time.Time + if endTimestamp != 0 { + endTimestampTime = time.UnixMilli(endTimestamp) + } + tokenName := c.Query("token_name") + modelName := c.Query("model_name") + channel, _ := strconv.Atoi(c.Query("channel")) + group := c.Query("group") + endpoint := c.Query("endpoint") + content := c.Query("content") + tokenID, _ := strconv.Atoi(c.Query("token_id")) + order := c.Query("order") + logs, total, err := model.GetLogs( + startTimestampTime, endTimestampTime, + code, modelName, group, tokenID, tokenName, p*perPage, perPage, channel, endpoint, content, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + 
c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "logs": logs, + "total": total, + }, + }) +} + +func GetGroupLogs(c *gin.Context) { + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + code, _ := strconv.Atoi(c.Query("code")) + startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64) + endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64) + var startTimestampTime time.Time + if startTimestamp != 0 { + startTimestampTime = time.UnixMilli(startTimestamp) + } + var endTimestampTime time.Time + if endTimestamp != 0 { + endTimestampTime = time.UnixMilli(endTimestamp) + } + tokenName := c.Query("token_name") + modelName := c.Query("model_name") + channel, _ := strconv.Atoi(c.Query("channel")) + group := c.Param("group") + endpoint := c.Query("endpoint") + content := c.Query("content") + tokenID, _ := strconv.Atoi(c.Query("token_id")) + order := c.Query("order") + logs, total, err := model.GetGroupLogs(group, + startTimestampTime, endTimestampTime, + code, modelName, tokenID, tokenName, p*perPage, perPage, channel, endpoint, content, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "logs": logs, + "total": total, + }, + }) +} + +func SearchLogs(c *gin.Context) { + keyword := c.Query("keyword") + p, _ := strconv.Atoi(c.Query("p")) + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + code, _ := strconv.Atoi(c.Query("code")) + endpoint := c.Query("endpoint") + tokenName := c.Query("token_name") + modelName := c.Query("model_name") + content := c.Query("content") + groupID := c.Query("group_id") + tokenID, _ := 
strconv.Atoi(c.Query("token_id")) + channel, _ := strconv.Atoi(c.Query("channel")) + startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64) + endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64) + var startTimestampTime time.Time + if startTimestamp != 0 { + startTimestampTime = time.UnixMilli(startTimestamp) + } + var endTimestampTime time.Time + if endTimestamp != 0 { + endTimestampTime = time.UnixMilli(endTimestamp) + } + order := c.Query("order") + logs, total, err := model.SearchLogs(keyword, p, perPage, code, endpoint, groupID, tokenID, tokenName, modelName, content, startTimestampTime, endTimestampTime, channel, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "logs": logs, + "total": total, + }, + }) +} + +func SearchGroupLogs(c *gin.Context) { + keyword := c.Query("keyword") + p, _ := strconv.Atoi(c.Query("p")) + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + group := c.Param("group") + code, _ := strconv.Atoi(c.Query("code")) + endpoint := c.Query("endpoint") + tokenName := c.Query("token_name") + modelName := c.Query("model_name") + content := c.Query("content") + tokenID, _ := strconv.Atoi(c.Query("token_id")) + channelID, _ := strconv.Atoi(c.Query("channel")) + startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64) + endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64) + var startTimestampTime time.Time + if startTimestamp != 0 { + startTimestampTime = time.UnixMilli(startTimestamp) + } + var endTimestampTime time.Time + if endTimestamp != 0 { + endTimestampTime = time.UnixMilli(endTimestamp) + } + order := c.Query("order") + logs, total, err := model.SearchGroupLogs(group, keyword, p, perPage, code, endpoint, tokenID, tokenName, modelName, 
content, startTimestampTime, endTimestampTime, channelID, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "logs": logs, + "total": total, + }, + }) +} + +func GetLogsStat(c *gin.Context) { + startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64) + endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64) + if endTimestamp < startTimestamp { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "end_timestamp must be greater than start_timestamp", + }) + return + } + tokenName := c.Query("token_name") + group := c.Query("group") + modelName := c.Query("model_name") + channel, _ := strconv.Atoi(c.Query("channel")) + endpoint := c.Query("endpoint") + var startTimestampTime time.Time + if startTimestamp != 0 { + startTimestampTime = time.UnixMilli(startTimestamp) + } + var endTimestampTime time.Time + if endTimestamp != 0 { + endTimestampTime = time.UnixMilli(endTimestamp) + } + quotaNum := model.SumUsedQuota(startTimestampTime, endTimestampTime, modelName, group, tokenName, channel, endpoint) + // tokenNum := model.SumUsedToken(logType, startTimestamp, endTimestamp, modelName, username, "") + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "quota": quotaNum, + // "token": tokenNum, + }, + }) +} + +func GetLogsSelfStat(c *gin.Context) { + group := c.GetString(ctxkey.Group) + startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64) + endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64) + tokenName := c.Query("token_name") + modelName := c.Query("model_name") + channel, _ := strconv.Atoi(c.Query("channel")) + endpoint := c.Query("endpoint") + var startTimestampTime time.Time + if startTimestamp != 0 { + startTimestampTime = time.UnixMilli(startTimestamp) + } + var endTimestampTime time.Time + if 
endTimestamp != 0 { + endTimestampTime = time.UnixMilli(endTimestamp) + } + quotaNum := model.SumUsedQuota(startTimestampTime, endTimestampTime, modelName, group, tokenName, channel, endpoint) + // tokenNum := model.SumUsedToken(logType, startTimestamp, endTimestamp, modelName, username, tokenName) + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "quota": quotaNum, + // "token": tokenNum, + }, + }) +} + +func DeleteHistoryLogs(c *gin.Context) { + timestamp, _ := strconv.ParseInt(c.Query("timestamp"), 10, 64) + if timestamp == 0 { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "timestamp is required", + }) + return + } + count, err := model.DeleteOldLog(time.UnixMilli(timestamp)) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": count, + }) +} + +func SearchConsumeError(c *gin.Context) { + keyword := c.Query("keyword") + group := c.Query("group") + tokenName := c.Query("token_name") + modelName := c.Query("model_name") + content := c.Query("content") + tokenID, _ := strconv.Atoi(c.Query("token_id")) + usedAmount, _ := strconv.ParseFloat(c.Query("used_amount"), 64) + page, _ := strconv.Atoi(c.Query("page")) + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + order := c.Query("order") + logs, total, err := model.SearchConsumeError(keyword, group, tokenName, modelName, content, usedAmount, tokenID, page, perPage, order) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "logs": logs, + "total": total, + }, + }) +} diff --git a/service/aiproxy/controller/misc.go b/service/aiproxy/controller/misc.go new file mode 100644 index 00000000000..d2713aa4fc2 --- 
/dev/null +++ b/service/aiproxy/controller/misc.go @@ -0,0 +1,19 @@ +package controller + +import ( + "net/http" + + "github.com/labring/sealos/service/aiproxy/common" + + "github.com/gin-gonic/gin" +) + +func GetStatus(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "start_time": common.StartTime, + }, + }) +} diff --git a/service/aiproxy/controller/model.go b/service/aiproxy/controller/model.go new file mode 100644 index 00000000000..f40d75e2557 --- /dev/null +++ b/service/aiproxy/controller/model.go @@ -0,0 +1,311 @@ +package controller + +import ( + "fmt" + "net/http" + "slices" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/model" + relay "github.com/labring/sealos/service/aiproxy/relay" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/apitype" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/meta" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + billingprice "github.com/labring/sealos/service/aiproxy/relay/price" +) + +// https://platform.openai.com/docs/api-reference/models/list + +type OpenAIModelPermission struct { + Group *string `json:"group"` + ID string `json:"id"` + Object string `json:"object"` + Organization string `json:"organization"` + Created int `json:"created"` + AllowCreateEngine bool `json:"allow_create_engine"` + AllowSampling bool `json:"allow_sampling"` + AllowLogprobs bool `json:"allow_logprobs"` + AllowSearchIndices bool `json:"allow_search_indices"` + AllowView bool `json:"allow_view"` + AllowFineTuning bool `json:"allow_fine_tuning"` + IsBlocking bool `json:"is_blocking"` +} + +type OpenAIModels struct { + Parent *string `json:"parent"` + ID string `json:"id"` + 
Object string `json:"object"` + OwnedBy string `json:"owned_by"` + Root string `json:"root"` + Permission []OpenAIModelPermission `json:"permission"` + Created int `json:"created"` +} + +var ( + models []OpenAIModels + modelsMap map[string]OpenAIModels + channelID2Models map[int][]string +) + +func init() { + var permission []OpenAIModelPermission + permission = append(permission, OpenAIModelPermission{ + ID: "modelperm-LwHkVFn8AcMItP432fKKDIKJ", + Object: "model_permission", + Created: 1626777600, + AllowCreateEngine: true, + AllowSampling: true, + AllowLogprobs: true, + AllowSearchIndices: false, + AllowView: true, + AllowFineTuning: false, + Organization: "*", + Group: nil, + IsBlocking: false, + }) + // https://platform.openai.com/docs/models/model-endpoint-compatibility + for i := 0; i < apitype.Dummy; i++ { + if i == apitype.AIProxyLibrary { + continue + } + adaptor := relay.GetAdaptor(i) + adaptor.Init(&meta.Meta{ + ChannelType: i, + }) + channelName := adaptor.GetChannelName() + modelNames := adaptor.GetModelList() + for _, modelName := range modelNames { + models = append(models, OpenAIModels{ + ID: modelName, + Object: "model", + Created: 1626777600, + OwnedBy: channelName, + Permission: permission, + Root: modelName, + Parent: nil, + }) + } + } + for _, channelType := range openai.CompatibleChannels { + if channelType == channeltype.Azure { + continue + } + channelName, channelModelList := openai.GetCompatibleChannelMeta(channelType) + for _, modelName := range channelModelList { + models = append(models, OpenAIModels{ + ID: modelName, + Object: "model", + Created: 1626777600, + OwnedBy: channelName, + Permission: permission, + Root: modelName, + Parent: nil, + }) + } + } + modelsMap = make(map[string]OpenAIModels) + for _, model := range models { + modelsMap[model.ID] = model + } + channelID2Models = make(map[int][]string) + for i := 1; i < channeltype.Dummy; i++ { + adaptor := relay.GetAdaptor(channeltype.ToAPIType(i)) + meta := &meta.Meta{ + 
ChannelType: i, + } + adaptor.Init(meta) + channelID2Models[i] = adaptor.GetModelList() + } +} + +func BuiltinModels(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": channelID2Models, + }) +} + +type modelPrice struct { + Prompt float64 `json:"prompt"` + Completion float64 `json:"completion"` + Unset bool `json:"unset,omitempty"` +} + +func ModelPrice(c *gin.Context) { + bill := make(map[string]*modelPrice) + modelPriceMap := billingprice.GetModelPriceMap() + completionPriceMap := billingprice.GetCompletionPriceMap() + for model, price := range modelPriceMap { + bill[model] = &modelPrice{ + Prompt: price, + Completion: price, + } + if completionPrice, ok := completionPriceMap[model]; ok { + bill[model].Completion = completionPrice + } + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": bill, + }) +} + +func EnabledType2Models(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": model.CacheGetType2Models(), + }) +} + +func EnabledType2ModelsAndPrice(c *gin.Context) { + type2Models := model.CacheGetType2Models() + result := make(map[int]map[string]*modelPrice) + + modelPriceMap := billingprice.GetModelPriceMap() + completionPriceMap := billingprice.GetCompletionPriceMap() + + for channelType, models := range type2Models { + m := make(map[string]*modelPrice) + result[channelType] = m + for _, modelName := range models { + if price, ok := modelPriceMap[modelName]; ok { + m[modelName] = &modelPrice{ + Prompt: price, + Completion: price, + } + if completionPrice, ok := completionPriceMap[modelName]; ok { + m[modelName].Completion = completionPrice + } + } else { + m[modelName] = &modelPrice{ + Unset: true, + } + } + } + } + + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": result, + }) +} + +func ChannelDefaultModels(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": 
config.GetDefaultChannelModels(), + }) +} + +func ChannelDefaultModelsByType(c *gin.Context) { + channelType := c.Param("type") + if channelType == "" { + c.JSON(http.StatusBadRequest, gin.H{ + "success": false, + "message": "type is required", + }) + return + } + channelTypeInt, err := strconv.Atoi(channelType) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{ + "success": false, + "message": "invalid type", + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": config.GetDefaultChannelModels()[channelTypeInt], + }) +} + +func EnabledModels(c *gin.Context) { + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": model.CacheGetAllModels(), + }) +} + +func EnabledModelsAndPrice(c *gin.Context) { + enabledModels := model.CacheGetAllModels() + result := make(map[string]*modelPrice) + + modelPriceMap := billingprice.GetModelPriceMap() + completionPriceMap := billingprice.GetCompletionPriceMap() + + for _, modelName := range enabledModels { + if price, ok := modelPriceMap[modelName]; ok { + result[modelName] = &modelPrice{ + Prompt: price, + Completion: price, + } + if completionPrice, ok := completionPriceMap[modelName]; ok { + result[modelName].Completion = completionPrice + } + } else { + result[modelName] = &modelPrice{ + Unset: true, + } + } + } + + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": result, + }) +} + +func ListModels(c *gin.Context) { + availableModels := c.GetStringSlice(ctxkey.AvailableModels) + availableOpenAIModels := make([]OpenAIModels, 0, len(availableModels)) + + for _, modelName := range availableModels { + if model, ok := modelsMap[modelName]; ok { + availableOpenAIModels = append(availableOpenAIModels, model) + continue + } + availableOpenAIModels = append(availableOpenAIModels, OpenAIModels{ + ID: modelName, + Object: "model", + Created: 1626777600, + OwnedBy: "custom", + Root: modelName, + Parent: nil, + }) + } + + c.JSON(http.StatusOK, gin.H{ 
+ "object": "list", + "data": availableOpenAIModels, + }) +} + +func RetrieveModel(c *gin.Context) { + modelID := c.Param("model") + model, ok := modelsMap[modelID] + if !ok || !slices.Contains(c.GetStringSlice(ctxkey.AvailableModels), modelID) { + c.JSON(200, gin.H{ + "error": relaymodel.Error{ + Message: fmt.Sprintf("the model '%s' does not exist", modelID), + Type: "invalid_request_error", + Param: "model", + Code: "model_not_found", + }, + }) + } + c.JSON(200, model) +} diff --git a/service/aiproxy/controller/option.go b/service/aiproxy/controller/option.go new file mode 100644 index 00000000000..dd3c273ccec --- /dev/null +++ b/service/aiproxy/controller/option.go @@ -0,0 +1,74 @@ +package controller + +import ( + "net/http" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/model" + + "github.com/gin-gonic/gin" +) + +func GetOptions(c *gin.Context) { + options := make(map[string]string) + config.OptionMapRWMutex.RLock() + for k, v := range config.OptionMap { + options[k] = v + } + config.OptionMapRWMutex.RUnlock() + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": options, + }) +} + +func UpdateOption(c *gin.Context) { + var option model.Option + err := json.NewDecoder(c.Request.Body).Decode(&option) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{ + "success": false, + "message": "invalid parameter", + }) + return + } + err = model.UpdateOption(option.Key, option.Value) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func UpdateOptions(c *gin.Context) { + var options map[string]string + err := json.NewDecoder(c.Request.Body).Decode(&options) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{ + "success": false, + "message": "invalid parameter", + }) + return + } + err = 
model.UpdateOptions(options) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} diff --git a/service/aiproxy/controller/relay.go b/service/aiproxy/controller/relay.go new file mode 100644 index 00000000000..95eaaa16a4b --- /dev/null +++ b/service/aiproxy/controller/relay.go @@ -0,0 +1,156 @@ +package controller + +import ( + "bytes" + "context" + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/middleware" + dbmodel "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/monitor" + "github.com/labring/sealos/service/aiproxy/relay/controller" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +// https://platform.openai.com/docs/api-reference/chat + +func relayHelper(c *gin.Context, relayMode int) *model.ErrorWithStatusCode { + var err *model.ErrorWithStatusCode + switch relayMode { + case relaymode.ImagesGenerations: + err = controller.RelayImageHelper(c, relayMode) + case relaymode.AudioSpeech: + fallthrough + case relaymode.AudioTranslation: + fallthrough + case relaymode.AudioTranscription: + err = controller.RelayAudioHelper(c, relayMode) + default: + err = controller.RelayTextHelper(c) + } + return err +} + +func Relay(c *gin.Context) { + ctx := c.Request.Context() + relayMode := relaymode.GetByPath(c.Request.URL.Path) + if config.DebugEnabled { + requestBody, _ := common.GetRequestBody(c) + logger.Debugf(ctx, "request body: %s", requestBody) + } + channelID := 
c.GetInt(ctxkey.ChannelID) + bizErr := relayHelper(c, relayMode) + if bizErr == nil { + monitor.Emit(channelID, true) + return + } + lastFailedChannelID := channelID + group := c.GetString(ctxkey.Group) + originalModel := c.GetString(ctxkey.OriginalModel) + go processChannelRelayError(ctx, group, channelID, bizErr) + requestID := c.GetString(string(helper.RequestIDKey)) + retryTimes := config.GetRetryTimes() + if !shouldRetry(c, bizErr.StatusCode) { + logger.Errorf(ctx, "relay error happen, status code is %d, won't retry in this case", bizErr.StatusCode) + retryTimes = 0 + } + for i := retryTimes; i > 0; i-- { + channel, err := dbmodel.CacheGetRandomSatisfiedChannel(originalModel) + if err != nil { + logger.Errorf(ctx, "get random satisfied channel failed: %+v", err) + break + } + logger.Infof(ctx, "using channel #%d to retry (remain times %d)", channel.ID, i) + if channel.ID == lastFailedChannelID { + continue + } + middleware.SetupContextForSelectedChannel(c, channel, originalModel) + requestBody, err := common.GetRequestBody(c) + if err != nil { + logger.Errorf(ctx, "GetRequestBody failed: %+v", err) + break + } + c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody)) + bizErr = relayHelper(c, relayMode) + if bizErr == nil { + return + } + channelID := c.GetInt(ctxkey.ChannelID) + lastFailedChannelID = channelID + // BUG: bizErr is in race condition + go processChannelRelayError(ctx, group, channelID, bizErr) + } + if bizErr != nil { + if bizErr.StatusCode == http.StatusTooManyRequests { + bizErr.Error.Message = "The upstream load of the current group is saturated, please try again later" + } + + // BUG: bizErr is in race condition + bizErr.Error.Message = helper.MessageWithRequestID(bizErr.Error.Message, requestID) + c.JSON(bizErr.StatusCode, gin.H{ + "error": bizErr.Error, + }) + } +} + +func shouldRetry(c *gin.Context, statusCode int) bool { + if _, ok := c.Get(ctxkey.SpecificChannelID); ok { + return false + } + if statusCode == 
http.StatusTooManyRequests { + return true + } + if statusCode/100 == 5 { + return true + } + if statusCode == http.StatusBadRequest { + return false + } + if statusCode/100 == 2 { + return false + } + return true +} + +func processChannelRelayError(ctx context.Context, group string, channelID int, err *model.ErrorWithStatusCode) { + logger.Errorf(ctx, "relay error (channel id %d, group: %s): %s", channelID, group, err.Message) + // https://platform.openai.com/docs/guides/error-codes/api-errors + if monitor.ShouldDisableChannel(&err.Error, err.StatusCode) { + _ = dbmodel.DisableChannelByID(channelID) + } else { + monitor.Emit(channelID, false) + } +} + +func RelayNotImplemented(c *gin.Context) { + err := model.Error{ + Message: "API not implemented", + Type: "aiproxy_error", + Param: "", + Code: "api_not_implemented", + } + c.JSON(http.StatusNotImplemented, gin.H{ + "error": err, + }) +} + +func RelayNotFound(c *gin.Context) { + err := model.Error{ + Message: fmt.Sprintf("Invalid URL (%s %s)", c.Request.Method, c.Request.URL.Path), + Type: "invalid_request_error", + Param: "", + Code: "", + } + c.JSON(http.StatusNotFound, gin.H{ + "error": err, + }) +} diff --git a/service/aiproxy/controller/token.go b/service/aiproxy/controller/token.go new file mode 100644 index 00000000000..1abfb77b172 --- /dev/null +++ b/service/aiproxy/controller/token.go @@ -0,0 +1,624 @@ +package controller + +import ( + "errors" + "fmt" + "net/http" + "strconv" + "time" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/network" + "github.com/labring/sealos/service/aiproxy/common/random" + "github.com/labring/sealos/service/aiproxy/model" +) + +func GetTokens(c *gin.Context) { + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + group := c.Query("group") + order := c.Query("order") + status, _ := 
strconv.Atoi(c.Query("status")) + tokens, total, err := model.GetTokens(p*perPage, perPage, order, group, status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "tokens": tokens, + "total": total, + }, + }) +} + +func GetGroupTokens(c *gin.Context) { + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + group := c.Param("group") + order := c.Query("order") + status, _ := strconv.Atoi(c.Query("status")) + tokens, total, err := model.GetGroupTokens(group, p*perPage, perPage, order, status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "tokens": tokens, + "total": total, + }, + }) +} + +func SearchTokens(c *gin.Context) { + keyword := c.Query("keyword") + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + order := c.Query("order") + name := c.Query("name") + key := c.Query("key") + status, _ := strconv.Atoi(c.Query("status")) + group := c.Query("group") + tokens, total, err := model.SearchTokens(keyword, p*perPage, perPage, order, status, name, key, group) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "tokens": tokens, + "total": total, + }, + }) +} + +func SearchGroupTokens(c *gin.Context) { + keyword := c.Query("keyword") + p, _ := strconv.Atoi(c.Query("p")) + p-- + if p < 0 { + p = 0 + } + perPage, _ := 
strconv.Atoi(c.Query("per_page")) + if perPage <= 0 { + perPage = 10 + } else if perPage > 100 { + perPage = 100 + } + group := c.Param("group") + order := c.Query("order") + name := c.Query("name") + key := c.Query("key") + status, _ := strconv.Atoi(c.Query("status")) + tokens, total, err := model.SearchGroupTokens(group, keyword, p*perPage, perPage, order, status, name, key) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": gin.H{ + "tokens": tokens, + "total": total, + }, + }) +} + +func GetToken(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + token, err := model.GetTokenByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": token, + }) +} + +func GetGroupToken(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + group := c.Param("group") + token, err := model.GetGroupTokenByID(group, id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": token, + }) +} + +func validateToken(token AddTokenRequest) error { + if token.Name == "" { + return errors.New("token name cannot be empty") + } + if len(token.Name) > 30 { + return errors.New("token name is too long") + } + if token.Subnet != "" { + err := network.IsValidSubnets(token.Subnet) + if err != nil { + return fmt.Errorf("invalid subnet: %w", err) + } + } + return nil +} + +type AddTokenRequest struct { + Name string `json:"name"` + Subnet string 
`json:"subnet"` + Models []string `json:"models"` + ExpiredAt int64 `json:"expiredAt"` + Quota float64 `json:"quota"` +} + +func AddToken(c *gin.Context) { + group := c.Param("group") + token := AddTokenRequest{} + err := c.ShouldBindJSON(&token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = validateToken(token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "parameter error: " + err.Error(), + }) + return + } + + var expiredAt time.Time + if token.ExpiredAt == 0 { + expiredAt = time.Time{} + } else { + expiredAt = time.UnixMilli(token.ExpiredAt) + } + + cleanToken := &model.Token{ + GroupID: group, + Name: model.EmptyNullString(token.Name), + Key: random.GenerateKey(), + ExpiredAt: expiredAt, + Quota: token.Quota, + Models: token.Models, + Subnet: token.Subnet, + } + err = model.InsertToken(cleanToken, c.Query("auto_create_group") == "true") + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": cleanToken, + }) +} + +func DeleteToken(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = model.DeleteTokenByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func DeleteGroupToken(c *gin.Context) { + group := c.Param("group") + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = model.DeleteTokenByIDAndGroupID(id, group) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + 
c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func UpdateToken(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + token := AddTokenRequest{} + err = c.ShouldBindJSON(&token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = validateToken(token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "parameter error: " + err.Error(), + }) + return + } + cleanToken, err := model.GetTokenByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + expiredAt := time.Time{} + if token.ExpiredAt != 0 { + expiredAt = time.UnixMilli(token.ExpiredAt) + } + cleanToken.Name = model.EmptyNullString(token.Name) + cleanToken.ExpiredAt = expiredAt + cleanToken.Quota = token.Quota + cleanToken.Models = token.Models + cleanToken.Subnet = token.Subnet + err = model.UpdateToken(cleanToken) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": cleanToken, + }) +} + +func UpdateGroupToken(c *gin.Context) { + group := c.Param("group") + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + token := AddTokenRequest{} + err = c.ShouldBindJSON(&token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + err = validateToken(token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "parameter error: " + err.Error(), + }) + return + } + cleanToken, err := model.GetGroupTokenByID(group, id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + 
"message": err.Error(), + }) + return + } + expiredAt := time.Time{} + if token.ExpiredAt != 0 { + expiredAt = time.UnixMilli(token.ExpiredAt) + } + cleanToken.Name = model.EmptyNullString(token.Name) + cleanToken.ExpiredAt = expiredAt + cleanToken.Quota = token.Quota + cleanToken.Models = token.Models + cleanToken.Subnet = token.Subnet + err = model.UpdateToken(cleanToken) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + "data": cleanToken, + }) +} + +type UpdateTokenStatusRequest struct { + Status int `json:"status"` +} + +func UpdateTokenStatus(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + token := UpdateTokenStatusRequest{} + err = c.ShouldBindJSON(&token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + cleanToken, err := model.GetTokenByID(id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + if token.Status == model.TokenStatusEnabled { + if cleanToken.Status == model.TokenStatusExpired && !cleanToken.ExpiredAt.IsZero() && cleanToken.ExpiredAt.Before(time.Now()) { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "令牌已过期,无法启用,请先修改令牌过期时间,或者设置为永不过期", + }) + return + } + if cleanToken.Status == model.TokenStatusExhausted && cleanToken.Quota > 0 && cleanToken.UsedAmount >= cleanToken.Quota { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "令牌可用额度已用尽,无法启用,请先修改令牌剩余额度,或者设置为无限额度", + }) + return + } + } + err = model.UpdateTokenStatus(id, token.Status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +type 
UpdateGroupTokenStatusRequest struct { + UpdateTokenStatusRequest +} + +func UpdateGroupTokenStatus(c *gin.Context) { + group := c.Param("group") + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + token := UpdateTokenStatusRequest{} + err = c.ShouldBindJSON(&token) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + cleanToken, err := model.GetGroupTokenByID(group, id) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + if token.Status == model.TokenStatusEnabled { + if cleanToken.Status == model.TokenStatusExpired && !cleanToken.ExpiredAt.IsZero() && cleanToken.ExpiredAt.Before(time.Now()) { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "令牌已过期,无法启用,请先修改令牌过期时间,或者设置为永不过期", + }) + return + } + if cleanToken.Status == model.TokenStatusExhausted && cleanToken.Quota > 0 && cleanToken.UsedAmount >= cleanToken.Quota { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": "令牌可用额度已用尽,无法启用,请先修改令牌剩余额度,或者设置为无限额度", + }) + return + } + } + err = model.UpdateGroupTokenStatus(group, id, token.Status) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }) + return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +type UpdateTokenNameRequest struct { + Name string `json:"name"` +} + +func UpdateTokenName(c *gin.Context) { + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }); return + } + name := UpdateTokenNameRequest{} + err = c.ShouldBindJSON(&name) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }); return + } + err = model.UpdateTokenName(id, name.Name) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + 
"message": err.Error(), + }); return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} + +func UpdateGroupTokenName(c *gin.Context) { + group := c.Param("group") + id, err := strconv.Atoi(c.Param("id")) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }); return + } + name := UpdateTokenNameRequest{} + err = c.ShouldBindJSON(&name) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }); return + } + err = model.UpdateGroupTokenName(group, id, name.Name) + if err != nil { + c.JSON(http.StatusOK, gin.H{ + "success": false, + "message": err.Error(), + }); return + } + c.JSON(http.StatusOK, gin.H{ + "success": true, + "message": "", + }) +} diff --git a/service/aiproxy/deploy/Kubefile b/service/aiproxy/deploy/Kubefile new file mode 100644 index 00000000000..6a6ff80fc1c --- /dev/null +++ b/service/aiproxy/deploy/Kubefile @@ -0,0 +1,16 @@ +FROM scratch +COPY registry registry +COPY manifests manifests +COPY scripts scripts + +ENV cloudDomain="127.0.0.1.nip.io" +ENV cloudPort="" +ENV certSecretName="wildcard-cert" + +ENV ADMIN_KEY="" +ENV SEALOS_JWT_KEY="" +ENV SQL_DSN="" +ENV LOG_SQL_DSN="" +ENV REDIS_CONN_STRING="" + +CMD ["bash scripts/init.sh"] diff --git a/service/aiproxy/deploy/manifests/aiproxy-config.yaml.tmpl b/service/aiproxy/deploy/manifests/aiproxy-config.yaml.tmpl new file mode 100644 index 00000000000..94214c61537 --- /dev/null +++ b/service/aiproxy/deploy/manifests/aiproxy-config.yaml.tmpl @@ -0,0 +1,12 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: aiproxy-env +data: + DEBUG: "false" + DEBUG_SQL: "false" + ADMIN_KEY: "{{ .ADMIN_KEY }}" + SEALOS_JWT_KEY: "{{ .SEALOS_JWT_KEY }}" + SQL_DSN: "{{ .SQL_DSN }}" + LOG_SQL_DSN: "{{ .LOG_SQL_DSN }}" + REDIS_CONN_STRING: "{{ .REDIS_CONN_STRING }}" diff --git a/service/aiproxy/deploy/manifests/deploy.yaml.tmpl b/service/aiproxy/deploy/manifests/deploy.yaml.tmpl new file mode 100644 index 00000000000..208b911786e --- 
/dev/null +++ b/service/aiproxy/deploy/manifests/deploy.yaml.tmpl @@ -0,0 +1,60 @@ +apiVersion: v1 +kind: Service +metadata: + name: aiproxy + namespace: aiproxy-system + labels: + cloud.sealos.io/app-deploy-manager: aiproxy +spec: + ports: + - port: 3000 + targetPort: 3000 + selector: + app: aiproxy +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: aiproxy + namespace: aiproxy-system + annotations: + originImageName: ghcr.io/labring/sealos-aiproxy-service:latest + deploy.cloud.sealos.io/minReplicas: '3' + deploy.cloud.sealos.io/maxReplicas: '3' + labels: + cloud.sealos.io/app-deploy-manager: aiproxy + app: aiproxy +spec: + replicas: 3 + revisionHistoryLimit: 1 + selector: + matchLabels: + app: aiproxy + strategy: + type: RollingUpdate + rollingUpdate: + maxUnavailable: 0 + maxSurge: 1 + template: + metadata: + labels: + app: aiproxy + spec: + containers: + - name: aiproxy + image: ghcr.io/labring/sealos-aiproxy-service:latest + envFrom: + - configMapRef: + name: aiproxy-env + resources: + requests: + cpu: 50m + memory: 50Mi + limits: + cpu: 500m + memory: 512Mi + ports: + - containerPort: 3000 + imagePullPolicy: Always + serviceAccountName: default + automountServiceAccountToken: false diff --git a/service/aiproxy/deploy/manifests/ingress.yaml.tmpl b/service/aiproxy/deploy/manifests/ingress.yaml.tmpl new file mode 100644 index 00000000000..51d9009698c --- /dev/null +++ b/service/aiproxy/deploy/manifests/ingress.yaml.tmpl @@ -0,0 +1,37 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kubernetes.io/ingress.class: nginx + nginx.ingress.kubernetes.io/enable-cors: "true" + nginx.ingress.kubernetes.io/cors-allow-methods: "PUT, GET, POST, DELETE, PATCH, OPTIONS" + nginx.ingress.kubernetes.io/cors-allow-origin: "https://{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}, https://*.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}" + nginx.ingress.kubernetes.io/cors-allow-credentials: "true" + 
nginx.ingress.kubernetes.io/cors-max-age: "600" + nginx.ingress.kubernetes.io/backend-protocol: "HTTP" + nginx.ingress.kubernetes.io/configuration-snippet: | + more_clear_headers "X-Frame-Options:"; + more_set_headers "Content-Security-Policy: default-src * blob: data: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}; img-src * data: blob: resource: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}; connect-src * wss: blob: resource:; style-src 'self' 'unsafe-inline' blob: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} resource:; script-src 'self' 'unsafe-inline' 'unsafe-eval' blob: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} resource: *.baidu.com *.bdstatic.com https://js.stripe.com; frame-src 'self' *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} mailto: tel: weixin: mtt: *.baidu.com https://js.stripe.com; frame-ancestors 'self' https://{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} https://*.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}"; + more_set_headers "X-Xss-Protection: 1; mode=block"; + higress.io/response-header-control-remove: X-Frame-Options + higress.io/response-header-control-update: | + Content-Security-Policy "default-src * blob: data: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}; img-src * data: blob: resource: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}; connect-src * wss: blob: resource:; style-src 'self' 'unsafe-inline' blob: *.{{ 
.cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} resource:; script-src 'self' 'unsafe-inline' 'unsafe-eval' blob: *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} resource: *.baidu.com *.bdstatic.com https://js.stripe.com; frame-src 'self' *.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} {{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} mailto: tel: weixin: mtt: *.baidu.com https://js.stripe.com; frame-ancestors 'self' https://{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }} https://*.{{ .cloudDomain }}{{ if .cloudPort }}:{{ .cloudPort }}{{ end }}" + X-Xss-Protection "1; mode=block" + name: aiproxy + namespace: aiproxy-system +spec: + rules: + - host: aiproxy.{{ .cloudDomain }} + http: + paths: + - pathType: Prefix + path: /v1 + backend: + service: + name: aiproxy + port: + number: 3000 + tls: + - hosts: + - 'aiproxy.{{ .cloudDomain }}' + secretName: {{ .certSecretName }} diff --git a/service/aiproxy/deploy/scripts/init.sh b/service/aiproxy/deploy/scripts/init.sh new file mode 100644 index 00000000000..296f90e13d5 --- /dev/null +++ b/service/aiproxy/deploy/scripts/init.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -ex + +kubectl create ns aiproxy-system || true + +kubectl create -f manifests/aiproxy-config.yaml -n aiproxy-system || true + +kubectl apply -f manifests/deploy.yaml -n aiproxy-system + +if [[ -n "$cloudDomain" ]]; then + kubectl create -f manifests/ingress.yaml -n aiproxy-system || true +fi diff --git a/service/aiproxy/go.mod b/service/aiproxy/go.mod new file mode 100644 index 00000000000..c44cbcabc63 --- /dev/null +++ b/service/aiproxy/go.mod @@ -0,0 +1,114 @@ +module github.com/labring/sealos/service/aiproxy + +go 1.22.7 + +replace github.com/labring/sealos/service/aiproxy => ../aiproxy + +require ( + cloud.google.com/go/iam v1.2.2 + 
github.com/aws/aws-sdk-go-v2 v1.32.4 + github.com/aws/aws-sdk-go-v2/credentials v1.17.44 + github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.20.0 + github.com/gin-contrib/cors v1.7.2 + github.com/gin-contrib/gzip v1.0.1 + github.com/gin-gonic/gin v1.10.0 + github.com/glebarez/sqlite v1.11.0 + github.com/golang-jwt/jwt v3.2.2+incompatible + github.com/golang-jwt/jwt/v5 v5.2.1 + github.com/google/uuid v1.6.0 + github.com/jinzhu/copier v0.4.0 + github.com/joho/godotenv v1.5.1 + github.com/json-iterator/go v1.1.12 + github.com/patrickmn/go-cache v2.1.0+incompatible + github.com/pkg/errors v0.9.1 + github.com/pkoukk/tiktoken-go v0.1.7 + github.com/redis/go-redis/v9 v9.7.0 + github.com/shopspring/decimal v1.4.0 + github.com/smartystreets/goconvey v1.8.1 + github.com/stretchr/testify v1.9.0 + golang.org/x/image v0.22.0 + google.golang.org/api v0.205.0 + gorm.io/driver/mysql v1.5.7 + gorm.io/driver/postgres v1.5.9 + gorm.io/gorm v1.25.12 +) + +require ( + cloud.google.com/go/auth v0.10.2 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.5 // indirect + cloud.google.com/go/compute/metadata v0.5.2 // indirect + filippo.io/edwards25519 v1.1.0 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 // indirect + github.com/aws/smithy-go v1.22.0 // indirect + github.com/bytedance/sonic v1.12.4 // indirect + github.com/bytedance/sonic/loader v0.2.1 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/cloudwego/base64x v0.1.4 // indirect + github.com/cloudwego/iasm v0.2.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/dlclark/regexp2 v1.11.4 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + 
github.com/gabriel-vasile/mimetype v1.4.6 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/glebarez/go-sqlite v1.22.0 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.22.1 // indirect + github.com/go-sql-driver/mysql v1.8.1 // indirect + github.com/goccy/go-json v0.10.3 // indirect + github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect + github.com/google/s2a-go v0.1.8 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect + github.com/googleapis/gax-go/v2 v2.14.0 // indirect + github.com/gopherjs/gopherjs v1.17.2 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/pgx/v5 v5.7.1 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/jtolds/gls v4.20.0+incompatible // indirect + github.com/klauspost/cpuid/v2 v2.2.9 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/ncruces/go-strftime v0.1.9 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect + github.com/smarty/assertions v1.15.0 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.12 // indirect + go.opencensus.io v0.24.0 // indirect + 
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.57.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.57.0 // indirect + go.opentelemetry.io/otel v1.32.0 // indirect + go.opentelemetry.io/otel/metric v1.32.0 // indirect + go.opentelemetry.io/otel/trace v1.32.0 // indirect + golang.org/x/arch v0.12.0 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/oauth2 v0.24.0 // indirect + golang.org/x/sync v0.9.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect + golang.org/x/time v0.8.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241113202542-65e8d215514f // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241113202542-65e8d215514f // indirect + google.golang.org/grpc v1.68.0 // indirect + google.golang.org/protobuf v1.35.2 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + modernc.org/libc v1.61.0 // indirect + modernc.org/mathutil v1.6.0 // indirect + modernc.org/memory v1.8.0 // indirect + modernc.org/sqlite v1.33.1 // indirect +) diff --git a/service/aiproxy/go.sum b/service/aiproxy/go.sum new file mode 100644 index 00000000000..e48ba2706ce --- /dev/null +++ b/service/aiproxy/go.sum @@ -0,0 +1,344 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go/auth v0.10.2 h1:oKF7rgBfSHdp/kuhXtqU/tNDr0mZqhYbEh+6SiqzkKo= +cloud.google.com/go/auth v0.10.2/go.mod h1:xxA5AqpDrvS+Gkmo9RqrGGRh6WSNKKOXhY3zNOr38tI= +cloud.google.com/go/auth/oauth2adapt v0.2.5 h1:2p29+dePqsCHPP1bqDJcKj4qxRyYCcbzKpFyKGt3MTk= +cloud.google.com/go/auth/oauth2adapt v0.2.5/go.mod h1:AlmsELtlEBnaNTL7jCj8VQFLy6mbZv0s4Q7NGBeQ5E8= +cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= +cloud.google.com/go/compute/metadata v0.5.2/go.mod 
h1:C66sj2AluDcIqakBq/M8lw8/ybHgOZqin2obFxa/E5k= +cloud.google.com/go/iam v1.2.2 h1:ozUSofHUGf/F4tCNy/mu9tHLTaxZFLOUiKzjcgWHGIA= +cloud.google.com/go/iam v1.2.2/go.mod h1:0Ys8ccaZHdI1dEUilwzqng/6ps2YB6vRsjIe00/+6JY= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/aws/aws-sdk-go-v2 v1.32.4 h1:S13INUiTxgrPueTmrm5DZ+MiAo99zYzHEFh1UNkOxNE= +github.com/aws/aws-sdk-go-v2 v1.32.4/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 h1:pT3hpW0cOHRJx8Y0DfJUEQuqPild8jRGmSFmBgvydr0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6/go.mod h1:j/I2++U0xX+cr44QjHay4Cvxj6FUbnxrgmqN3H1jTZA= +github.com/aws/aws-sdk-go-v2/credentials v1.17.44 h1:qqfs5kulLUHUEXlHEZXLJkgGoF3kkUeFUTVA585cFpU= +github.com/aws/aws-sdk-go-v2/credentials v1.17.44/go.mod h1:0Lm2YJ8etJdEdw23s+q/9wTpOeo2HhNE97XcRa7T8MA= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 h1:A2w6m6Tmr+BNXjDsr7M90zkWjsu4JXHwrzPg235STs4= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23/go.mod h1:35EVp9wyeANdujZruvHiQUAo9E3vbhnIO1mTCAxMlY0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 h1:pgYW9FCabt2M25MoHYCfMrVY2ghiiBKYWUVXfwZs+sU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23/go.mod h1:c48kLgzO19wAu3CPkDWC28JbaJ+hfQlsdl7I2+oqIbk= +github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.20.0 h1:c/2Lv0Nq/I+UeWKqUKR/LS9rO8McuXc5CzIfK2aBlhg= +github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.20.0/go.mod h1:Kh/nzScDldU7Ti7MyFMCA+0Po+LZ4iNjWwl7H1DWYtU= +github.com/aws/smithy-go v1.22.0 h1:uunKnWlcoL3zO7q+gG2Pk53joueEOsnNB28QdMsmiMM= +github.com/aws/smithy-go v1.22.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/bsm/ginkgo/v2 v2.12.0 
h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/bytedance/sonic v1.12.4 h1:9Csb3c9ZJhfUWeMtpCDCq6BUoH5ogfDFLUgQ/jG+R0k= +github.com/bytedance/sonic v1.12.4/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= +github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/bytedance/sonic/loader v0.2.1 h1:1GgorWTqf12TA8mma4DDSbaQigE2wOgQo7iCjjJv3+E= +github.com/bytedance/sonic/loader v0.2.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= +github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= +github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= +github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgryski/go-rendezvous 
v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= +github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo= +github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/gabriel-vasile/mimetype v1.4.6 h1:3+PzJTKLkvgjeTbts6msPJt4DixhT4YtFNf1gtGe3zc= +github.com/gabriel-vasile/mimetype v1.4.6/go.mod h1:JX1qVKqZd40hUPpAfiNTe0Sne7hdfKSbOqqmkq8GCXc= +github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw= +github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E= +github.com/gin-contrib/gzip v1.0.1 h1:HQ8ENHODeLY7a4g1Au/46Z92bdGFl74OhxcZble9WJE= +github.com/gin-contrib/gzip v1.0.1/go.mod h1:njt428fdUNRvjuJf16tZMYZ2Yl+WQB53X5wmhDwXvC4= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= +github.com/gin-gonic/gin 
v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= +github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ= +github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc= +github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw= +github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA= +github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= +github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= 
+github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= 
+github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo= +github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw= +github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM= +github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw= +github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA= +github.com/googleapis/gax-go/v2 v2.14.0 h1:f+jMrjBPl+DL9nI4IQzLUxMq7XrAqFYB7hBPqMNIe8o= +github.com/googleapis/gax-go/v2 v2.14.0/go.mod h1:lhBCnjdLrWRaPvLWhmc8IS24m9mr07qSYnHncrgo+zk= +github.com/gopherjs/gopherjs v1.17.2 
h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g= +github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= +github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8= +github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod 
h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.9 h1:66ze0taIn2H33fBvCkXuv9BmCwDfafmiIVpKV9kKGuY= +github.com/klauspost/cpuid/v2 v2.2.9/go.mod h1:rqkxqrZ1EhYM9G+hXH7YdowN5R5RGN6NK4QwQ3WMXF8= +github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= +github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= +github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= 
+github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkoukk/tiktoken-go v0.1.7 h1:qOBHXX4PHtvIvmOtyg1EeKlwFRiMKAcoMp4Q+bLQDmw= +github.com/pkoukk/tiktoken-go v0.1.7/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= +github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k= +github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME= +github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY= +github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec= +github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sSznIX1xY= +github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= +go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.57.0 h1:qtFISDHKolvIxzSs0gIaiPUPR0Cucb0F2coHC7ZLdps= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.57.0/go.mod h1:Y+Pop1Q6hCOnETWTW4NROK/q1hv50hM7yDaUTjG8lp8= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.57.0 h1:DheMAlT6POBP+gh8RUH19EOTnQIor5QE0uSRPtzCpSw= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.57.0/go.mod h1:wZcGmeVO9nzP67aYSLDqXNWK87EZWhi7JWj1v7ZXf94= +go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= +go.opentelemetry.io/otel v1.32.0/go.mod 
h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= +go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= +go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= +go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= +go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= +golang.org/x/arch v0.12.0 h1:UsYJhbzPYGsT0HbEdmYcqtCv8UNGvnaL561NnIUvaKg= +golang.org/x/arch v0.12.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/image v0.22.0 h1:UtK5yLUzilVrkjMAZAZ34DXGpASN8i8pj8g+O+yd10g= +golang.org/x/image v0.22.0/go.mod h1:9hPFhljd4zZ1GNSIZJ49sqbp45GKK9t6w+iXvGqZUz4= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE= +golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.6.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= +golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.205.0 h1:LFaxkAIpDb/GsrWV20dMMo5MR0h8UARTbn24LmD+0Pg= +google.golang.org/api v0.205.0/go.mod h1:NrK1EMqO8Xk6l6QwRAmrXXg2v6dzukhlOyvkYtnvUuc= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto 
v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto/googleapis/api v0.0.0-20241113202542-65e8d215514f h1:M65LEviCfuZTfrfzwwEoxVtgvfkFkBUbFnRbxCXuXhU= +google.golang.org/genproto/googleapis/api v0.0.0-20241113202542-65e8d215514f/go.mod h1:Yo94eF2nj7igQt+TiJ49KxjIH8ndLYPZMIRSiRcEbg0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241113202542-65e8d215514f h1:C1QccEa9kUwvMgEUORqQD9S17QesQijxjZ84sO82mfo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241113202542-65e8d215514f/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.68.0 h1:aHQeeJbo8zAkAa3pRzrVjZlbz6uSfeOXlJNQM0RAbz0= +google.golang.org/grpc v1.68.0/go.mod h1:fmSPC5AsjSBCK54MyHRx48kpOti1/jRfOlwEWywNjWA= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= 
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/mysql v1.5.7 h1:MndhOPYOfEp2rHKgkZIhJ16eVUIRf2HmzgoPmh7FCWo= +gorm.io/driver/mysql v1.5.7/go.mod h1:sEtPWMiqiN1N1cMXoXmBbd8C6/l+TESwriotuRRpkDM= +gorm.io/driver/postgres v1.5.9 h1:DkegyItji119OlcaLjqN11kHoUgZ/j13E0jkJZgD6A8= +gorm.io/driver/postgres v1.5.9/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI= +gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= +gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8= +gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= +modernc.org/cc/v4 v4.21.4/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ= 
+modernc.org/ccgo/v4 v4.21.0 h1:kKPI3dF7RIag8YcToh5ZwDcVMIv6VGa0ED5cvh0LMW4= +modernc.org/ccgo/v4 v4.21.0/go.mod h1:h6kt6H/A2+ew/3MW/p6KEoQmrq/i3pr0J/SiwiaF/g0= +modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE= +modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ= +modernc.org/gc/v2 v2.5.0 h1:bJ9ChznK1L1mUtAQtxi0wi5AtAs5jQuw4PrPHO5pb6M= +modernc.org/gc/v2 v2.5.0/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU= +modernc.org/libc v1.61.0 h1:eGFcvWpqlnoGwzZeZe3PWJkkKbM/3SUGyk1DVZQ0TpE= +modernc.org/libc v1.61.0/go.mod h1:DvxVX89wtGTu+r72MLGhygpfi3aUGgZRdAYGCAVVud0= +modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4= +modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo= +modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E= +modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU= +modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4= +modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0= +modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc= +modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss= +modernc.org/sqlite v1.33.1 h1:trb6Z3YYoeM9eDL1O8do81kP+0ejv+YzgyFo+Gwy0nM= +modernc.org/sqlite v1.33.1/go.mod h1:pXV2xHxhzXZsgT/RtTFAPY6JJDEvOTcTdwADQCCWD4k= +modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA= +modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0= +modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y= +modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= +nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= diff --git a/service/aiproxy/main.go b/service/aiproxy/main.go new file mode 100644 index 00000000000..42a96a5d9ff --- /dev/null +++ b/service/aiproxy/main.go @@ -0,0 +1,121 
@@ +package main + +import ( + "context" + "net/http" + "os" + "os/signal" + "strconv" + "syscall" + "time" + + "github.com/gin-gonic/gin" + _ "github.com/joho/godotenv/autoload" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/balance" + "github.com/labring/sealos/service/aiproxy/common/client" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/controller" + "github.com/labring/sealos/service/aiproxy/middleware" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/router" +) + +func main() { + common.Init() + logger.SetupLogger() + + sealosJwtKey := os.Getenv("SEALOS_JWT_KEY") + if sealosJwtKey == "" { + logger.SysLog("SEALOS_JWT_KEY is not set, balance will not be enabled") + } else { + logger.SysLog("SEALOS_JWT_KEY is set, balance will be enabled") + err := balance.InitSealos(sealosJwtKey, os.Getenv("SEALOS_ACCOUNT_URL")) + if err != nil { + logger.FatalLog("failed to initialize sealos balance: " + err.Error()) + } + } + + if os.Getenv("GIN_MODE") != gin.DebugMode { + gin.SetMode(gin.ReleaseMode) + } + if config.DebugEnabled { + logger.SysLog("running in debug mode") + } + + // Initialize SQL Database + model.InitDB() + model.InitLogDB() + + defer func() { + err := model.CloseDB() + if err != nil { + logger.FatalLog("failed to close database: " + err.Error()) + } + }() + + // Initialize Redis + err := common.InitRedisClient() + if err != nil { + logger.FatalLog("failed to initialize Redis: " + err.Error()) + } + + // Initialize options + model.InitOptionMap() + model.InitChannelCache() + go model.SyncOptions(time.Second * 5) + go model.SyncChannelCache(time.Second * 5) + if os.Getenv("CHANNEL_TEST_FREQUENCY") != "" { + frequency, err := strconv.Atoi(os.Getenv("CHANNEL_TEST_FREQUENCY")) + if err != nil { + logger.FatalLog("failed to parse 
CHANNEL_TEST_FREQUENCY: " + err.Error()) + } + go controller.AutomaticallyTestChannels(frequency) + } + if config.EnableMetric { + logger.SysLog("metric enabled, will disable channel if too much request failed") + } + client.Init() + + // Initialize HTTP server + server := gin.New() + server.Use(gin.Recovery()) + server.Use(middleware.RequestID) + middleware.SetUpLogger(server) + + router.SetRouter(server) + port := os.Getenv("PORT") + if port == "" { + port = strconv.Itoa(*common.Port) + } + + // Create HTTP server + srv := &http.Server{ + Addr: ":" + port, + ReadHeaderTimeout: 10 * time.Second, + Handler: server, + } + + // Graceful shutdown setup + go func() { + logger.SysLogf("server started on http://localhost:%s", port) + if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { + logger.FatalLog("failed to start HTTP server: " + err.Error()) + } + }() + + // Wait for interrupt signal to gracefully shutdown the server + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt, syscall.SIGTERM) + <-quit + logger.SysLog("shutting down server...") + + ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) + defer cancel() + if err := srv.Shutdown(ctx); err != nil { + logger.SysError("server forced to shutdown: " + err.Error()) + } + + logger.SysLog("server exiting") +} diff --git a/service/aiproxy/middleware/auth.go b/service/aiproxy/middleware/auth.go new file mode 100644 index 00000000000..27150a6af84 --- /dev/null +++ b/service/aiproxy/middleware/auth.go @@ -0,0 +1,141 @@ +package middleware + +import ( + "fmt" + "net/http" + "slices" + "strings" + "time" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/network" + "github.com/labring/sealos/service/aiproxy/model" +) + +func AdminAuth(c *gin.Context) { + accessToken := c.Request.Header.Get("Authorization") + if 
config.AdminKey != "" && (accessToken == "" || strings.TrimPrefix(accessToken, "Bearer ") != config.AdminKey) { + c.JSON(http.StatusUnauthorized, gin.H{ + "success": false, + "message": "unauthorized, no access token provided", + }) + c.Abort() + return + } + c.Next() +} + +func TokenAuth(c *gin.Context) { + ctx := c.Request.Context() + key := c.Request.Header.Get("Authorization") + key = strings.TrimPrefix( + strings.TrimPrefix(key, "Bearer "), + "sk-", + ) + parts := strings.Split(key, "-") + key = parts[0] + token, err := model.ValidateAndGetToken(key) + if err != nil { + abortWithMessage(c, http.StatusUnauthorized, err.Error()) + return + } + if token.Subnet != "" { + if !network.IsIPInSubnets(ctx, c.ClientIP(), token.Subnet) { + abortWithMessage(c, http.StatusForbidden, + fmt.Sprintf("token (%s[%d]) can only be used in the specified subnet: %s, current ip: %s", + token.Name, + token.ID, + token.Subnet, + c.ClientIP(), + ), + ) + return + } + } + group, err := model.CacheGetGroup(token.Group) + if err != nil { + abortWithMessage(c, http.StatusInternalServerError, err.Error()) + return + } + requestModel, err := getRequestModel(c) + if err != nil && shouldCheckModel(c) { + abortWithMessage(c, http.StatusBadRequest, err.Error()) + return + } + c.Set(ctxkey.RequestModel, requestModel) + if len(token.Models) == 0 { + token.Models = model.CacheGetAllModels() + if requestModel != "" && len(token.Models) == 0 { + abortWithMessage(c, + http.StatusForbidden, + fmt.Sprintf("token (%s[%d]) has no permission to use any model", + token.Name, token.ID, + ), + ) + return + } + } + c.Set(ctxkey.AvailableModels, []string(token.Models)) + if requestModel != "" && !slices.Contains(token.Models, requestModel) { + abortWithMessage(c, + http.StatusForbidden, + fmt.Sprintf("token (%s[%d]) has no permission to use model: %s", + token.Name, token.ID, requestModel, + ), + ) + return + } + + if group.QPM <= 0 { + group.QPM = config.GetDefaultGroupQPM() + } + + if group.QPM > 0 { + ok, 
err := RateLimit(ctx, "group_qpm:"+group.ID, int(group.QPM), time.Minute) + if err != nil { + abortWithMessage(c, http.StatusInternalServerError, err.Error()) + return + } + if !ok { + abortWithMessage(c, http.StatusTooManyRequests, + group.ID+" is requesting too frequently", + ) + return + } + } + + c.Set(ctxkey.Group, token.Group) + c.Set(ctxkey.GroupQPM, group.QPM) + c.Set(ctxkey.TokenID, token.ID) + c.Set(ctxkey.TokenName, token.Name) + c.Set(ctxkey.TokenUsedAmount, token.UsedAmount) + c.Set(ctxkey.TokenQuota, token.Quota) + // if len(parts) > 1 { + // c.Set(ctxkey.SpecificChannelId, parts[1]) + // } + + // set channel id for proxy relay + if channelID := c.Param("channelid"); channelID != "" { + c.Set(ctxkey.SpecificChannelID, channelID) + } + + c.Next() +} + +func shouldCheckModel(c *gin.Context) bool { + if strings.HasPrefix(c.Request.URL.Path, "/v1/completions") { + return true + } + if strings.HasPrefix(c.Request.URL.Path, "/v1/chat/completions") { + return true + } + if strings.HasPrefix(c.Request.URL.Path, "/v1/images") { + return true + } + if strings.HasPrefix(c.Request.URL.Path, "/v1/audio") { + return true + } + return false +} diff --git a/service/aiproxy/middleware/cors.go b/service/aiproxy/middleware/cors.go new file mode 100644 index 00000000000..d2a109abece --- /dev/null +++ b/service/aiproxy/middleware/cors.go @@ -0,0 +1,15 @@ +package middleware + +import ( + "github.com/gin-contrib/cors" + "github.com/gin-gonic/gin" +) + +func CORS() gin.HandlerFunc { + config := cors.DefaultConfig() + config.AllowAllOrigins = true + config.AllowCredentials = true + config.AllowMethods = []string{"GET", "POST", "PUT", "DELETE", "OPTIONS"} + config.AllowHeaders = []string{"*"} + return cors.New(config) +} diff --git a/service/aiproxy/middleware/distributor.go b/service/aiproxy/middleware/distributor.go new file mode 100644 index 00000000000..13c4f4aea82 --- /dev/null +++ b/service/aiproxy/middleware/distributor.go @@ -0,0 +1,86 @@ +package middleware + +import 
( + "net/http" + "slices" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" +) + +type ModelRequest struct { + Model string `form:"model" json:"model"` +} + +func Distribute(c *gin.Context) { + if config.GetDisableServe() { + abortWithMessage(c, http.StatusServiceUnavailable, "service is under maintenance") + return + } + requestModel := c.GetString(ctxkey.RequestModel) + var channel *model.Channel + channelID, ok := c.Get(ctxkey.SpecificChannelID) + if ok { + id, err := strconv.Atoi(channelID.(string)) + if err != nil { + abortWithMessage(c, http.StatusBadRequest, "invalid channel ID") + return + } + channel, ok = model.CacheGetChannelByID(id) + if !ok { + abortWithMessage(c, http.StatusBadRequest, "invalid channel ID") + return + } + if !slices.Contains(channel.Models, requestModel) { + abortWithMessage(c, http.StatusServiceUnavailable, channel.Name+" does not support "+requestModel) + return + } + } else { + var err error + channel, err = model.CacheGetRandomSatisfiedChannel(requestModel) + if err != nil { + message := requestModel + " is not available" + abortWithMessage(c, http.StatusServiceUnavailable, message) + return + } + } + SetupContextForSelectedChannel(c, channel, requestModel) + c.Next() +} + +func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, modelName string) { + c.Set(ctxkey.Channel, channel.Type) + c.Set(ctxkey.ChannelID, channel.ID) + c.Set(ctxkey.APIKey, channel.Key) + c.Set(ctxkey.ChannelName, channel.Name) + c.Set(ctxkey.ModelMapping, channel.ModelMapping) + c.Set(ctxkey.OriginalModel, modelName) // for retry + c.Set(ctxkey.BaseURL, channel.BaseURL) + cfg := channel.Config + // this is for backward compatibility + if channel.Other != "" { + switch channel.Type { + case channeltype.Azure: + if 
cfg.APIVersion == "" { + cfg.APIVersion = channel.Other + } + case channeltype.Gemini: + if cfg.APIVersion == "" { + cfg.APIVersion = channel.Other + } + case channeltype.AIProxyLibrary: + if cfg.LibraryID == "" { + cfg.LibraryID = channel.Other + } + case channeltype.Ali: + if cfg.Plugin == "" { + cfg.Plugin = channel.Other + } + } + } + c.Set(ctxkey.Config, cfg) +} diff --git a/service/aiproxy/middleware/logger.go b/service/aiproxy/middleware/logger.go new file mode 100644 index 00000000000..75c3fa66dbc --- /dev/null +++ b/service/aiproxy/middleware/logger.go @@ -0,0 +1,26 @@ +package middleware + +import ( + "fmt" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/helper" +) + +func SetUpLogger(server *gin.Engine) { + server.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string { + var requestID string + if param.Keys != nil { + requestID = param.Keys[string(helper.RequestIDKey)].(string) + } + return fmt.Sprintf("[GIN] %s | %s | %3d | %13v | %15s | %7s %s\n", + param.TimeStamp.Format("2006/01/02 - 15:04:05"), + requestID, + param.StatusCode, + param.Latency, + param.ClientIP, + param.Method, + param.Path, + ) + })) +} diff --git a/service/aiproxy/middleware/rate-limit.go b/service/aiproxy/middleware/rate-limit.go new file mode 100644 index 00000000000..728d447befe --- /dev/null +++ b/service/aiproxy/middleware/rate-limit.go @@ -0,0 +1,88 @@ +package middleware + +import ( + "context" + "fmt" + "net/http" + "time" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/config" +) + +var inMemoryRateLimiter common.InMemoryRateLimiter + +// 1. 使用Redis列表存储请求时间戳 +// 2. 列表长度代表当前窗口内的请求数 +// 3. 如果请求数未达到限制,直接添加新请求并返回成功 +// 4. 如果达到限制,则检查最老的请求是否已经过期 +// 5. 如果最老的请求已过期,移除它并添加新请求,否则拒绝新请求 +// 6. 
通过EXPIRE命令设置键的过期时间,自动清理过期数据 +var luaScript = ` +local key = KEYS[1] +local max_requests = tonumber(ARGV[1]) +local window = tonumber(ARGV[2]) +local current_time = tonumber(ARGV[3]) + +local count = redis.call('LLEN', key) + +if count < max_requests then + redis.call('LPUSH', key, current_time) + redis.call('PEXPIRE', key, window) + return 1 +else + local oldest = redis.call('LINDEX', key, -1) + if current_time - tonumber(oldest) >= window then + redis.call('LPUSH', key, current_time) + redis.call('LTRIM', key, 0, max_requests - 1) + redis.call('PEXPIRE', key, window) + return 1 + else + return 0 + end +end +` + +func redisRateLimitRequest(ctx context.Context, key string, maxRequestNum int, duration time.Duration) (bool, error) { + rdb := common.RDB + currentTime := time.Now().UnixMilli() + result, err := rdb.Eval(ctx, luaScript, []string{key}, maxRequestNum, duration.Milliseconds(), currentTime).Int64() + if err != nil { + return false, err + } + return result == 1, nil +} + +func RateLimit(ctx context.Context, key string, maxRequestNum int, duration time.Duration) (bool, error) { + if maxRequestNum == 0 { + return true, nil + } + if common.RedisEnabled { + return redisRateLimitRequest(ctx, key, maxRequestNum, duration) + } + // It's safe to call multi times. 
+ inMemoryRateLimiter.Init(config.RateLimitKeyExpirationDuration) + return inMemoryRateLimiter.Request(key, maxRequestNum, duration), nil +} + +func GlobalAPIRateLimit(c *gin.Context) { + globalAPIRateLimitNum := config.GetGlobalAPIRateLimitNum() + if globalAPIRateLimitNum <= 0 { + c.Next() + return + } + ok, err := RateLimit(c.Request.Context(), "global_qpm", int(globalAPIRateLimitNum), time.Minute) + if err != nil { + fmt.Println(err.Error()) + c.Status(http.StatusInternalServerError) + c.Abort() + return + } + if !ok { + c.Status(http.StatusTooManyRequests) + c.Abort() + return + } + c.Next() +} diff --git a/service/aiproxy/middleware/recover.go b/service/aiproxy/middleware/recover.go new file mode 100644 index 00000000000..d76c1792ccb --- /dev/null +++ b/service/aiproxy/middleware/recover.go @@ -0,0 +1,32 @@ +package middleware + +import ( + "fmt" + "net/http" + "runtime/debug" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/logger" +) + +func RelayPanicRecover(c *gin.Context) { + defer func() { + if err := recover(); err != nil { + ctx := c.Request.Context() + logger.Errorf(ctx, "panic detected: %v", err) + logger.Errorf(ctx, "stacktrace from panic: %s", debug.Stack()) + logger.Errorf(ctx, "request: %s %s", c.Request.Method, c.Request.URL.Path) + body, _ := common.GetRequestBody(c) + logger.Errorf(ctx, "request body: %s", body) + c.JSON(http.StatusInternalServerError, gin.H{ + "error": gin.H{ + "message": fmt.Sprintf("Panic detected, error: %v.", err), + "type": "aiproxy_panic", + }, + }) + c.Abort() + } + }() + c.Next() +} diff --git a/service/aiproxy/middleware/request-id.go b/service/aiproxy/middleware/request-id.go new file mode 100644 index 00000000000..aabca3a04e3 --- /dev/null +++ b/service/aiproxy/middleware/request-id.go @@ -0,0 +1,17 @@ +package middleware + +import ( + "context" + + "github.com/gin-gonic/gin" + 
"github.com/labring/sealos/service/aiproxy/common/helper" +) + +func RequestID(c *gin.Context) { + id := helper.GenRequestID() + c.Set(string(helper.RequestIDKey), id) + ctx := context.WithValue(c.Request.Context(), helper.RequestIDKey, id) + c.Request = c.Request.WithContext(ctx) + c.Header(string(helper.RequestIDKey), id) + c.Next() +} diff --git a/service/aiproxy/middleware/utils.go b/service/aiproxy/middleware/utils.go new file mode 100644 index 00000000000..cf7fcc4b906 --- /dev/null +++ b/service/aiproxy/middleware/utils.go @@ -0,0 +1,43 @@ +package middleware + +import ( + "fmt" + "strings" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" +) + +func abortWithMessage(c *gin.Context, statusCode int, message string) { + c.JSON(statusCode, gin.H{ + "error": gin.H{ + "message": helper.MessageWithRequestID(message, c.GetString(string(helper.RequestIDKey))), + "type": "aiproxy_error", + }, + }) + c.Abort() + logger.Error(c.Request.Context(), message) +} + +func getRequestModel(c *gin.Context) (string, error) { + path := c.Request.URL.Path + switch { + case strings.HasPrefix(path, "/v1/moderations"): + return "text-moderation-stable", nil + case strings.HasSuffix(path, "embeddings"): + return c.Param("model"), nil + case strings.HasPrefix(path, "/v1/images/generations"): + return "dall-e-2", nil + case strings.HasPrefix(path, "/v1/audio/transcriptions"), strings.HasPrefix(path, "/v1/audio/translations"): + return c.Request.FormValue("model"), nil + default: + var modelRequest ModelRequest + err := common.UnmarshalBodyReusable(c, &modelRequest) + if err != nil { + return "", fmt.Errorf("get request model failed: %w", err) + } + return modelRequest.Model, nil + } +} diff --git a/service/aiproxy/model/cache.go b/service/aiproxy/model/cache.go new file mode 100644 index 00000000000..66b5ffafb79 --- /dev/null +++ 
b/service/aiproxy/model/cache.go @@ -0,0 +1,399 @@ +package model + +import ( + "context" + "encoding" + "errors" + "fmt" + "math/rand/v2" + "sort" + "sync" + "time" + + json "github.com/json-iterator/go" + "github.com/redis/go-redis/v9" + + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/logger" +) + +const ( + SyncFrequency = time.Minute * 3 + TokenCacheKey = "token:%s" + GroupCacheKey = "group:%s" +) + +var ( + _ encoding.BinaryMarshaler = (*redisStringSlice)(nil) + _ redis.Scanner = (*redisStringSlice)(nil) +) + +type redisStringSlice []string + +func (r *redisStringSlice) ScanRedis(value string) error { + return json.Unmarshal(conv.StringToBytes(value), r) +} + +func (r redisStringSlice) MarshalBinary() ([]byte, error) { + return json.Marshal(r) +} + +type redisTime time.Time + +func (t *redisTime) ScanRedis(value string) error { + return (*time.Time)(t).UnmarshalBinary(conv.StringToBytes(value)) +} + +func (t redisTime) MarshalBinary() ([]byte, error) { + return time.Time(t).MarshalBinary() +} + +type TokenCache struct { + ExpiredAt redisTime `json:"expired_at" redis:"e"` + Group string `json:"group" redis:"g"` + Key string `json:"-" redis:"-"` + Name string `json:"name" redis:"n"` + Subnet string `json:"subnet" redis:"s"` + Models redisStringSlice `json:"models" redis:"m"` + ID int `json:"id" redis:"i"` + Status int `json:"status" redis:"st"` + Quota float64 `json:"quota" redis:"q"` + UsedAmount float64 `json:"used_amount" redis:"u"` +} + +func (t *Token) ToTokenCache() *TokenCache { + return &TokenCache{ + ID: t.ID, + Group: t.GroupID, + Name: t.Name.String(), + Models: t.Models, + Subnet: t.Subnet, + Status: t.Status, + ExpiredAt: redisTime(t.ExpiredAt), + Quota: t.Quota, + UsedAmount: t.UsedAmount, + } +} + +func CacheDeleteToken(key string) error { + if !common.RedisEnabled { + return 
nil
+	}
+	return common.RedisDel(fmt.Sprintf(TokenCacheKey, key))
+}
+
+func CacheSetToken(token *Token) error {
+	if !common.RedisEnabled {
+		return nil
+	}
+	key := fmt.Sprintf(TokenCacheKey, token.Key)
+	pipe := common.RDB.Pipeline()
+	pipe.HSet(context.Background(), key, token.ToTokenCache())
+	expireTime := SyncFrequency + time.Duration(rand.Int64N(60)-30)*time.Second
+	pipe.Expire(context.Background(), key, expireTime)
+	_, err := pipe.Exec(context.Background())
+	return err
+}
+
+func CacheGetTokenByKey(key string) (*TokenCache, error) {
+	if !common.RedisEnabled {
+		token, err := GetTokenByKey(key)
+		if err != nil {
+			return nil, err
+		}
+		return token.ToTokenCache(), nil
+	}
+
+	cacheKey := fmt.Sprintf(TokenCacheKey, key)
+	tokenCache := &TokenCache{}
+	err := common.RDB.HGetAll(context.Background(), cacheKey).Scan(tokenCache)
+	if err == nil && tokenCache.ID != 0 {
+		tokenCache.Key = key
+		return tokenCache, nil
+	} else if err != nil && !errors.Is(err, redis.Nil) {
+		logger.SysLogf("get token (%s) from redis error: %s", key, err.Error())
+	}
+
+	token, err := GetTokenByKey(key)
+	if err != nil {
+		return nil, err
+	}
+
+	if err := CacheSetToken(token); err != nil {
+		logger.SysError("Redis set token error: " + err.Error())
+	}
+
+	return token.ToTokenCache(), nil
+}
+
+var updateTokenUsedAmountScript = redis.NewScript(`
+	if redis.call("HExists", KEYS[1], "used_amount") then
+		redis.call("HSet", KEYS[1], "used_amount", ARGV[1])
+	end
+	return redis.status_reply("ok")
+`)
+
+var updateTokenUsedAmountOnlyIncreaseScript = redis.NewScript(`
+	local used_amount = redis.call("HGet", KEYS[1], "used_amount")
+	if used_amount == false then
+		return redis.status_reply("ok")
+	end
+	if tonumber(ARGV[1]) < tonumber(used_amount) then
+		return redis.status_reply("ok")
+	end
+	redis.call("HSet", KEYS[1], "used_amount", ARGV[1])
+	return redis.status_reply("ok")
+`)
+
+var increaseTokenUsedAmountScript = redis.NewScript(`
+	local used_amount = redis.call("HGet", KEYS[1], "used_amount")
+	if 
used_amount == false then + return redis.status_reply("ok") + end + redis.call("HSet", KEYS[1], "used_amount", used_amount + ARGV[1]) + return redis.status_reply("ok") +`) + +func CacheUpdateTokenUsedAmount(key string, amount float64) error { + if !common.RedisEnabled { + return nil + } + return updateTokenUsedAmountScript.Run(context.Background(), common.RDB, []string{fmt.Sprintf(TokenCacheKey, key)}, amount).Err() +} + +func CacheUpdateTokenUsedAmountOnlyIncrease(key string, amount float64) error { + if !common.RedisEnabled { + return nil + } + return updateTokenUsedAmountOnlyIncreaseScript.Run(context.Background(), common.RDB, []string{fmt.Sprintf(TokenCacheKey, key)}, amount).Err() +} + +func CacheIncreaseTokenUsedAmount(key string, amount float64) error { + if !common.RedisEnabled { + return nil + } + return increaseTokenUsedAmountScript.Run(context.Background(), common.RDB, []string{fmt.Sprintf(TokenCacheKey, key)}, amount).Err() +} + +type GroupCache struct { + ID string `json:"-" redis:"-"` + Status int `json:"status" redis:"st"` + QPM int64 `json:"qpm" redis:"q"` +} + +func (g *Group) ToGroupCache() *GroupCache { + return &GroupCache{ + ID: g.ID, + Status: g.Status, + QPM: g.QPM, + } +} + +func CacheDeleteGroup(id string) error { + if !common.RedisEnabled { + return nil + } + return common.RedisDel(fmt.Sprintf(GroupCacheKey, id)) +} + +var updateGroupQPMScript = redis.NewScript(` + if redis.call("HExists", KEYS[1], "qpm") then + redis.call("HSet", KEYS[1], "qpm", ARGV[1]) + end + return redis.status_reply("ok") +`) + +func CacheUpdateGroupQPM(id string, qpm int64) error { + if !common.RedisEnabled { + return nil + } + return updateGroupQPMScript.Run(context.Background(), common.RDB, []string{fmt.Sprintf(GroupCacheKey, id)}, qpm).Err() +} + +var updateGroupStatusScript = redis.NewScript(` + if redis.call("HExists", KEYS[1], "status") then + redis.call("HSet", KEYS[1], "status", ARGV[1]) + end + return redis.status_reply("ok") +`) + +func 
CacheUpdateGroupStatus(id string, status int) error { + if !common.RedisEnabled { + return nil + } + return updateGroupStatusScript.Run(context.Background(), common.RDB, []string{fmt.Sprintf(GroupCacheKey, id)}, status).Err() +} + +func CacheSetGroup(group *Group) error { + if !common.RedisEnabled { + return nil + } + key := fmt.Sprintf(GroupCacheKey, group.ID) + pipe := common.RDB.Pipeline() + pipe.HSet(context.Background(), key, group.ToGroupCache()) + expireTime := SyncFrequency + time.Duration(rand.Int64N(60)-30)*time.Second + pipe.Expire(context.Background(), key, expireTime) + _, err := pipe.Exec(context.Background()) + return err +} + +func CacheGetGroup(id string) (*GroupCache, error) { + if !common.RedisEnabled { + group, err := GetGroupByID(id) + if err != nil { + return nil, err + } + return group.ToGroupCache(), nil + } + + cacheKey := fmt.Sprintf(GroupCacheKey, id) + groupCache := &GroupCache{} + err := common.RDB.HGetAll(context.Background(), cacheKey).Scan(groupCache) + if err == nil && groupCache.Status != 0 { + groupCache.ID = id + return groupCache, nil + } else if err != nil && !errors.Is(err, redis.Nil) { + logger.SysLogf("get group (%s) from redis error: %s", id, err.Error()) + } + + group, err := GetGroupByID(id) + if err != nil { + return nil, err + } + + if err := CacheSetGroup(group); err != nil { + logger.SysError("Redis set group error: " + err.Error()) + } + + return group.ToGroupCache(), nil +} + +var ( + model2channels map[string][]*Channel + allModels []string + type2Models map[int][]string + channelID2channel map[int]*Channel + channelSyncLock sync.RWMutex +) + +func CacheGetAllModels() []string { + channelSyncLock.RLock() + defer channelSyncLock.RUnlock() + return allModels +} + +func CacheGetType2Models() map[int][]string { + channelSyncLock.RLock() + defer channelSyncLock.RUnlock() + return type2Models +} + +func CacheGetModelsByType(channelType int) []string { + return CacheGetType2Models()[channelType] +} + +func 
InitChannelCache() { + newChannelID2channel := make(map[int]*Channel) + var channels []*Channel + DB.Where("status = ?", ChannelStatusEnabled).Find(&channels) + for _, channel := range channels { + if len(channel.Models) == 0 { + channel.Models = config.GetDefaultChannelModels()[channel.Type] + } + if len(channel.ModelMapping) == 0 { + channel.ModelMapping = config.GetDefaultChannelModelMapping()[channel.Type] + } + newChannelID2channel[channel.ID] = channel + } + newModel2channels := make(map[string][]*Channel) + for _, channel := range channels { + for _, model := range channel.Models { + newModel2channels[model] = append(newModel2channels[model], channel) + } + } + + // sort by priority + for _, channels := range newModel2channels { + sort.Slice(channels, func(i, j int) bool { + return channels[i].Priority > channels[j].Priority + }) + } + + models := make([]string, 0, len(newModel2channels)) + for model := range newModel2channels { + models = append(models, model) + } + + newType2ModelsMap := make(map[int]map[string]struct{}) + for _, channel := range channels { + newType2ModelsMap[channel.Type] = make(map[string]struct{}) + for _, model := range channel.Models { + newType2ModelsMap[channel.Type][model] = struct{}{} + } + } + newType2Models := make(map[int][]string) + for k, v := range newType2ModelsMap { + newType2Models[k] = make([]string, 0, len(v)) + for model := range v { + newType2Models[k] = append(newType2Models[k], model) + } + } + + channelSyncLock.Lock() + model2channels = newModel2channels + allModels = models + type2Models = newType2Models + channelID2channel = newChannelID2channel + channelSyncLock.Unlock() + logger.SysDebug("channels synced from database") +} + +func SyncChannelCache(frequency time.Duration) { + ticker := time.NewTicker(frequency) + defer ticker.Stop() + for range ticker.C { + logger.SysDebug("syncing channels from database") + InitChannelCache() + } +} + +func CacheGetRandomSatisfiedChannel(model string) (*Channel, error) { + 
channelSyncLock.RLock() + channels := model2channels[model] + channelSyncLock.RUnlock() + if len(channels) == 0 { + return nil, errors.New("model not found") + } + + if len(channels) == 1 { + return channels[0], nil + } + + var totalWeight int32 + for _, ch := range channels { + totalWeight += ch.Priority + } + + if totalWeight == 0 { + return channels[rand.IntN(len(channels))], nil + } + + r := rand.Int32N(totalWeight) + for _, ch := range channels { + r -= ch.Priority + if r < 0 { + return ch, nil + } + } + + return channels[rand.IntN(len(channels))], nil +} + +func CacheGetChannelByID(id int) (*Channel, bool) { + channelSyncLock.RLock() + channel, ok := channelID2channel[id] + channelSyncLock.RUnlock() + return channel, ok +} diff --git a/service/aiproxy/model/channel.go b/service/aiproxy/model/channel.go new file mode 100644 index 00000000000..048dd501080 --- /dev/null +++ b/service/aiproxy/model/channel.go @@ -0,0 +1,334 @@ +package model + +import ( + "fmt" + "strings" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +const ( + ErrChannelNotFound = "channel" +) + +const ( + ChannelStatusUnknown = 0 + ChannelStatusEnabled = 1 // don't use 0, 0 is the default value! 
+ ChannelStatusManuallyDisabled = 2 // also don't use 0 + ChannelStatusAutoDisabled = 3 +) + +type Channel struct { + CreatedAt time.Time `gorm:"index" json:"created_at"` + AccessedAt time.Time `json:"accessed_at"` + TestAt time.Time `json:"test_at"` + BalanceUpdatedAt time.Time `json:"balance_updated_at"` + ModelMapping map[string]string `gorm:"serializer:fastjson;type:text" json:"model_mapping"` + Config ChannelConfig `gorm:"serializer:json;type:text" json:"config"` + Other string `json:"other"` + Key string `gorm:"type:text;index" json:"key"` + Name string `gorm:"index" json:"name"` + BaseURL string `gorm:"index" json:"base_url"` + Models []string `gorm:"serializer:json;type:text" json:"models"` + Balance float64 `json:"balance"` + ResponseDuration int64 `gorm:"index" json:"response_duration"` + ID int `gorm:"primaryKey" json:"id"` + UsedAmount float64 `gorm:"index" json:"used_amount"` + RequestCount int `gorm:"index" json:"request_count"` + Status int `gorm:"default:1;index" json:"status"` + Type int `gorm:"default:0;index" json:"type"` + Priority int32 `json:"priority"` +} + +func (c *Channel) MarshalJSON() ([]byte, error) { + type Alias Channel + return json.Marshal(&struct { + *Alias + CreatedAt int64 `json:"created_at"` + AccessedAt int64 `json:"accessed_at"` + TestAt int64 `json:"test_at"` + BalanceUpdatedAt int64 `json:"balance_updated_at"` + }{ + Alias: (*Alias)(c), + CreatedAt: c.CreatedAt.UnixMilli(), + AccessedAt: c.AccessedAt.UnixMilli(), + TestAt: c.TestAt.UnixMilli(), + BalanceUpdatedAt: c.BalanceUpdatedAt.UnixMilli(), + }) +} + +//nolint:goconst +func getChannelOrder(order string) string { + switch order { + case "name": + return "name asc" + case "name-desc": + return "name desc" + case "type": + return "type asc" + case "type-desc": + return "type desc" + case "created_at": + return "created_at asc" + case "created_at-desc": + return "created_at desc" + case "accessed_at": + return "accessed_at asc" + case "accessed_at-desc": + return 
"accessed_at desc" + case "status": + return "status asc" + case "status-desc": + return "status desc" + case "test_at": + return "test_at asc" + case "test_at-desc": + return "test_at desc" + case "balance_updated_at": + return "balance_updated_at asc" + case "balance_updated_at-desc": + return "balance_updated_at desc" + case "used_amount": + return "used_amount asc" + case "used_amount-desc": + return "used_amount desc" + case "request_count": + return "request_count asc" + case "request_count-desc": + return "request_count desc" + case "priority": + return "priority asc" + case "priority-desc": + return "priority desc" + case "id": + return "id asc" + default: + return "id desc" + } +} + +type ChannelConfig struct { + Region string `json:"region,omitempty"` + SK string `json:"sk,omitempty"` + AK string `json:"ak,omitempty"` + UserID string `json:"user_id,omitempty"` + APIVersion string `json:"api_version,omitempty"` + LibraryID string `json:"library_id,omitempty"` + Plugin string `json:"plugin,omitempty"` + VertexAIProjectID string `json:"vertex_ai_project_id,omitempty"` + VertexAIADC string `json:"vertex_ai_adc,omitempty"` +} + +func GetAllChannels(onlyDisabled bool, omitKey bool) (channels []*Channel, err error) { + tx := DB.Model(&Channel{}) + if onlyDisabled { + tx = tx.Where("status = ? or status = ?", ChannelStatusAutoDisabled, ChannelStatusManuallyDisabled) + } + if omitKey { + tx = tx.Omit("key") + } + err = tx.Order("id desc").Find(&channels).Error + return channels, err +} + +func GetChannels(startIdx int, num int, onlyDisabled bool, omitKey bool, id int, name string, key string, channelType int, baseURL string, order string) (channels []*Channel, total int64, err error) { + tx := DB.Model(&Channel{}) + if onlyDisabled { + tx = tx.Where("status = ? 
or status = ?", ChannelStatusAutoDisabled, ChannelStatusManuallyDisabled) + } + if id != 0 { + tx = tx.Where("id = ?", id) + } + if name != "" { + tx = tx.Where("name = ?", name) + } + if key != "" { + tx = tx.Where("key = ?", key) + } + if channelType != 0 { + tx = tx.Where("type = ?", channelType) + } + if baseURL != "" { + tx = tx.Where("base_url = ?", baseURL) + } + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if omitKey { + tx = tx.Omit("key") + } + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getChannelOrder(order)).Limit(num).Offset(startIdx).Find(&channels).Error + return channels, total, err +} + +func SearchChannels(keyword string, startIdx int, num int, onlyDisabled bool, omitKey bool, id int, name string, key string, channelType int, baseURL string, order string) (channels []*Channel, total int64, err error) { + tx := DB.Model(&Channel{}) + if onlyDisabled { + tx = tx.Where("status = ? or status = ?", ChannelStatusAutoDisabled, ChannelStatusManuallyDisabled) + } + + // Handle exact match conditions for non-zero values + if id != 0 { + tx = tx.Where("id = ?", id) + } + if name != "" { + tx = tx.Where("name = ?", name) + } + if key != "" { + tx = tx.Where("key = ?", key) + } + if channelType != 0 { + tx = tx.Where("type = ?", channelType) + } + if baseURL != "" { + tx = tx.Where("base_url = ?", baseURL) + } + + // Handle keyword search for zero value fields + if keyword != "" { + var conditions []string + var values []interface{} + + if id == 0 { + conditions = append(conditions, "id = ?") + values = append(values, helper.String2Int(keyword)) + } + if name == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "name ILIKE ?") + } else { + conditions = append(conditions, "name LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if key == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "key ILIKE ?") + } else { + conditions = append(conditions, "key LIKE ?") + } + 
 values = append(values, "%"+keyword+"%") + } + if channelType == 0 { + conditions = append(conditions, "type = ?") + values = append(values, helper.String2Int(keyword)) + } + if baseURL == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "base_url ILIKE ?") + } else { + conditions = append(conditions, "base_url LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) + } + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if omitKey { + tx = tx.Omit("key") + } + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getChannelOrder(order)).Limit(num).Offset(startIdx).Find(&channels).Error + return channels, total, err +} + +func GetChannelByID(id int, omitKey bool) (*Channel, error) { + channel := Channel{ID: id} + var err error + if omitKey { + err = DB.Omit("key").First(&channel, "id = ?", id).Error + } else { + err = DB.First(&channel, "id = ?", id).Error + } + if err != nil { + return nil, err + } + return &channel, nil +} + +func BatchInsertChannels(channels []*Channel) error { + return DB.Transaction(func(tx *gorm.DB) error { + return tx.Create(&channels).Error + }) +} + +func UpdateChannel(channel *Channel) error { + result := DB. + Model(channel). + Omit("accessed_at", "used_amount", "request_count", "balance_updated_at", "created_at", "balance", "test_at"). + Clauses(clause.Returning{}). 
+ Updates(channel) + return HandleUpdateResult(result, ErrChannelNotFound) +} + +func (c *Channel) UpdateResponseTime(responseTime int64) { + err := DB.Model(c).Select("test_at", "response_duration").Updates(Channel{ + TestAt: time.Now(), + ResponseDuration: responseTime, + }).Error + if err != nil { + logger.SysError("failed to update response time: " + err.Error()) + } +} + +func (c *Channel) UpdateBalance(balance float64) { + err := DB.Model(c).Select("balance_updated_at", "balance").Updates(Channel{ + BalanceUpdatedAt: time.Now(), + Balance: balance, + }).Error + if err != nil { + logger.SysError("failed to update balance: " + err.Error()) + } +} + +func DeleteChannelByID(id int) error { + result := DB.Delete(&Channel{ID: id}) + return HandleUpdateResult(result, ErrChannelNotFound) +} + +func UpdateChannelStatusByID(id int, status int) error { + result := DB.Model(&Channel{}).Where("id = ?", id).Update("status", status) + return HandleUpdateResult(result, ErrChannelNotFound) +} + +func DisableChannelByID(id int) error { + return UpdateChannelStatusByID(id, ChannelStatusAutoDisabled) +} + +func EnableChannelByID(id int) error { + return UpdateChannelStatusByID(id, ChannelStatusEnabled) +} + +func UpdateChannelUsedAmount(id int, amount float64, requestCount int) error { + result := DB.Model(&Channel{}).Where("id = ?", id).Updates(map[string]interface{}{ + "used_amount": gorm.Expr("used_amount + ?", amount), + "request_count": gorm.Expr("request_count + ?", requestCount), + "accessed_at": time.Now(), + }) + return HandleUpdateResult(result, ErrChannelNotFound) +} + +func DeleteDisabledChannel() error { + result := DB.Where("status = ? 
or status = ?", ChannelStatusAutoDisabled, ChannelStatusManuallyDisabled).Delete(&Channel{}) + return HandleUpdateResult(result, ErrChannelNotFound) +} diff --git a/service/aiproxy/model/consumeerr.go b/service/aiproxy/model/consumeerr.go new file mode 100644 index 00000000000..7bf76b9b49e --- /dev/null +++ b/service/aiproxy/model/consumeerr.go @@ -0,0 +1,133 @@ +package model + +import ( + "fmt" + "strings" + "time" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" +) + +type ConsumeError struct { + CreatedAt time.Time `gorm:"index" json:"created_at"` + GroupID string `gorm:"index" json:"group_id"` + TokenName EmptyNullString `gorm:"index;not null" json:"token_name"` + Model string `gorm:"index" json:"model"` + Content string `gorm:"type:text" json:"content"` + ID int `gorm:"primaryKey" json:"id"` + UsedAmount float64 `gorm:"index" json:"used_amount"` + TokenID int `gorm:"index" json:"token_id"` +} + +func (c *ConsumeError) MarshalJSON() ([]byte, error) { + type Alias ConsumeError + return json.Marshal(&struct { + *Alias + CreatedAt int64 `json:"created_at"` + }{ + Alias: (*Alias)(c), + CreatedAt: c.CreatedAt.UnixMilli(), + }) +} + +func CreateConsumeError(group string, tokenName string, model string, content string, usedAmount float64, tokenID int) error { + return LogDB.Create(&ConsumeError{ + GroupID: group, + TokenName: EmptyNullString(tokenName), + Model: model, + Content: content, + UsedAmount: usedAmount, + TokenID: tokenID, + }).Error +} + +func SearchConsumeError(keyword string, group string, tokenName string, model string, content string, usedAmount float64, tokenID int, page int, perPage int, order string) ([]*ConsumeError, int64, error) { + tx := LogDB.Model(&ConsumeError{}) + + // Handle exact match conditions for non-zero values + if group != "" { + tx = tx.Where("group_id = ?", group) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", 
tokenName) + } + if model != "" { + tx = tx.Where("model = ?", model) + } + if content != "" { + tx = tx.Where("content = ?", content) + } + if usedAmount > 0 { + tx = tx.Where("used_amount = ?", usedAmount) + } + if tokenID != 0 { + tx = tx.Where("token_id = ?", tokenID) + } + + // Handle keyword search for zero value fields + if keyword != "" { + var conditions []string + var values []interface{} + + if tokenID == 0 { + conditions = append(conditions, "token_id = ?") + values = append(values, helper.String2Int(keyword)) + } + if group == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "group_id ILIKE ?") + } else { + conditions = append(conditions, "group_id LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if tokenName == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "token_name ILIKE ?") + } else { + conditions = append(conditions, "token_name LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if model == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "model ILIKE ?") + } else { + conditions = append(conditions, "model LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if content == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "content ILIKE ?") + } else { + conditions = append(conditions, "content LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) 
+ } + } + + var total int64 + err := tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + + page-- + if page < 0 { + page = 0 + } + + var errors []*ConsumeError + err = tx.Order(getLogOrder(order)).Limit(perPage).Offset(page * perPage).Find(&errors).Error + return errors, total, err +} diff --git a/service/aiproxy/model/group.go b/service/aiproxy/model/group.go new file mode 100644 index 00000000000..b486107eb98 --- /dev/null +++ b/service/aiproxy/model/group.go @@ -0,0 +1,215 @@ +package model + +import ( + "errors" + "fmt" + "strings" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/logger" + "gorm.io/gorm" +) + +const ( + ErrGroupNotFound = "group" +) + +const ( + GroupStatusEnabled = 1 // don't use 0, 0 is the default value! + GroupStatusDisabled = 2 // also don't use 0 +) + +type Group struct { + CreatedAt time.Time `json:"created_at"` + AccessedAt time.Time `json:"accessed_at"` + ID string `gorm:"primaryKey" json:"id"` + Tokens []*Token `gorm:"foreignKey:GroupID;constraint:OnUpdate:CASCADE,OnDelete:CASCADE" json:"-"` + Status int `gorm:"default:1;index" json:"status"` + UsedAmount float64 `gorm:"index" json:"used_amount"` + QPM int64 `gorm:"index" json:"qpm"` + RequestCount int `gorm:"index" json:"request_count"` +} + +func (g *Group) MarshalJSON() ([]byte, error) { + type Alias Group + return json.Marshal(&struct { + *Alias + CreatedAt int64 `json:"created_at"` + AccessedAt int64 `json:"accessed_at"` + }{ + Alias: (*Alias)(g), + CreatedAt: g.CreatedAt.UnixMilli(), + AccessedAt: g.AccessedAt.UnixMilli(), + }) +} + +//nolint:goconst +func getGroupOrder(order string) string { + switch order { + case "id-desc": + return "id desc" + case "request_count": + return "request_count asc" + case "request_count-desc": + return "request_count desc" + case "accessed_at": + return "accessed_at asc" + case 
"accessed_at-desc": + return "accessed_at desc" + case "status": + return "status asc" + case "status-desc": + return "status desc" + case "created_at": + return "created_at asc" + case "created_at-desc": + return "created_at desc" + case "used_amount": + return "used_amount asc" + case "used_amount-desc": + return "used_amount desc" + case "id": + return "id asc" + default: + return "id desc" + } +} + +func GetGroups(startIdx int, num int, order string, onlyDisabled bool) (groups []*Group, total int64, err error) { + tx := DB.Model(&Group{}) + if onlyDisabled { + tx = tx.Where("status = ?", GroupStatusDisabled) + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + + if total <= 0 { + return nil, 0, nil + } + + err = tx.Order(getGroupOrder(order)).Limit(num).Offset(startIdx).Find(&groups).Error + return groups, total, err +} + +func GetGroupByID(id string) (*Group, error) { + if id == "" { + return nil, errors.New("id 为空!") + } + group := Group{ID: id} + err := DB.First(&group, "id = ?", id).Error + return &group, HandleNotFound(err, ErrGroupNotFound) +} + +func DeleteGroupByID(id string) (err error) { + if id == "" { + return errors.New("id 为空!") + } + defer func() { + if err == nil { + if err := CacheDeleteGroup(id); err != nil { + logger.SysError("CacheDeleteGroup failed: " + err.Error()) + } + if _, err := DeleteGroupLogs(id); err != nil { + logger.SysError("DeleteGroupLogs failed: " + err.Error()) + } + } + }() + result := DB. 
+ Delete(&Group{ + ID: id, + }) + return HandleUpdateResult(result, ErrGroupNotFound) +} + +func UpdateGroupUsedAmountAndRequestCount(id string, amount float64, count int) error { + result := DB.Model(&Group{}).Where("id = ?", id).Updates(map[string]interface{}{ + "used_amount": gorm.Expr("used_amount + ?", amount), + "request_count": gorm.Expr("request_count + ?", count), + "accessed_at": time.Now(), + }) + return HandleUpdateResult(result, ErrGroupNotFound) +} + +func UpdateGroupUsedAmount(id string, amount float64) error { + result := DB.Model(&Group{}).Where("id = ?", id).Updates(map[string]interface{}{ + "used_amount": gorm.Expr("used_amount + ?", amount), + "accessed_at": time.Now(), + }) + return HandleUpdateResult(result, ErrGroupNotFound) +} + +func UpdateGroupRequestCount(id string, count int) error { + result := DB.Model(&Group{}).Where("id = ?", id).Updates(map[string]interface{}{ + "request_count": gorm.Expr("request_count + ?", count), + "accessed_at": time.Now(), + }) + return HandleUpdateResult(result, ErrGroupNotFound) +} + +func UpdateGroupQPM(id string, qpm int64) (err error) { + defer func() { + if err == nil { + if err := CacheUpdateGroupQPM(id, qpm); err != nil { + logger.SysError("CacheUpdateGroupQPM failed: " + err.Error()) + } + } + }() + result := DB.Model(&Group{}).Where("id = ?", id).Update("qpm", qpm) + return HandleUpdateResult(result, ErrGroupNotFound) +} + +func UpdateGroupStatus(id string, status int) (err error) { + defer func() { + if err == nil { + if err := CacheUpdateGroupStatus(id, status); err != nil { + logger.SysError("CacheUpdateGroupStatus failed: " + err.Error()) + } + } + }() + result := DB.Model(&Group{}).Where("id = ?", id).Update("status", status) + return HandleUpdateResult(result, ErrGroupNotFound) +} + +func SearchGroup(keyword string, startIdx int, num int, order string, status int) (groups []*Group, total int64, err error) { + tx := DB.Model(&Group{}) + if status != 0 { + tx = tx.Where("status = ?", status) + } 
+ if common.UsingPostgreSQL { + tx = tx.Where("id ILIKE ?", "%"+keyword+"%") + } else { + tx = tx.Where("id LIKE ?", "%"+keyword+"%") + } + if keyword != "" { + var conditions []string + var values []interface{} + + if status == 0 { + conditions = append(conditions, "status = ?") + values = append(values, 1) + } + + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) + } + } + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getGroupOrder(order)).Limit(num).Offset(startIdx).Find(&groups).Error + return groups, total, err +} + +func CreateGroup(group *Group) error { + return DB.Create(group).Error +} diff --git a/service/aiproxy/model/log.go b/service/aiproxy/model/log.go new file mode 100644 index 00000000000..15fe1f6807b --- /dev/null +++ b/service/aiproxy/model/log.go @@ -0,0 +1,516 @@ +package model + +import ( + "context" + "errors" + "fmt" + "strings" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" +) + +type Log struct { + CreatedAt time.Time `gorm:"index" json:"created_at"` + TokenName string `gorm:"index" json:"token_name"` + Endpoint string `gorm:"index" json:"endpoint"` + Content string `gorm:"type:text" json:"content"` + GroupID string `gorm:"index" json:"group"` + Model string `gorm:"index" json:"model"` + Price float64 `json:"price"` + ID int `gorm:"primaryKey" json:"id"` + CompletionPrice float64 `json:"completion_price"` + TokenID int `gorm:"index" json:"token_id"` + UsedAmount float64 `gorm:"index" json:"used_amount"` + PromptTokens int `json:"prompt_tokens"` + CompletionTokens int `json:"completion_tokens"` + ChannelID int `gorm:"index" json:"channel"` + Code int `gorm:"index" json:"code"` +} + +func (l *Log) MarshalJSON() ([]byte, error) { + type Alias Log + return json.Marshal(&struct { + *Alias + 
CreatedAt int64 `json:"created_at"` + }{ + Alias: (*Alias)(l), + CreatedAt: l.CreatedAt.UnixMilli(), + }) +} + +func RecordConsumeLog(_ context.Context, group string, code int, channelID int, promptTokens int, completionTokens int, modelName string, tokenID int, tokenName string, amount float64, price float64, completionPrice float64, endpoint string, content string) error { + log := &Log{ + GroupID: group, + CreatedAt: time.Now(), + Code: code, + PromptTokens: promptTokens, + CompletionTokens: completionTokens, + TokenID: tokenID, + TokenName: tokenName, + Model: modelName, + UsedAmount: amount, + Price: price, + CompletionPrice: completionPrice, + ChannelID: channelID, + Endpoint: endpoint, + Content: content, + } + return LogDB.Create(log).Error +} + +//nolint:goconst +func getLogOrder(order string) string { + switch order { + case "id-desc": + return "id desc" + case "used_amount": + return "used_amount asc" + case "used_amount-desc": + return "used_amount desc" + case "price": + return "price asc" + case "price-desc": + return "price desc" + case "completion_price": + return "completion_price asc" + case "completion_price-desc": + return "completion_price desc" + case "token_id": + return "token_id asc" + case "token_id-desc": + return "token_id desc" + case "token_name": + return "token_name asc" + case "token_name-desc": + return "token_name desc" + case "prompt_tokens": + return "prompt_tokens asc" + case "prompt_tokens-desc": + return "prompt_tokens desc" + case "completion_tokens": + return "completion_tokens asc" + case "completion_tokens-desc": + return "completion_tokens desc" + case "endpoint": + return "endpoint asc" + case "endpoint-desc": + return "endpoint desc" + case "group": + return "group_id asc" + case "group-desc": + return "group_id desc" + case "created_at": + return "created_at asc" + case "created_at-desc": + return "created_at desc" + case "id": + return "id asc" + default: + return "id desc" + } +} + +func GetLogs(startTimestamp 
time.Time, endTimestamp time.Time, code int, modelName string, group string, tokenID int, tokenName string, startIdx int, num int, channelID int, endpoint string, content string, order string) (logs []*Log, total int64, err error) { + tx := LogDB.Model(&Log{}) + if modelName != "" { + tx = tx.Where("model = ?", modelName) + } + if group != "" { + tx = tx.Where("group_id = ?", group) + } + if tokenID != 0 { + tx = tx.Where("token_id = ?", tokenID) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", tokenName) + } + if !startTimestamp.IsZero() { + tx = tx.Where("created_at >= ?", startTimestamp) + } + if !endTimestamp.IsZero() { + tx = tx.Where("created_at <= ?", endTimestamp) + } + if channelID != 0 { + tx = tx.Where("channel_id = ?", channelID) + } + if endpoint != "" { + tx = tx.Where("endpoint = ?", endpoint) + } + if content != "" { + tx = tx.Where("content = ?", content) + } + if code != 0 { + tx = tx.Where("code = ?", code) + } + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + + err = tx.Order(getLogOrder(order)).Limit(num).Offset(startIdx).Find(&logs).Error + return logs, total, err +} + +func GetGroupLogs(group string, startTimestamp time.Time, endTimestamp time.Time, code int, modelName string, tokenID int, tokenName string, startIdx int, num int, channelID int, endpoint string, content string, order string) (logs []*Log, total int64, err error) { + tx := LogDB.Model(&Log{}).Where("group_id = ?", group) + if modelName != "" { + tx = tx.Where("model = ?", modelName) + } + if tokenID != 0 { + tx = tx.Where("token_id = ?", tokenID) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", tokenName) + } + if !startTimestamp.IsZero() { + tx = tx.Where("created_at >= ?", startTimestamp) + } + if !endTimestamp.IsZero() { + tx = tx.Where("created_at <= ?", endTimestamp) + } + if channelID != 0 { + tx = tx.Where("channel_id = ?", channelID) + } + if endpoint != "" { + tx = 
tx.Where("endpoint = ?", endpoint) + } + if content != "" { + tx = tx.Where("content = ?", content) + } + if code != 0 { + tx = tx.Where("code = ?", code) + } + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + + err = tx.Order(getLogOrder(order)).Limit(num).Offset(startIdx).Omit("id").Find(&logs).Error + return logs, total, err +} + +func SearchLogs(keyword string, page int, perPage int, code int, endpoint string, groupID string, tokenID int, tokenName string, modelName string, content string, startTimestamp time.Time, endTimestamp time.Time, channelID int, order string) (logs []*Log, total int64, err error) { + tx := LogDB.Model(&Log{}) + + // Handle exact match conditions for non-zero values + if code != 0 { + tx = tx.Where("code = ?", code) + } + if endpoint != "" { + tx = tx.Where("endpoint = ?", endpoint) + } + if groupID != "" { + tx = tx.Where("group_id = ?", groupID) + } + if tokenID != 0 { + tx = tx.Where("token_id = ?", tokenID) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", tokenName) + } + if modelName != "" { + tx = tx.Where("model = ?", modelName) + } + if content != "" { + tx = tx.Where("content = ?", content) + } + if !startTimestamp.IsZero() { + tx = tx.Where("created_at >= ?", startTimestamp) + } + if !endTimestamp.IsZero() { + tx = tx.Where("created_at <= ?", endTimestamp) + } + if channelID != 0 { + tx = tx.Where("channel_id = ?", channelID) + } + + // Handle keyword search for zero value fields + if keyword != "" { + var conditions []string + var values []interface{} + + if code == 0 { + conditions = append(conditions, "code = ?") + values = append(values, helper.String2Int(keyword)) + } + if channelID == 0 { + conditions = append(conditions, "channel_id = ?") + values = append(values, helper.String2Int(keyword)) + } + if endpoint == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "endpoint ILIKE ?") + } else { + conditions = 
append(conditions, "endpoint LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if groupID == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "group_id ILIKE ?") + } else { + conditions = append(conditions, "group_id LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if tokenName == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "token_name ILIKE ?") + } else { + conditions = append(conditions, "token_name LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if modelName == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "model ILIKE ?") + } else { + conditions = append(conditions, "model LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if content == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "content ILIKE ?") + } else { + conditions = append(conditions, "content LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) 
+ } + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + + page-- + if page < 0 { + page = 0 + } + err = tx.Order(getLogOrder(order)).Limit(perPage).Offset(page * perPage).Find(&logs).Error + return logs, total, err +} + +func SearchGroupLogs(group string, keyword string, page int, perPage int, code int, endpoint string, tokenID int, tokenName string, modelName string, content string, startTimestamp time.Time, endTimestamp time.Time, channelID int, order string) (logs []*Log, total int64, err error) { + if group == "" { + return nil, 0, errors.New("group is empty") + } + tx := LogDB.Model(&Log{}).Where("group_id = ?", group) + + // Handle exact match conditions for non-zero values + if code != 0 { + tx = tx.Where("code = ?", code) + } + if endpoint != "" { + tx = tx.Where("endpoint = ?", endpoint) + } + if tokenID != 0 { + tx = tx.Where("token_id = ?", tokenID) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", tokenName) + } + if modelName != "" { + tx = tx.Where("model = ?", modelName) + } + if content != "" { + tx = tx.Where("content = ?", content) + } + if !startTimestamp.IsZero() { + tx = tx.Where("created_at >= ?", startTimestamp) + } + if !endTimestamp.IsZero() { + tx = tx.Where("created_at <= ?", endTimestamp) + } + if channelID != 0 { + tx = tx.Where("channel_id = ?", channelID) + } + + // Handle keyword search for zero value fields + if keyword != "" { + var conditions []string + var values []interface{} + + if code == 0 { + conditions = append(conditions, "code = ?") + values = append(values, helper.String2Int(keyword)) + } + if channelID == 0 { + conditions = append(conditions, "channel_id = ?") + values = append(values, helper.String2Int(keyword)) + } + if endpoint == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "endpoint ILIKE ?") + } else { + conditions = append(conditions, "endpoint LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + 
 if tokenName == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "token_name ILIKE ?") + } else { + conditions = append(conditions, "token_name LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if modelName == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "model ILIKE ?") + } else { + conditions = append(conditions, "model LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if content == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "content ILIKE ?") + } else { + conditions = append(conditions, "content LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) + } + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + + page-- + if page < 0 { + page = 0 + } + + err = tx.Order(getLogOrder(order)).Limit(perPage).Offset(page * perPage).Find(&logs).Error + return logs, total, err +} + +func SumUsedQuota(startTimestamp time.Time, endTimestamp time.Time, modelName string, group string, tokenName string, channel int, endpoint string) (quota int64) { + ifnull := "ifnull" + if common.UsingPostgreSQL { + ifnull = "COALESCE" + } + tx := LogDB.Table("logs").Select(ifnull + "(sum(quota),0)") + if group != "" { + tx = tx.Where("group_id = ?", group) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", tokenName) + } + if !startTimestamp.IsZero() { + tx = tx.Where("created_at >= ?", startTimestamp) + } + if !endTimestamp.IsZero() { + tx = tx.Where("created_at <= ?", endTimestamp) + } + if modelName != "" { + tx = tx.Where("model = ?", modelName) + } + if channel != 0 { + tx = tx.Where("channel_id = ?", channel) + } + if endpoint != "" { + tx = tx.Where("endpoint = ?", endpoint) + } + tx.Scan(&quota) + return quota +} + +func SumUsedToken(startTimestamp time.Time, endTimestamp time.Time, modelName string, 
group string, tokenName string, endpoint string) (token int) { + ifnull := "ifnull" + if common.UsingPostgreSQL { + ifnull = "COALESCE" + } + tx := LogDB.Table("logs").Select(fmt.Sprintf("%s(sum(prompt_tokens),0) + %s(sum(completion_tokens),0)", ifnull, ifnull)) + if group != "" { + tx = tx.Where("group_id = ?", group) + } + if tokenName != "" { + tx = tx.Where("token_name = ?", tokenName) + } + if !startTimestamp.IsZero() { + tx = tx.Where("created_at >= ?", startTimestamp) + } + if !endTimestamp.IsZero() { + tx = tx.Where("created_at <= ?", endTimestamp) + } + if modelName != "" { + tx = tx.Where("model = ?", modelName) + } + if endpoint != "" { + tx = tx.Where("endpoint = ?", endpoint) + } + tx.Scan(&token) + return token +} + +func DeleteOldLog(timestamp time.Time) (int64, error) { + result := LogDB.Where("created_at < ?", timestamp).Delete(&Log{}) + return result.RowsAffected, result.Error +} + +func DeleteGroupLogs(groupID string) (int64, error) { + result := LogDB.Where("group_id = ?", groupID).Delete(&Log{}) + return result.RowsAffected, result.Error +} + +type LogStatistic struct { + Day string `gorm:"column:day"` + Model string `gorm:"column:model"` + RequestCount int `gorm:"column:request_count"` + PromptTokens int `gorm:"column:prompt_tokens"` + CompletionTokens int `gorm:"column:completion_tokens"` +} + +func SearchLogsByDayAndModel(group string, start time.Time, end time.Time) (logStatistics []*LogStatistic, err error) { + groupSelect := "DATE_FORMAT(FROM_UNIXTIME(created_at), '%Y-%m-%d') as day" + + if common.UsingPostgreSQL { + groupSelect = "TO_CHAR(date_trunc('day', to_timestamp(created_at)), 'YYYY-MM-DD') as day" + } + + if common.UsingSQLite { + groupSelect = "strftime('%Y-%m-%d', datetime(created_at, 'unixepoch')) as day" + } + + err = LogDB.Raw(` + SELECT `+groupSelect+`, + model, count(1) as request_count, + sum(prompt_tokens) as prompt_tokens, + sum(completion_tokens) as completion_tokens + FROM logs + WHERE group_id = ? 
+ AND created_at BETWEEN ? AND ? + GROUP BY day, model + ORDER BY day, model + `, group, start, end).Scan(&logStatistics).Error + + return logStatistics, err +} diff --git a/service/aiproxy/model/main.go b/service/aiproxy/model/main.go new file mode 100644 index 00000000000..2009f9a08ae --- /dev/null +++ b/service/aiproxy/model/main.go @@ -0,0 +1,217 @@ +package model + +import ( + "fmt" + "log" + "os" + "strings" + "time" + + "github.com/glebarez/sqlite" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/env" + + // import fastjson serializer + _ "github.com/labring/sealos/service/aiproxy/common/fastJSONSerializer" + "github.com/labring/sealos/service/aiproxy/common/logger" + "gorm.io/driver/mysql" + "gorm.io/driver/postgres" + "gorm.io/gorm" + gormLogger "gorm.io/gorm/logger" +) + +var ( + DB *gorm.DB + LogDB *gorm.DB +) + +func chooseDB(envName string) (*gorm.DB, error) { + dsn := os.Getenv(envName) + + switch { + case strings.HasPrefix(dsn, "postgres"): + // Use PostgreSQL + return openPostgreSQL(dsn) + case dsn != "": + // Use MySQL + return openMySQL(dsn) + default: + // Use SQLite + return openSQLite() + } +} + +func newDBLogger() gormLogger.Interface { + var logLevel gormLogger.LogLevel + if config.DebugSQLEnabled { + logLevel = gormLogger.Info + } else { + logLevel = gormLogger.Warn + } + return gormLogger.New( + log.New(os.Stdout, "", log.LstdFlags), + gormLogger.Config{ + SlowThreshold: time.Second, + LogLevel: logLevel, + IgnoreRecordNotFoundError: true, + ParameterizedQueries: !config.DebugSQLEnabled, + Colorful: true, + }, + ) +} + +func openPostgreSQL(dsn string) (*gorm.DB, error) { + logger.SysLog("using PostgreSQL as database") + common.UsingPostgreSQL = true + return gorm.Open(postgres.New(postgres.Config{ + DSN: dsn, + PreferSimpleProtocol: true, // disables implicit prepared statement usage + }), &gorm.Config{ + PrepareStmt: true, // 
precompile SQL + TranslateError: true, + Logger: newDBLogger(), + DisableForeignKeyConstraintWhenMigrating: false, + IgnoreRelationshipsWhenMigrating: false, + }) +} + +func openMySQL(dsn string) (*gorm.DB, error) { + logger.SysLog("using MySQL as database") + common.UsingMySQL = true + return gorm.Open(mysql.Open(dsn), &gorm.Config{ + PrepareStmt: true, // precompile SQL + TranslateError: true, + Logger: newDBLogger(), + DisableForeignKeyConstraintWhenMigrating: false, + IgnoreRelationshipsWhenMigrating: false, + }) +} + +func openSQLite() (*gorm.DB, error) { + logger.SysLog("SQL_DSN not set, using SQLite as database") + common.UsingSQLite = true + dsn := fmt.Sprintf("%s?_busy_timeout=%d", common.SQLitePath, common.SQLiteBusyTimeout) + return gorm.Open(sqlite.Open(dsn), &gorm.Config{ + PrepareStmt: true, // precompile SQL + TranslateError: true, + Logger: newDBLogger(), + DisableForeignKeyConstraintWhenMigrating: false, + IgnoreRelationshipsWhenMigrating: false, + }) +} + +func InitDB() { + var err error + DB, err = chooseDB("SQL_DSN") + if err != nil { + logger.FatalLog("failed to initialize database: " + err.Error()) + return + } + + setDBConns(DB) + + if config.DisableAutoMigrateDB { + return + } + + logger.SysLog("database migration started") + if err = migrateDB(); err != nil { + logger.FatalLog("failed to migrate database: " + err.Error()) + return + } + logger.SysLog("database migrated") +} + +func migrateDB() error { + err := DB.AutoMigrate( + &Channel{}, + &Token{}, + &Group{}, + &Option{}, + ) + if err != nil { + return err + } + return nil +} + +func InitLogDB() { + if os.Getenv("LOG_SQL_DSN") == "" { + LogDB = DB + if config.DisableAutoMigrateDB { + return + } + err := migrateLOGDB() + if err != nil { + logger.FatalLog("failed to migrate secondary database: " + err.Error()) + return + } + logger.SysLog("secondary database migrated") + return + } + + logger.SysLog("using secondary database for table logs") + var err error + LogDB, err = 
chooseDB("LOG_SQL_DSN") + if err != nil { + logger.FatalLog("failed to initialize secondary database: " + err.Error()) + return + } + + setDBConns(LogDB) + + if config.DisableAutoMigrateDB { + return + } + + logger.SysLog("secondary database migration started") + err = migrateLOGDB() + if err != nil { + logger.FatalLog("failed to migrate secondary database: " + err.Error()) + return + } + logger.SysLog("secondary database migrated") +} + +func migrateLOGDB() error { + return LogDB.AutoMigrate( + &Log{}, + &ConsumeError{}, + ) +} + +func setDBConns(db *gorm.DB) { + if config.DebugSQLEnabled { + db = db.Debug() + } + + sqlDB, err := db.DB() + if err != nil { + logger.FatalLog("failed to connect database: " + err.Error()) + return + } + + sqlDB.SetMaxIdleConns(env.Int("SQL_MAX_IDLE_CONNS", 100)) + sqlDB.SetMaxOpenConns(env.Int("SQL_MAX_OPEN_CONNS", 1000)) + sqlDB.SetConnMaxLifetime(time.Second * time.Duration(env.Int("SQL_MAX_LIFETIME", 60))) +} + +func closeDB(db *gorm.DB) error { + sqlDB, err := db.DB() + if err != nil { + return err + } + err = sqlDB.Close() + return err +} + +func CloseDB() error { + if LogDB != DB { + err := closeDB(LogDB) + if err != nil { + return err + } + } + return closeDB(DB) +} diff --git a/service/aiproxy/model/option.go b/service/aiproxy/model/option.go new file mode 100644 index 00000000000..d530d63286d --- /dev/null +++ b/service/aiproxy/model/option.go @@ -0,0 +1,176 @@ +package model + +import ( + "errors" + "strconv" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/logger" + billingprice "github.com/labring/sealos/service/aiproxy/relay/price" +) + +type Option struct { + Key string `gorm:"primaryKey" json:"key"` + Value string `json:"value"` +} + +func AllOption() ([]*Option, error) { + var options []*Option + err := DB.Find(&options).Error + return options, err +} 
+ +func InitOptionMap() { + config.OptionMapRWMutex.Lock() + config.OptionMap = make(map[string]string) + config.OptionMap["DisableServe"] = strconv.FormatBool(config.GetDisableServe()) + config.OptionMap["AutomaticDisableChannelEnabled"] = strconv.FormatBool(config.GetAutomaticDisableChannelEnabled()) + config.OptionMap["AutomaticEnableChannelWhenTestSucceedEnabled"] = strconv.FormatBool(config.GetAutomaticEnableChannelWhenTestSucceedEnabled()) + config.OptionMap["ApproximateTokenEnabled"] = strconv.FormatBool(config.GetApproximateTokenEnabled()) + config.OptionMap["BillingEnabled"] = strconv.FormatBool(billingprice.GetBillingEnabled()) + config.OptionMap["ModelPrice"] = billingprice.ModelPrice2JSONString() + config.OptionMap["CompletionPrice"] = billingprice.CompletionPrice2JSONString() + config.OptionMap["RetryTimes"] = strconv.FormatInt(config.GetRetryTimes(), 10) + config.OptionMap["GlobalApiRateLimitNum"] = strconv.FormatInt(config.GetGlobalAPIRateLimitNum(), 10) + config.OptionMap["DefaultGroupQPM"] = strconv.FormatInt(config.GetDefaultGroupQPM(), 10) + defaultChannelModelsJSON, _ := json.Marshal(config.GetDefaultChannelModels()) + config.OptionMap["DefaultChannelModels"] = conv.BytesToString(defaultChannelModelsJSON) + defaultChannelModelMappingJSON, _ := json.Marshal(config.GetDefaultChannelModelMapping()) + config.OptionMap["DefaultChannelModelMapping"] = conv.BytesToString(defaultChannelModelMappingJSON) + config.OptionMap["GeminiSafetySetting"] = config.GetGeminiSafetySetting() + config.OptionMap["GeminiVersion"] = config.GetGeminiVersion() + config.OptionMap["GroupMaxTokenNum"] = strconv.FormatInt(int64(config.GetGroupMaxTokenNum()), 10) + config.OptionMapRWMutex.Unlock() + loadOptionsFromDatabase() +} + +func loadOptionsFromDatabase() { + options, _ := AllOption() + for _, option := range options { + if option.Key == "ModelPrice" { + option.Value = billingprice.AddNewMissingPrice(option.Value) + } + err := updateOptionMap(option.Key, option.Value) + 
if err != nil { + logger.SysError("failed to update option map: " + err.Error()) + } + } + logger.SysDebug("options synced from database") +} + +func SyncOptions(frequency time.Duration) { + ticker := time.NewTicker(frequency) + defer ticker.Stop() + for range ticker.C { + logger.SysDebug("syncing options from database") + loadOptionsFromDatabase() + } +} + +func UpdateOption(key string, value string) error { + err := updateOptionMap(key, value) + if err != nil { + return err + } + // Save to database first + option := Option{ + Key: key, + } + err = DB.Assign(Option{Key: key, Value: value}).FirstOrCreate(&option).Error + if err != nil { + return err + } + return nil +} + +func UpdateOptions(options map[string]string) error { + errs := make([]error, 0) + for key, value := range options { + err := UpdateOption(key, value) + if err != nil && !errors.Is(err, ErrUnknownOptionKey) { + errs = append(errs, err) + } + } + if len(errs) > 0 { + return errors.Join(errs...) + } + return nil +} + +var ErrUnknownOptionKey = errors.New("unknown option key") + +func isTrue(value string) bool { + result, _ := strconv.ParseBool(value) + return result +} + +func updateOptionMap(key string, value string) (err error) { + config.OptionMapRWMutex.Lock() + defer config.OptionMapRWMutex.Unlock() + config.OptionMap[key] = value + switch key { + case "DisableServe": + config.SetDisableServe(isTrue(value)) + case "AutomaticDisableChannelEnabled": + config.SetAutomaticDisableChannelEnabled(isTrue(value)) + case "AutomaticEnableChannelWhenTestSucceedEnabled": + config.SetAutomaticEnableChannelWhenTestSucceedEnabled(isTrue(value)) + case "ApproximateTokenEnabled": + config.SetApproximateTokenEnabled(isTrue(value)) + case "BillingEnabled": + billingprice.SetBillingEnabled(isTrue(value)) + case "GroupMaxTokenNum": + groupMaxTokenNum, err := strconv.ParseInt(value, 10, 32) + if err != nil { + return err + } + config.SetGroupMaxTokenNum(int32(groupMaxTokenNum)) + case "GeminiSafetySetting": + 
config.SetGeminiSafetySetting(value) + case "GeminiVersion": + config.SetGeminiVersion(value) + case "GlobalApiRateLimitNum": + globalAPIRateLimitNum, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return err + } + config.SetGlobalAPIRateLimitNum(globalAPIRateLimitNum) + case "DefaultGroupQPM": + defaultGroupQPM, err := strconv.ParseInt(value, 10, 64) + if err != nil { + return err + } + config.SetDefaultGroupQPM(defaultGroupQPM) + case "DefaultChannelModels": + var newModules map[int][]string + err := json.Unmarshal(conv.StringToBytes(value), &newModules) + if err != nil { + return err + } + config.SetDefaultChannelModels(newModules) + case "DefaultChannelModelMapping": + var newMapping map[int]map[string]string + err := json.Unmarshal(conv.StringToBytes(value), &newMapping) + if err != nil { + return err + } + config.SetDefaultChannelModelMapping(newMapping) + case "RetryTimes": + retryTimes, err := strconv.ParseInt(value, 10, 32) + if err != nil { + return err + } + config.SetRetryTimes(retryTimes) + case "ModelPrice": + err = billingprice.UpdateModelPriceByJSONString(value) + case "CompletionPrice": + err = billingprice.UpdateCompletionPriceByJSONString(value) + default: + return ErrUnknownOptionKey + } + return err +} diff --git a/service/aiproxy/model/token.go b/service/aiproxy/model/token.go new file mode 100644 index 00000000000..c9471e1a669 --- /dev/null +++ b/service/aiproxy/model/token.go @@ -0,0 +1,604 @@ +package model + +import ( + "errors" + "fmt" + "strings" + "time" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/logger" + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +const ( + ErrTokenNotFound = "token" +) + +const ( + TokenStatusEnabled = 1 // don't use 0, 0 is the default value! 
// tokenOrderClauses maps an API-level sort key to its SQL ORDER BY clause.
// Any key not present here (including the empty string) falls back to
// "id desc".
var tokenOrderClauses = map[string]string{
	"name":               "name asc",
	"name-desc":          "name desc",
	"accessed_at":        "accessed_at asc",
	"accessed_at-desc":   "accessed_at desc",
	"expired_at":         "expired_at asc",
	"expired_at-desc":    "expired_at desc",
	"group":              "group_id asc",
	"group-desc":         "group_id desc",
	"used_amount":        "used_amount asc",
	"used_amount-desc":   "used_amount desc",
	"request_count":      "request_count asc",
	"request_count-desc": "request_count desc",
	"id":                 "id asc",
}

// getTokenOrder translates a client-supplied sort key into a SQL ORDER BY
// clause, defaulting to "id desc" for unknown keys.
func getTokenOrder(order string) string {
	if clause, ok := tokenOrderClauses[order]; ok {
		return clause
	}
	return "id desc"
}
InsertToken(token *Token, autoCreateGroup bool) error { + if autoCreateGroup { + group := &Group{ + ID: token.GroupID, + } + if err := OnConflictDoNothing().Create(group).Error; err != nil { + return err + } + } + maxTokenNum := config.GetGroupMaxTokenNum() + err := DB.Transaction(func(tx *gorm.DB) error { + if maxTokenNum > 0 { + var count int64 + err := tx.Model(&Token{}).Where("group_id = ?", token.GroupID).Count(&count).Error + if err != nil { + return err + } + if count >= int64(maxTokenNum) { + return errors.New("group max token num reached") + } + } + return tx.Create(token).Error + }) + if err != nil { + if errors.Is(err, gorm.ErrDuplicatedKey) { + return errors.New("token name already exists in this group") + } + return err + } + return nil +} + +func GetTokens(startIdx int, num int, order string, group string, status int) (tokens []*Token, total int64, err error) { + tx := DB.Model(&Token{}) + + if group != "" { + tx = tx.Where("group_id = ?", group) + } + if status != 0 { + tx = tx.Where("status = ?", status) + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getTokenOrder(order)).Limit(num).Offset(startIdx).Find(&tokens).Error + return tokens, total, err +} + +func GetGroupTokens(group string, startIdx int, num int, order string, status int) (tokens []*Token, total int64, err error) { + if group == "" { + return nil, 0, errors.New("group is empty") + } + + tx := DB.Model(&Token{}).Where("group_id = ?", group) + + if status != 0 { + tx = tx.Where("status = ?", status) + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getTokenOrder(order)).Limit(num).Offset(startIdx).Find(&tokens).Error + return tokens, total, err +} + +func SearchTokens(keyword string, startIdx int, num int, order string, status int, name string, key string, group string) (tokens []*Token, total int64, err 
error) { + tx := DB.Model(&Token{}) + if group != "" { + tx = tx.Where("group_id = ?", group) + } + if status != 0 { + tx = tx.Where("status = ?", status) + } + if name != "" { + tx = tx.Where("name = ?", name) + } + if key != "" { + tx = tx.Where("key = ?", key) + } + + if keyword != "" { + var conditions []string + var values []interface{} + if status == 0 { + conditions = append(conditions, "status = ?") + values = append(values, 1) + } + if group == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "group_id ILIKE ?") + } else { + conditions = append(conditions, "group_id LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if name == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "name ILIKE ?") + } else { + conditions = append(conditions, "name LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if key == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "key ILIKE ?") + } else { + conditions = append(conditions, "key LIKE ?") + } + values = append(values, keyword) + } + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) 
+ } + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getTokenOrder(order)).Limit(num).Offset(startIdx).Find(&tokens).Error + return tokens, total, err +} + +func SearchGroupTokens(group string, keyword string, startIdx int, num int, order string, status int, name string, key string) (tokens []*Token, total int64, err error) { + if group == "" { + return nil, 0, errors.New("group is empty") + } + tx := DB.Model(&Token{}).Where("group_id = ?", group) + if status != 0 { + tx = tx.Where("status = ?", status) + } + if name != "" { + tx = tx.Where("name = ?", name) + } + if key != "" { + tx = tx.Where("key = ?", key) + } + + if keyword != "" { + var conditions []string + var values []interface{} + if status == 0 { + conditions = append(conditions, "status = ?") + values = append(values, 1) + } + if name == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "name ILIKE ?") + } else { + conditions = append(conditions, "name LIKE ?") + } + values = append(values, "%"+keyword+"%") + } + if key == "" { + if common.UsingPostgreSQL { + conditions = append(conditions, "key ILIKE ?") + } else { + conditions = append(conditions, "key LIKE ?") + } + values = append(values, keyword) + } + if len(conditions) > 0 { + tx = tx.Where(fmt.Sprintf("(%s)", strings.Join(conditions, " OR ")), values...) 
+ } + } + + err = tx.Count(&total).Error + if err != nil { + return nil, 0, err + } + if total <= 0 { + return nil, 0, nil + } + err = tx.Order(getTokenOrder(order)).Limit(num).Offset(startIdx).Find(&tokens).Error + return tokens, total, err +} + +func GetTokenByKey(key string) (*Token, error) { + var token Token + err := DB.Where("key = ?", key).First(&token).Error + return &token, HandleNotFound(err, ErrTokenNotFound) +} + +func GetTokenUsedAmount(id int) (float64, error) { + var amount float64 + err := DB.Model(&Token{}).Where("id = ?", id).Select("used_amount").Scan(&amount).Error + return amount, HandleNotFound(err, ErrTokenNotFound) +} + +func GetTokenUsedAmountByKey(key string) (float64, error) { + var amount float64 + err := DB.Model(&Token{}).Where("key = ?", key).Select("used_amount").Scan(&amount).Error + return amount, HandleNotFound(err, ErrTokenNotFound) +} + +func ValidateAndGetToken(key string) (token *TokenCache, err error) { + if key == "" { + return nil, errors.New("no token provided") + } + token, err = CacheGetTokenByKey(key) + if err != nil { + logger.SysError("get token from cache failed: " + err.Error()) + if errors.Is(err, gorm.ErrRecordNotFound) { + return nil, errors.New("invalid token") + } + return nil, errors.New("token validation failed") + } + switch token.Status { + case TokenStatusExhausted: + return nil, fmt.Errorf("token (%s[%d]) quota is exhausted", token.Name, token.ID) + case TokenStatusExpired: + return nil, fmt.Errorf("token (%s[%d]) is expired", token.Name, token.ID) + } + if token.Status != TokenStatusEnabled { + return nil, fmt.Errorf("token (%s[%d]) is not available", token.Name, token.ID) + } + if !time.Time(token.ExpiredAt).IsZero() && time.Time(token.ExpiredAt).Before(time.Now()) { + err := UpdateTokenStatusAndAccessedAt(token.ID, TokenStatusExpired) + if err != nil { + logger.SysError("failed to update token status" + err.Error()) + } + return nil, fmt.Errorf("token (%s[%d]) is expired", token.Name, token.ID) + } + 
if token.Quota > 0 && token.UsedAmount >= token.Quota { + // in this case, we can make sure the token is exhausted + err := UpdateTokenStatusAndAccessedAt(token.ID, TokenStatusExhausted) + if err != nil { + logger.SysError("failed to update token status" + err.Error()) + } + return nil, fmt.Errorf("token (%s[%d]) quota is exhausted", token.Name, token.ID) + } + return token, nil +} + +func GetGroupTokenByID(group string, id int) (*Token, error) { + if id == 0 || group == "" { + return nil, errors.New("id or group is empty") + } + token := Token{} + err := DB. + Where("id = ? and group_id = ?", id, group). + First(&token).Error + return &token, HandleNotFound(err, ErrTokenNotFound) +} + +func GetTokenByID(id int) (*Token, error) { + if id == 0 { + return nil, errors.New("id is empty") + } + token := Token{ID: id} + err := DB.First(&token, "id = ?", id).Error + return &token, HandleNotFound(err, ErrTokenNotFound) +} + +func UpdateTokenStatus(id int, status int) (err error) { + token := Token{ID: id} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(&token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where("id = ?", id). + Updates( + map[string]interface{}{ + "status": status, + }, + ) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateTokenStatusAndAccessedAt(id int, status int) (err error) { + token := Token{ID: id} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(&token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). 
+ Where("id = ?", id).Updates( + map[string]interface{}{ + "status": status, + "accessed_at": time.Now(), + }, + ) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateGroupTokenStatusAndAccessedAt(group string, id int, status int) (err error) { + token := Token{} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(&token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where("id = ? and group_id = ?", id, group). + Updates( + map[string]interface{}{ + "status": status, + "accessed_at": time.Now(), + }, + ) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateGroupTokenStatus(group string, id int, status int) (err error) { + token := Token{} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(&token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where("id = ? and group_id = ?", id, group). + Updates( + map[string]interface{}{ + "status": status, + }, + ) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func DeleteTokenByIDAndGroupID(id int, groupID string) (err error) { + if id == 0 || groupID == "" { + return errors.New("id 或 group 为空!") + } + token := Token{ID: id, GroupID: groupID} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where(token). 
+ Delete(&token) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func DeleteTokenByID(id int) (err error) { + if id == 0 { + return errors.New("id 为空!") + } + token := Token{ID: id} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where(token). + Delete(&token) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateToken(token *Token) (err error) { + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB.Omit("created_at", "status", "key", "group_id", "used_amount", "request_count").Save(token) + if result.Error != nil { + if errors.Is(result.Error, gorm.ErrDuplicatedKey) { + return errors.New("token name already exists in this group") + } + } + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateTokenUsedAmount(id int, amount float64, requestCount int) (err error) { + token := &Token{ID: id} + defer func() { + if amount > 0 && err == nil && token.Quota > 0 { + if err := CacheUpdateTokenUsedAmountOnlyIncrease(token.Key, token.UsedAmount); err != nil { + logger.SysError("update token used amount in cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + {Name: "quota"}, + {Name: "used_amount"}, + }, + }). + Where("id = ?", id). 
+ Updates( + map[string]interface{}{ + "used_amount": gorm.Expr("used_amount + ?", amount), + "request_count": gorm.Expr("request_count + ?", requestCount), + "accessed_at": time.Now(), + }, + ) + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateTokenName(id int, name string) (err error) { + token := &Token{ID: id} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where("id = ?", id). + Update("name", name) + if result.Error != nil && errors.Is(result.Error, gorm.ErrDuplicatedKey) { + return errors.New("token name already exists in this group") + } + return HandleUpdateResult(result, ErrTokenNotFound) +} + +func UpdateGroupTokenName(group string, id int, name string) (err error) { + token := &Token{ID: id, GroupID: group} + defer func() { + if err == nil { + if err := CacheDeleteToken(token.Key); err != nil { + logger.SysError("delete token from cache failed: " + err.Error()) + } + } + }() + result := DB. + Model(token). + Clauses(clause.Returning{ + Columns: []clause.Column{ + {Name: "key"}, + }, + }). + Where("id = ? and group_id = ?", id, group). 
+ Update("name", name) + if result.Error != nil && errors.Is(result.Error, gorm.ErrDuplicatedKey) { + return errors.New("token name already exists in this group") + } + return HandleUpdateResult(result, ErrTokenNotFound) +} diff --git a/service/aiproxy/model/utils.go b/service/aiproxy/model/utils.go new file mode 100644 index 00000000000..7868ef2338d --- /dev/null +++ b/service/aiproxy/model/utils.go @@ -0,0 +1,97 @@ +package model + +import ( + "context" + "database/sql/driver" + "errors" + "fmt" + "strings" + + "gorm.io/gorm" + "gorm.io/gorm/clause" +) + +type NotFoundError string + +func (e NotFoundError) Error() string { + return string(e) + " not found" +} + +func HandleNotFound(err error, errMsg ...string) error { + if err != nil && errors.Is(err, gorm.ErrRecordNotFound) { + return NotFoundError(strings.Join(errMsg, " ")) + } + return err +} + +// Helper function to handle update results +func HandleUpdateResult(result *gorm.DB, entityName string) error { + if result.Error != nil { + return HandleNotFound(result.Error, entityName) + } + if result.RowsAffected == 0 { + return NotFoundError(entityName) + } + return nil +} + +func OnConflictDoNothing() *gorm.DB { + return DB.Clauses(clause.OnConflict{ + DoNothing: true, + }) +} + +func BatchRecordConsume(ctx context.Context, group string, code int, channelID int, promptTokens int, completionTokens int, modelName string, tokenID int, tokenName string, amount float64, price float64, completionPrice float64, endpoint string, content string) error { + errs := []error{} + err := RecordConsumeLog(ctx, group, code, channelID, promptTokens, completionTokens, modelName, tokenID, tokenName, amount, price, completionPrice, endpoint, content) + if err != nil { + errs = append(errs, fmt.Errorf("failed to record log: %w", err)) + } + err = UpdateGroupUsedAmountAndRequestCount(group, amount, 1) + if err != nil { + errs = append(errs, fmt.Errorf("failed to update group used amount and request count: %w", err)) + } + err = 
// EmptyNullString is a string that maps the empty value to SQL NULL:
// scanning a NULL yields "" and storing "" writes NULL.
type EmptyNullString string

// String returns the underlying string value.
func (ns EmptyNullString) String() string {
	return string(ns)
}

// Scan implements the [Scanner] interface, accepting string, []byte, or
// nil (which becomes the empty string); any other driver type is rejected.
func (ns *EmptyNullString) Scan(value any) error {
	if value == nil {
		*ns = ""
		return nil
	}
	switch v := value.(type) {
	case string:
		*ns = EmptyNullString(v)
	case []byte:
		*ns = EmptyNullString(v)
	default:
		return fmt.Errorf("unsupported type: %T", v)
	}
	return nil
}

// Value implements the [driver.Valuer] interface: the empty string is
// stored as NULL, everything else as a plain string.
func (ns EmptyNullString) Value() (driver.Value, error) {
	if len(ns) == 0 {
		return nil, nil
	}
	return string(ns), nil
}
our policies") || + strings.Contains(lowerMessage, "your credit balance is too low") || + strings.Contains(lowerMessage, "organization has been disabled") || + strings.Contains(lowerMessage, "credit") || + strings.Contains(lowerMessage, "balance") || + strings.Contains(lowerMessage, "permission denied") || + strings.Contains(lowerMessage, "organization has been restricted") || // groq + strings.Contains(lowerMessage, "已欠费") { + return true + } + return false +} + +func ShouldEnableChannel(err error, openAIErr *model.Error) bool { + if !config.GetAutomaticEnableChannelWhenTestSucceedEnabled() { + return false + } + if err != nil { + return false + } + if openAIErr != nil { + return false + } + return true +} diff --git a/service/aiproxy/monitor/metric.go b/service/aiproxy/monitor/metric.go new file mode 100644 index 00000000000..bd7b9914606 --- /dev/null +++ b/service/aiproxy/monitor/metric.go @@ -0,0 +1,76 @@ +package monitor + +import ( + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/model" +) + +var ( + store = make(map[int][]bool) + metricSuccessChan = make(chan int, config.MetricSuccessChanSize) + metricFailChan = make(chan int, config.MetricFailChanSize) +) + +func consumeSuccess(channelID int) { + if len(store[channelID]) > config.MetricQueueSize { + store[channelID] = store[channelID][1:] + } + store[channelID] = append(store[channelID], true) +} + +func consumeFail(channelID int) (bool, float64) { + if len(store[channelID]) > config.MetricQueueSize { + store[channelID] = store[channelID][1:] + } + store[channelID] = append(store[channelID], false) + successCount := 0 + for _, success := range store[channelID] { + if success { + successCount++ + } + } + successRate := float64(successCount) / float64(len(store[channelID])) + if len(store[channelID]) < config.MetricQueueSize { + return false, successRate + } + if successRate < config.MetricSuccessRateThreshold { + store[channelID] = make([]bool, 0) + 
return true, successRate + } + return false, successRate +} + +func metricSuccessConsumer() { + for channelID := range metricSuccessChan { + consumeSuccess(channelID) + } +} + +func metricFailConsumer() { + for channelID := range metricFailChan { + disable, _ := consumeFail(channelID) + if disable { + _ = model.DisableChannelByID(channelID) + } + } +} + +func init() { + if config.EnableMetric { + go metricSuccessConsumer() + go metricFailConsumer() + } +} + +func Emit(channelID int, success bool) { + if !config.EnableMetric { + return + } + go func() { + if success { + metricSuccessChan <- channelID + } else { + metricFailChan <- channelID + } + }() +} diff --git a/service/aiproxy/relay/adaptor.go b/service/aiproxy/relay/adaptor.go new file mode 100644 index 00000000000..11669d62d06 --- /dev/null +++ b/service/aiproxy/relay/adaptor.go @@ -0,0 +1,63 @@ +package relay + +import ( + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/aiproxy" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/ali" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/anthropic" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/baidu" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/cloudflare" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/cohere" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/coze" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/deepl" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/gemini" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/ollama" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/palm" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/tencent" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/vertexai" + 
"github.com/labring/sealos/service/aiproxy/relay/adaptor/xunfei" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/zhipu" + "github.com/labring/sealos/service/aiproxy/relay/apitype" +) + +func GetAdaptor(apiType int) adaptor.Adaptor { + switch apiType { + case apitype.AIProxyLibrary: + return &aiproxy.Adaptor{} + case apitype.Ali: + return &ali.Adaptor{} + case apitype.Anthropic: + return &anthropic.Adaptor{} + case apitype.AwsClaude: + return &aws.Adaptor{} + case apitype.Baidu: + return &baidu.Adaptor{} + case apitype.Gemini: + return &gemini.Adaptor{} + case apitype.OpenAI: + return &openai.Adaptor{} + case apitype.PaLM: + return &palm.Adaptor{} + case apitype.Tencent: + return &tencent.Adaptor{} + case apitype.Xunfei: + return &xunfei.Adaptor{} + case apitype.Zhipu: + return &zhipu.Adaptor{} + case apitype.Ollama: + return &ollama.Adaptor{} + case apitype.Coze: + return &coze.Adaptor{} + case apitype.Cohere: + return &cohere.Adaptor{} + case apitype.Cloudflare: + return &cloudflare.Adaptor{} + case apitype.DeepL: + return &deepl.Adaptor{} + case apitype.VertexAI: + return &vertexai.Adaptor{} + } + return nil +} diff --git a/service/aiproxy/relay/adaptor/ai360/constants.go b/service/aiproxy/relay/adaptor/ai360/constants.go new file mode 100644 index 00000000000..cfc3cb2833f --- /dev/null +++ b/service/aiproxy/relay/adaptor/ai360/constants.go @@ -0,0 +1,8 @@ +package ai360 + +var ModelList = []string{ + "360GPT_S2_V9", + "embedding-bert-512-v1", + "embedding_s1_v1", + "semantic_similarity_s1_v1", +} diff --git a/service/aiproxy/relay/adaptor/aiproxy/adaptor.go b/service/aiproxy/relay/adaptor/aiproxy/adaptor.go new file mode 100644 index 00000000000..dcc15255878 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aiproxy/adaptor.go @@ -0,0 +1,75 @@ +package aiproxy + +import ( + "errors" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + 
"github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct { + meta *meta.Meta +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/api/library/ask", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + aiProxyLibraryRequest := ConvertRequest(request) + aiProxyLibraryRequest.LibraryID = a.meta.Config.LibraryID + return aiProxyLibraryRequest, nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + err, usage = Handler(c, resp) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "aiproxy" +} diff --git a/service/aiproxy/relay/adaptor/aiproxy/constants.go b/service/aiproxy/relay/adaptor/aiproxy/constants.go new file mode 100644 index 00000000000..1bdad8b1711 --- /dev/null +++ 
b/service/aiproxy/relay/adaptor/aiproxy/constants.go @@ -0,0 +1,9 @@ +package aiproxy + +import "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + +var ModelList = []string{""} + +func init() { + ModelList = openai.ModelList +} diff --git a/service/aiproxy/relay/adaptor/aiproxy/main.go b/service/aiproxy/relay/adaptor/aiproxy/main.go new file mode 100644 index 00000000000..a06b0ab10b0 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aiproxy/main.go @@ -0,0 +1,185 @@ +package aiproxy + +import ( + "bufio" + "fmt" + "net/http" + "slices" + "strconv" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/common/random" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://docs.aiproxy.io/dev/library#使用已经定制好的知识库进行对话问答 + +func ConvertRequest(request *model.GeneralOpenAIRequest) *LibraryRequest { + query := "" + if len(request.Messages) != 0 { + query = request.Messages[len(request.Messages)-1].StringContent() + } + return &LibraryRequest{ + Model: request.Model, + Stream: request.Stream, + Query: query, + } +} + +func aiProxyDocuments2Markdown(documents []LibraryDocument) string { + if len(documents) == 0 { + return "" + } + content := "\n\n参考文档:\n" + for i, document := range documents { + content += fmt.Sprintf("%d. 
[%s](%s)\n", i+1, document.Title, document.URL) + } + return content +} + +func responseAIProxyLibrary2OpenAI(response *LibraryResponse) *openai.TextResponse { + content := response.Answer + aiProxyDocuments2Markdown(response.Documents) + choice := openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: "assistant", + Content: content, + }, + FinishReason: "stop", + } + fullTextResponse := openai.TextResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: []openai.TextResponseChoice{choice}, + } + return &fullTextResponse +} + +func documentsAIProxyLibrary(documents []LibraryDocument) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = aiProxyDocuments2Markdown(documents) + choice.FinishReason = &constant.StopFinishReason + return &openai.ChatCompletionsStreamResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion.chunk", + Created: helper.GetTimestamp(), + Model: "", + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } +} + +func streamResponseAIProxyLibrary2OpenAI(response *LibraryStreamResponse) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = response.Content + return &openai.ChatCompletionsStreamResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion.chunk", + Created: helper.GetTimestamp(), + Model: response.Model, + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var usage model.Usage + var documents []LibraryDocument + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := slices.Index(data, 
'\n'); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + + common.SetEventStreamHeaders(c) + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var AIProxyLibraryResponse LibraryStreamResponse + err := json.Unmarshal(data, &AIProxyLibraryResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + continue + } + if len(AIProxyLibraryResponse.Documents) != 0 { + documents = AIProxyLibraryResponse.Documents + } + response := streamResponseAIProxyLibrary2OpenAI(&AIProxyLibraryResponse) + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + response := documentsAIProxyLibrary(documents) + err := render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + render.Done(c) + + return nil, &usage +} + +func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var AIProxyLibraryResponse LibraryResponse + err := json.NewDecoder(resp.Body).Decode(&AIProxyLibraryResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if AIProxyLibraryResponse.ErrCode != 0 { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: AIProxyLibraryResponse.Message, + Type: strconv.Itoa(AIProxyLibraryResponse.ErrCode), + Code: AIProxyLibraryResponse.ErrCode, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseAIProxyLibrary2OpenAI(&AIProxyLibraryResponse) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, 
"marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, err = c.Writer.Write(jsonResponse) + if err != nil { + return openai.ErrorWrapper(err, "write_response_body_failed", http.StatusInternalServerError), nil + } + return nil, &fullTextResponse.Usage +} diff --git a/service/aiproxy/relay/adaptor/aiproxy/model.go b/service/aiproxy/relay/adaptor/aiproxy/model.go new file mode 100644 index 00000000000..4030e5fbc15 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aiproxy/model.go @@ -0,0 +1,32 @@ +package aiproxy + +type LibraryRequest struct { + Model string `json:"model"` + Query string `json:"query"` + LibraryID string `json:"libraryId"` + Stream bool `json:"stream"` +} + +type LibraryError struct { + Message string `json:"message"` + ErrCode int `json:"errCode"` +} + +type LibraryDocument struct { + Title string `json:"title"` + URL string `json:"url"` +} + +type LibraryResponse struct { + LibraryError + Answer string `json:"answer"` + Documents []LibraryDocument `json:"documents"` + Success bool `json:"success"` +} + +type LibraryStreamResponse struct { + Content string `json:"content"` + Model string `json:"model"` + Documents []LibraryDocument `json:"documents"` + Finish bool `json:"finish"` +} diff --git a/service/aiproxy/relay/adaptor/ali/adaptor.go b/service/aiproxy/relay/adaptor/ali/adaptor.go new file mode 100644 index 00000000000..b9c7f7dfabc --- /dev/null +++ b/service/aiproxy/relay/adaptor/ali/adaptor.go @@ -0,0 +1,110 @@ +package ali + +import ( + "errors" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +// https://help.aliyun.com/zh/dashscope/developer-reference/api-details + +type Adaptor 
struct { + meta *meta.Meta +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + switch meta.Mode { + case relaymode.Embeddings: + return meta.BaseURL + "/api/v1/services/embeddings/text-embedding/text-embedding", nil + case relaymode.ImagesGenerations: + return meta.BaseURL + "/api/v1/services/aigc/text2image/image-synthesis", nil + default: + return meta.BaseURL + "/api/v1/services/aigc/text-generation/generation", nil + } +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + if meta.IsStream { + req.Header.Set("Accept", "text/event-stream") + req.Header.Set("X-Dashscope-Sse", "enable") + } + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + + if meta.Mode == relaymode.ImagesGenerations { + req.Header.Set("X-Dashscope-Async", "enable") + } + if a.meta.Config.Plugin != "" { + req.Header.Set("X-Dashscope-Plugin", a.meta.Config.Plugin) + } + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + switch relayMode { + case relaymode.Embeddings: + aliEmbeddingRequest := ConvertEmbeddingRequest(request) + return aliEmbeddingRequest, nil + default: + aliRequest := ConvertRequest(request) + return aliRequest, nil + } +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + aliRequest := ConvertImageRequest(*request) + return aliRequest, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) 
ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + switch meta.Mode { + case relaymode.Embeddings: + err, usage = EmbeddingHandler(c, resp) + case relaymode.ImagesGenerations: + err, usage = ImageHandler(c, resp, meta.APIKey) + default: + err, usage = Handler(c, resp) + } + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "ali" +} diff --git a/service/aiproxy/relay/adaptor/ali/constants.go b/service/aiproxy/relay/adaptor/ali/constants.go new file mode 100644 index 00000000000..3f24ce2e141 --- /dev/null +++ b/service/aiproxy/relay/adaptor/ali/constants.go @@ -0,0 +1,7 @@ +package ali + +var ModelList = []string{ + "qwen-turbo", "qwen-plus", "qwen-max", "qwen-max-longcontext", + "text-embedding-v1", + "ali-stable-diffusion-xl", "ali-stable-diffusion-v1.5", "wanx-v1", +} diff --git a/service/aiproxy/relay/adaptor/ali/image.go b/service/aiproxy/relay/adaptor/ali/image.go new file mode 100644 index 00000000000..d6b01a3b40a --- /dev/null +++ b/service/aiproxy/relay/adaptor/ali/image.go @@ -0,0 +1,188 @@ +package ali + +import ( + "context" + "encoding/base64" + "errors" + "io" + "net/http" + "time" + + "github.com/gin-gonic/gin" + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func ImageHandler(c *gin.Context, resp *http.Response, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) { + responseFormat := c.GetString("response_format") + + var aliTaskResponse TaskResponse + responseBody, err := 
io.ReadAll(resp.Body) + if err != nil { + return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil + } + err = resp.Body.Close() + if err != nil { + return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil + } + err = json.Unmarshal(responseBody, &aliTaskResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + + if aliTaskResponse.Message != "" { + logger.SysErrorf("aliAsyncTask err: %s", responseBody) + return openai.ErrorWrapper(errors.New(aliTaskResponse.Message), "ali_async_task_failed", http.StatusInternalServerError), nil + } + + aliResponse, err := asyncTaskWait(c, aliTaskResponse.Output.TaskID, apiKey) + if err != nil { + return openai.ErrorWrapper(err, "ali_async_task_wait_failed", http.StatusInternalServerError), nil + } + + if aliResponse.Output.TaskStatus != "SUCCEEDED" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: aliResponse.Output.Message, + Type: "ali_error", + Param: "", + Code: aliResponse.Output.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + + fullTextResponse := responseAli2OpenAIImage(aliResponse, responseFormat) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, nil +} + +func asyncTask(ctx context.Context, taskID string, key string) (*TaskResponse, error) { + url := "https://dashscope.aliyuncs.com/api/v1/tasks/" + taskID + + var aliResponse TaskResponse + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return &aliResponse, err + } + + req.Header.Set("Authorization", "Bearer "+key) + + client := &http.Client{} + resp, err := 
client.Do(req) + if err != nil { + logger.SysError("aliAsyncTask client.Do err: " + err.Error()) + return &aliResponse, err + } + defer resp.Body.Close() + + var response TaskResponse + err = json.NewDecoder(resp.Body).Decode(&response) + if err != nil { + logger.SysError("aliAsyncTask NewDecoder err: " + err.Error()) + return &aliResponse, err + } + + return &response, nil +} + +func asyncTaskWait(ctx context.Context, taskID string, key string) (*TaskResponse, error) { + waitSeconds := 2 + step := 0 + maxStep := 20 + + for { + step++ + rsp, err := asyncTask(ctx, taskID, key) + if err != nil { + return nil, err + } + + if rsp.Output.TaskStatus == "" { + return rsp, nil + } + + switch rsp.Output.TaskStatus { + case "FAILED": + fallthrough + case "CANCELED": + fallthrough + case "SUCCEEDED": + fallthrough + case "UNKNOWN": + return rsp, nil + } + if step >= maxStep { + break + } + time.Sleep(time.Duration(waitSeconds) * time.Second) + } + + return nil, errors.New("aliAsyncTaskWait timeout") +} + +func responseAli2OpenAIImage(response *TaskResponse, responseFormat string) *openai.ImageResponse { + imageResponse := openai.ImageResponse{ + Created: helper.GetTimestamp(), + } + + for _, data := range response.Output.Results { + var b64Json string + if responseFormat == "b64_json" { + // 读取 data.Url 的图片数据并转存到 b64Json + imageData, err := getImageData(data.URL) + if err != nil { + // 处理获取图片数据失败的情况 + logger.SysError("getImageData Error getting image data: " + err.Error()) + continue + } + + // 将图片数据转为 Base64 编码的字符串 + b64Json = Base64Encode(imageData) + } else { + // 如果 responseFormat 不是 "b64_json",则直接使用 data.B64Image + b64Json = data.B64Image + } + + imageResponse.Data = append(imageResponse.Data, openai.ImageData{ + URL: data.URL, + B64Json: b64Json, + RevisedPrompt: "", + }) + } + return &imageResponse +} + +func getImageData(url string) ([]byte, error) { + req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, url, nil) + if err != nil { + return 
nil, err + } + response, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + defer response.Body.Close() + + imageData, err := io.ReadAll(response.Body) + if err != nil { + return nil, err + } + + return imageData, nil +} + +func Base64Encode(data []byte) string { + b64Json := base64.StdEncoding.EncodeToString(data) + return b64Json +} diff --git a/service/aiproxy/relay/adaptor/ali/main.go b/service/aiproxy/relay/adaptor/ali/main.go new file mode 100644 index 00000000000..be39baf7227 --- /dev/null +++ b/service/aiproxy/relay/adaptor/ali/main.go @@ -0,0 +1,257 @@ +package ali + +import ( + "bufio" + "net/http" + "slices" + "strings" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://help.aliyun.com/document_detail/613695.html?spm=a2c4g.2399480.0.0.1adb778fAdzP9w#341800c0f8w0r + +const EnableSearchModelSuffix = "-internet" + +func ConvertRequest(request *model.GeneralOpenAIRequest) *ChatRequest { + enableSearch := false + aliModel := request.Model + if strings.HasSuffix(aliModel, EnableSearchModelSuffix) { + enableSearch = true + aliModel = strings.TrimSuffix(aliModel, EnableSearchModelSuffix) + } + if request.TopP != nil && *request.TopP >= 1 { + *request.TopP = 0.9999 + } + return &ChatRequest{ + Model: aliModel, + Input: Input{ + Messages: request.Messages, + }, + Parameters: Parameters{ + EnableSearch: enableSearch, + IncrementalOutput: request.Stream, + Seed: uint64(request.Seed), + MaxTokens: request.MaxTokens, + Temperature: 
request.Temperature, + TopP: request.TopP, + TopK: request.TopK, + ResultFormat: "message", + Tools: request.Tools, + }, + } +} + +func ConvertEmbeddingRequest(request *model.GeneralOpenAIRequest) *EmbeddingRequest { + return &EmbeddingRequest{ + Model: request.Model, + Input: struct { + Texts []string `json:"texts"` + }{ + Texts: request.ParseInput(), + }, + } +} + +func ConvertImageRequest(request model.ImageRequest) *ImageRequest { + var imageRequest ImageRequest + imageRequest.Input.Prompt = request.Prompt + imageRequest.Model = request.Model + imageRequest.Parameters.Size = strings.Replace(request.Size, "x", "*", -1) + imageRequest.Parameters.N = request.N + imageRequest.ResponseFormat = request.ResponseFormat + + return &imageRequest +} + +func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + var aliResponse EmbeddingResponse + err := json.NewDecoder(resp.Body).Decode(&aliResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + + err = resp.Body.Close() + if err != nil { + return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil + } + + if aliResponse.Code != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: aliResponse.Message, + Type: aliResponse.Code, + Param: aliResponse.RequestID, + Code: aliResponse.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + requestModel := c.GetString(ctxkey.RequestModel) + fullTextResponse := embeddingResponseAli2OpenAI(&aliResponse) + fullTextResponse.Model = requestModel + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} 
+ +func embeddingResponseAli2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse { + openAIEmbeddingResponse := openai.EmbeddingResponse{ + Object: "list", + Data: make([]openai.EmbeddingResponseItem, 0, len(response.Output.Embeddings)), + Model: "text-embedding-v1", + Usage: model.Usage{TotalTokens: response.Usage.TotalTokens}, + } + + for _, item := range response.Output.Embeddings { + openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{ + Object: `embedding`, + Index: item.TextIndex, + Embedding: item.Embedding, + }) + } + return &openAIEmbeddingResponse +} + +func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse { + fullTextResponse := openai.TextResponse{ + ID: response.RequestID, + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: response.Output.Choices, + Usage: model.Usage{ + PromptTokens: response.Usage.InputTokens, + CompletionTokens: response.Usage.OutputTokens, + TotalTokens: response.Usage.InputTokens + response.Usage.OutputTokens, + }, + } + return &fullTextResponse +} + +func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletionsStreamResponse { + if len(aliResponse.Output.Choices) == 0 { + return nil + } + aliChoice := aliResponse.Output.Choices[0] + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta = aliChoice.Message + if aliChoice.FinishReason != "null" { + finishReason := aliChoice.FinishReason + choice.FinishReason = &finishReason + } + response := openai.ChatCompletionsStreamResponse{ + ID: aliResponse.RequestID, + Object: "chat.completion.chunk", + Created: helper.GetTimestamp(), + Model: "qwen", + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } + return &response +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var usage model.Usage + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data []byte, 
atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := slices.Index(data, '\n'); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + + common.SetEventStreamHeaders(c) + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 5 || conv.BytesToString(data[:5]) != "data:" { + continue + } + data = data[5:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var aliResponse ChatResponse + err := json.Unmarshal(data, &aliResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + continue + } + if aliResponse.Usage.OutputTokens != 0 { + usage.PromptTokens = aliResponse.Usage.InputTokens + usage.CompletionTokens = aliResponse.Usage.OutputTokens + usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens + } + response := streamResponseAli2OpenAI(&aliResponse) + if response == nil { + continue + } + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, &usage +} + +func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var aliResponse ChatResponse + err := json.NewDecoder(resp.Body).Decode(&aliResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if aliResponse.Code != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: aliResponse.Message, + Type: aliResponse.Code, + Param: aliResponse.RequestID, + Code: aliResponse.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseAli2OpenAI(&aliResponse) + fullTextResponse.Model = "qwen" + jsonResponse, err := 
json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} diff --git a/service/aiproxy/relay/adaptor/ali/model.go b/service/aiproxy/relay/adaptor/ali/model.go new file mode 100644 index 00000000000..a7e858a7ff6 --- /dev/null +++ b/service/aiproxy/relay/adaptor/ali/model.go @@ -0,0 +1,149 @@ +package ali + +import ( + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Input struct { + // Prompt string `json:"prompt"` + Messages []model.Message `json:"messages"` +} + +type Parameters struct { + TopP *float64 `json:"top_p,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + ResultFormat string `json:"result_format,omitempty"` + Tools []model.Tool `json:"tools,omitempty"` + TopK int `json:"top_k,omitempty"` + Seed uint64 `json:"seed,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + EnableSearch bool `json:"enable_search,omitempty"` + IncrementalOutput bool `json:"incremental_output,omitempty"` +} + +type ChatRequest struct { + Model string `json:"model"` + Input Input `json:"input"` + Parameters Parameters `json:"parameters,omitempty"` +} + +type ImageRequest struct { + Input struct { + Prompt string `json:"prompt"` + NegativePrompt string `json:"negative_prompt,omitempty"` + } `json:"input"` + Model string `json:"model"` + ResponseFormat string `json:"response_format,omitempty"` + Parameters struct { + Size string `json:"size,omitempty"` + Steps string `json:"steps,omitempty"` + Scale string `json:"scale,omitempty"` + N int `json:"n,omitempty"` + } `json:"parameters,omitempty"` +} + +type TaskResponse struct { + RequestID string `json:"request_id,omitempty"` + Code string 
`json:"code,omitempty"` + Message string `json:"message,omitempty"` + Output struct { + TaskID string `json:"task_id,omitempty"` + TaskStatus string `json:"task_status,omitempty"` + Code string `json:"code,omitempty"` + Message string `json:"message,omitempty"` + Results []struct { + B64Image string `json:"b64_image,omitempty"` + URL string `json:"url,omitempty"` + Code string `json:"code,omitempty"` + Message string `json:"message,omitempty"` + } `json:"results,omitempty"` + TaskMetrics struct { + Total int `json:"TOTAL,omitempty"` + Succeeded int `json:"SUCCEEDED,omitempty"` + Failed int `json:"FAILED,omitempty"` + } `json:"task_metrics,omitempty"` + } `json:"output,omitempty"` + Usage Usage `json:"usage"` + StatusCode int `json:"status_code,omitempty"` +} + +type Header struct { + Attributes any `json:"attributes,omitempty"` + Action string `json:"action,omitempty"` + Streaming string `json:"streaming,omitempty"` + TaskID string `json:"task_id,omitempty"` + Event string `json:"event,omitempty"` + ErrorCode string `json:"error_code,omitempty"` + ErrorMessage string `json:"error_message,omitempty"` +} + +type Payload struct { + Model string `json:"model,omitempty"` + Task string `json:"task,omitempty"` + TaskGroup string `json:"task_group,omitempty"` + Function string `json:"function,omitempty"` + Input struct { + Text string `json:"text,omitempty"` + } `json:"input,omitempty"` + Parameters struct { + Format string `json:"format,omitempty"` + SampleRate int `json:"sample_rate,omitempty"` + Rate float64 `json:"rate,omitempty"` + } `json:"parameters,omitempty"` + Usage struct { + Characters int `json:"characters,omitempty"` + } `json:"usage,omitempty"` +} + +type WSSMessage struct { + Header Header `json:"header,omitempty"` + Payload Payload `json:"payload,omitempty"` +} + +type EmbeddingRequest struct { + Parameters *struct { + TextType string `json:"text_type,omitempty"` + } `json:"parameters,omitempty"` + Model string `json:"model"` + Input struct { + Texts 
[]string `json:"texts"` + } `json:"input"` +} + +type Embedding struct { + Embedding []float64 `json:"embedding"` + TextIndex int `json:"text_index"` +} + +type EmbeddingResponse struct { + Error + Output struct { + Embeddings []Embedding `json:"embeddings"` + } `json:"output"` + Usage Usage `json:"usage"` +} + +type Error struct { + Code string `json:"code"` + Message string `json:"message"` + RequestID string `json:"request_id"` +} + +type Usage struct { + InputTokens int `json:"input_tokens"` + OutputTokens int `json:"output_tokens"` + TotalTokens int `json:"total_tokens"` +} + +type Output struct { + // Text string `json:"text"` + // FinishReason string `json:"finish_reason"` + Choices []openai.TextResponseChoice `json:"choices"` +} + +type ChatResponse struct { + Error + Output Output `json:"output"` + Usage Usage `json:"usage"` +} diff --git a/service/aiproxy/relay/adaptor/anthropic/adaptor.go b/service/aiproxy/relay/adaptor/anthropic/adaptor.go new file mode 100644 index 00000000000..d3a0b85d0b9 --- /dev/null +++ b/service/aiproxy/relay/adaptor/anthropic/adaptor.go @@ -0,0 +1,84 @@ +package anthropic + +import ( + "errors" + "io" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct{} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/v1/messages", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("X-Api-Key", meta.APIKey) + anthropicVersion := c.Request.Header.Get("Anthropic-Version") + if anthropicVersion == "" { + anthropicVersion = "2023-06-01" + } + req.Header.Set("Anthropic-Version", anthropicVersion) + req.Header.Set("Anthropic-Beta", 
"messages-2023-12-15") + + // https://x.com/alexalbert__/status/1812921642143900036 + // claude-3-5-sonnet can support 8k context + if strings.HasPrefix(meta.ActualModelName, "claude-3-5-sonnet") { + req.Header.Set("Anthropic-Beta", "max-tokens-3-5-sonnet-2024-07-15") + } + + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return ConvertRequest(request), nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "anthropic" +} diff --git a/service/aiproxy/relay/adaptor/anthropic/constants.go b/service/aiproxy/relay/adaptor/anthropic/constants.go new file mode 100644 index 00000000000..cb574706d48 --- /dev/null +++ b/service/aiproxy/relay/adaptor/anthropic/constants.go @@ -0,0 +1,13 @@ +package anthropic + +var ModelList = []string{ + "claude-instant-1.2", "claude-2.0", "claude-2.1", + "claude-3-haiku-20240307", + "claude-3-5-haiku-20241022", + "claude-3-sonnet-20240229", + "claude-3-opus-20240229", + 
"claude-3-5-sonnet-20240620", + "claude-3-5-sonnet-20241022", + "claude-3-5-sonnet-latest", + "claude-3-5-haiku-20241022", +} diff --git a/service/aiproxy/relay/adaptor/anthropic/main.go b/service/aiproxy/relay/adaptor/anthropic/main.go new file mode 100644 index 00000000000..f0efdfa7a7a --- /dev/null +++ b/service/aiproxy/relay/adaptor/anthropic/main.go @@ -0,0 +1,377 @@ +package anthropic + +import ( + "bufio" + "net/http" + "slices" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/image" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +const toolUseType = "tool_use" + +func stopReasonClaude2OpenAI(reason *string) string { + if reason == nil { + return "" + } + switch *reason { + case "end_turn": + return "stop" + case "stop_sequence": + return "stop" + case "max_tokens": + return "length" + case toolUseType: + return "tool_calls" + default: + return *reason + } +} + +func ConvertRequest(textRequest *model.GeneralOpenAIRequest) *Request { + claudeTools := make([]Tool, 0, len(textRequest.Tools)) + + for _, tool := range textRequest.Tools { + if params, ok := tool.Function.Parameters.(map[string]any); ok { + claudeTools = append(claudeTools, Tool{ + Name: tool.Function.Name, + Description: tool.Function.Description, + InputSchema: InputSchema{ + Type: params["type"].(string), + Properties: params["properties"], + Required: params["required"], + }, + }) + } + } + + claudeRequest := Request{ + Model: textRequest.Model, + MaxTokens: textRequest.MaxTokens, + Temperature: textRequest.Temperature, + TopP: textRequest.TopP, + TopK: 
textRequest.TopK, + Stream: textRequest.Stream, + Tools: claudeTools, + } + if len(claudeTools) > 0 { + claudeToolChoice := struct { + Type string `json:"type"` + Name string `json:"name,omitempty"` + }{Type: "auto"} // default value https://docs.anthropic.com/en/docs/build-with-claude/tool-use#controlling-claudes-output + if choice, ok := textRequest.ToolChoice.(map[string]any); ok { + if function, ok := choice["function"].(map[string]any); ok { + claudeToolChoice.Type = "tool" + claudeToolChoice.Name = function["name"].(string) + } + } else if toolChoiceType, ok := textRequest.ToolChoice.(string); ok { + if toolChoiceType == "any" { + claudeToolChoice.Type = toolChoiceType + } + } + claudeRequest.ToolChoice = claudeToolChoice + } + if claudeRequest.MaxTokens == 0 { + claudeRequest.MaxTokens = 4096 + } + // legacy model name mapping + switch claudeRequest.Model { + case "claude-instant-1": + claudeRequest.Model = "claude-instant-1.1" + case "claude-2": + claudeRequest.Model = "claude-2.1" + } + for _, message := range textRequest.Messages { + if message.Role == "system" && claudeRequest.System == "" { + claudeRequest.System = message.StringContent() + continue + } + claudeMessage := Message{ + Role: message.Role, + } + var content Content + if message.IsStringContent() { + content.Type = "text" + content.Text = message.StringContent() + if message.Role == "tool" { + claudeMessage.Role = "user" + content.Type = "tool_result" + content.Content = content.Text + content.Text = "" + content.ToolUseID = message.ToolCallID + } + claudeMessage.Content = append(claudeMessage.Content, content) + for i := range message.ToolCalls { + inputParam := make(map[string]any) + _ = json.Unmarshal(conv.StringToBytes(message.ToolCalls[i].Function.Arguments), &inputParam) + claudeMessage.Content = append(claudeMessage.Content, Content{ + Type: toolUseType, + ID: message.ToolCalls[i].ID, + Name: message.ToolCalls[i].Function.Name, + Input: inputParam, + }) + } + claudeRequest.Messages = 
append(claudeRequest.Messages, claudeMessage) + continue + } + var contents []Content + openaiContent := message.ParseContent() + for _, part := range openaiContent { + var content Content + switch part.Type { + case model.ContentTypeText: + content.Type = "text" + content.Text = part.Text + case model.ContentTypeImageURL: + content.Type = "image" + content.Source = &ImageSource{ + Type: "base64", + } + mimeType, data, _ := image.GetImageFromURL(part.ImageURL.URL) + content.Source.MediaType = mimeType + content.Source.Data = data + } + contents = append(contents, content) + } + claudeMessage.Content = contents + claudeRequest.Messages = append(claudeRequest.Messages, claudeMessage) + } + return &claudeRequest +} + +// https://docs.anthropic.com/claude/reference/messages-streaming +func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) { + var response *Response + var responseText string + var stopReason string + tools := make([]model.Tool, 0) + + switch claudeResponse.Type { + case "message_start": + return nil, claudeResponse.Message + case "content_block_start": + if claudeResponse.ContentBlock != nil { + responseText = claudeResponse.ContentBlock.Text + if claudeResponse.ContentBlock.Type == toolUseType { + tools = append(tools, model.Tool{ + ID: claudeResponse.ContentBlock.ID, + Type: "function", + Function: model.Function{ + Name: claudeResponse.ContentBlock.Name, + Arguments: "", + }, + }) + } + } + case "content_block_delta": + if claudeResponse.Delta != nil { + responseText = claudeResponse.Delta.Text + if claudeResponse.Delta.Type == "input_json_delta" { + tools = append(tools, model.Tool{ + Function: model.Function{ + Arguments: claudeResponse.Delta.PartialJSON, + }, + }) + } + } + case "message_delta": + if claudeResponse.Usage != nil { + response = &Response{ + Usage: *claudeResponse.Usage, + } + } + if claudeResponse.Delta != nil && claudeResponse.Delta.StopReason != nil { + stopReason = 
*claudeResponse.Delta.StopReason + } + } + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = responseText + if len(tools) > 0 { + choice.Delta.Content = nil // compatible with other OpenAI derivative applications, like LobeOpenAICompatibleFactory ... + choice.Delta.ToolCalls = tools + } + choice.Delta.Role = "assistant" + finishReason := stopReasonClaude2OpenAI(&stopReason) + if finishReason != "null" { + choice.FinishReason = &finishReason + } + var openaiResponse openai.ChatCompletionsStreamResponse + openaiResponse.Object = "chat.completion.chunk" + openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice} + return &openaiResponse, response +} + +func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse { + var responseText string + if len(claudeResponse.Content) > 0 { + responseText = claudeResponse.Content[0].Text + } + tools := make([]model.Tool, 0) + for _, v := range claudeResponse.Content { + if v.Type == toolUseType { + args, _ := json.Marshal(v.Input) + tools = append(tools, model.Tool{ + ID: v.ID, + Type: "function", // compatible with other OpenAI derivative applications + Function: model.Function{ + Name: v.Name, + Arguments: conv.BytesToString(args), + }, + }) + } + } + choice := openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: "assistant", + Content: responseText, + Name: nil, + ToolCalls: tools, + }, + FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason), + } + fullTextResponse := openai.TextResponse{ + ID: "chatcmpl-" + claudeResponse.ID, + Model: claudeResponse.Model, + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: []openai.TextResponseChoice{choice}, + } + return &fullTextResponse +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + createdTime := helper.GetTimestamp() + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data 
[]byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := slices.Index(data, '\n'); i >= 0 { + return i + 1, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + + common.SetEventStreamHeaders(c) + + var usage model.Usage + var modelName string + var id string + var lastToolCallChoice openai.ChatCompletionsStreamResponseChoice + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var claudeResponse StreamResponse + err := json.Unmarshal(data, &claudeResponse) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + + response, meta := StreamResponseClaude2OpenAI(&claudeResponse) + if response == nil { + continue + } + if meta != nil { + usage.PromptTokens += meta.Usage.InputTokens + usage.CompletionTokens += meta.Usage.OutputTokens + if len(meta.ID) > 0 { // only message_start has an id, otherwise it's a finish_reason event. + modelName = meta.Model + id = "chatcmpl-" + meta.ID + continue + } + if len(lastToolCallChoice.Delta.ToolCalls) > 0 { + lastArgs := &lastToolCallChoice.Delta.ToolCalls[len(lastToolCallChoice.Delta.ToolCalls)-1].Function + if len(lastArgs.Arguments) == 0 { // compatible with OpenAI sending an empty object `{}` when no arguments. 
+ lastArgs.Arguments = "{}" + response.Choices[len(response.Choices)-1].Delta.Content = nil + response.Choices[len(response.Choices)-1].Delta.ToolCalls = lastToolCallChoice.Delta.ToolCalls + } + } + } + + response.ID = id + response.Model = modelName + response.Created = createdTime + + for _, choice := range response.Choices { + if len(choice.Delta.ToolCalls) > 0 { + lastToolCallChoice = choice + } + } + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, &usage +} + +func Handler(c *gin.Context, resp *http.Response, _ int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var claudeResponse Response + err := json.NewDecoder(resp.Body).Decode(&claudeResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if claudeResponse.Error.Type != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: claudeResponse.Error.Message, + Type: claudeResponse.Error.Type, + Param: "", + Code: claudeResponse.Error.Type, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := ResponseClaude2OpenAI(&claudeResponse) + fullTextResponse.Model = modelName + usage := model.Usage{ + PromptTokens: claudeResponse.Usage.InputTokens, + CompletionTokens: claudeResponse.Usage.OutputTokens, + TotalTokens: claudeResponse.Usage.InputTokens + claudeResponse.Usage.OutputTokens, + } + fullTextResponse.Usage = usage + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &usage +} diff --git 
// Request/response types for the Anthropic Messages API.
// https://docs.anthropic.com/claude/reference/messages_post

// Metadata carries request metadata forwarded to Anthropic.
type Metadata struct {
	UserID string `json:"user_id"`
}

// ImageSource describes one inline image attachment (base64-encoded).
type ImageSource struct {
	Type      string `json:"type"`
	MediaType string `json:"media_type"`
	Data      string `json:"data"`
}

// Content is one block of message content: text, image, tool_use or
// tool_result, discriminated by Type.
type Content struct {
	Type   string       `json:"type"`
	Text   string       `json:"text,omitempty"`
	Source *ImageSource `json:"source,omitempty"`
	// tool_calls
	ID        string `json:"id,omitempty"`
	Name      string `json:"name,omitempty"`
	Input     any    `json:"input,omitempty"`
	Content   string `json:"content,omitempty"`
	ToolUseID string `json:"tool_use_id,omitempty"`
}

// Message is one conversation turn made of content blocks.
type Message struct {
	Role    string    `json:"role"`
	Content []Content `json:"content"`
}

// Tool declares one callable tool and its JSON-schema input.
type Tool struct {
	InputSchema InputSchema `json:"input_schema"`
	Name        string      `json:"name"`
	Description string      `json:"description,omitempty"`
}

// InputSchema is the JSON-schema fragment describing a tool's parameters.
type InputSchema struct {
	Properties any    `json:"properties,omitempty"`
	Required   any    `json:"required,omitempty"`
	Type       string `json:"type"`
}

// Request is the Anthropic messages request body.
type Request struct {
	ToolChoice    any       `json:"tool_choice,omitempty"`
	Temperature   *float64  `json:"temperature,omitempty"`
	TopP          *float64  `json:"top_p,omitempty"`
	Model         string    `json:"model"`
	System        string    `json:"system,omitempty"`
	Messages      []Message `json:"messages"`
	StopSequences []string  `json:"stop_sequences,omitempty"`
	Tools         []Tool    `json:"tools,omitempty"`
	MaxTokens     int       `json:"max_tokens,omitempty"`
	TopK          int       `json:"top_k,omitempty"`
	Stream        bool      `json:"stream,omitempty"`
}

// Usage reports prompt/completion token counts from Anthropic.
type Usage struct {
	InputTokens  int `json:"input_tokens"`
	OutputTokens int `json:"output_tokens"`
}

// Error is the Anthropic error envelope.
type Error struct {
	Type    string `json:"type"`
	Message string `json:"message"`
}

// Response is the blocking messages response (also reused for the
// message_start payload in streaming mode).
type Response struct {
	StopReason   *string   `json:"stop_reason"`
	StopSequence *string   `json:"stop_sequence"`
	Error        Error     `json:"error"`
	ID           string    `json:"id"`
	Type         string    `json:"type"`
	Role         string    `json:"role"`
	Model        string    `json:"model"`
	Content      []Content `json:"content"`
	Usage        Usage     `json:"usage"`
}

// Delta is one incremental streaming update (text or partial tool JSON).
type Delta struct {
	StopReason   *string `json:"stop_reason"`
	StopSequence *string `json:"stop_sequence"`
	Type         string  `json:"type"`
	Text         string  `json:"text"`
	PartialJSON  string  `json:"partial_json,omitempty"`
}

// StreamResponse is one SSE event from the messages streaming endpoint,
// discriminated by Type (message_start, content_block_delta, ...).
type StreamResponse struct {
	Message      *Response `json:"message"`
	ContentBlock *Content  `json:"content_block"`
	Delta        *Delta    `json:"delta"`
	Usage        *Usage    `json:"usage"`
	Type         string    `json:"type"`
	Index        int       `json:"index"`
}
aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(meta.Config.AK, meta.Config.SK, "")), + }) +} + +func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + adaptor := GetAdaptor(request.Model) + if adaptor == nil { + return nil, errors.New("adaptor not found") + } + + a.awsAdapter = adaptor + return adaptor.ConvertRequest(c, relayMode, request) +} + +func (a *Adaptor) DoResponse(c *gin.Context, _ *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if a.awsAdapter == nil { + return nil, utils.WrapErr(errors.New("awsAdapter is nil")) + } + return a.awsAdapter.DoResponse(c, a.AwsClient, meta) +} + +func (a *Adaptor) GetModelList() (models []string) { + for model := range adaptors { + models = append(models, model) + } + return +} + +func (a *Adaptor) GetChannelName() string { + return "aws" +} + +func (a *Adaptor) GetRequestURL(_ *meta.Meta) (string, error) { + return "", nil +} + +func (a *Adaptor) SetupRequestHeader(_ *gin.Context, _ *http.Request, _ *meta.Meta) error { + return nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(_ *gin.Context, _ *meta.Meta, _ io.Reader) (*http.Response, error) { + return nil, nil +} diff --git a/service/aiproxy/relay/adaptor/aws/claude/adapter.go b/service/aiproxy/relay/adaptor/aws/claude/adapter.go new file mode 100644 index 00000000000..94e451f9ea5 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/claude/adapter.go @@ -0,0 +1,36 @@ +package aws + +import ( + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/anthropic" + 
"github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/utils" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/pkg/errors" +) + +var _ utils.AwsAdapter = new(Adaptor) + +type Adaptor struct{} + +func (a *Adaptor) ConvertRequest(c *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + claudeReq := anthropic.ConvertRequest(request) + c.Set(ctxkey.RequestModel, request.Model) + c.Set(ctxkey.ConvertedRequest, claudeReq) + return claudeReq, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, awsCli *bedrockruntime.Client, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, awsCli) + } else { + err, usage = Handler(c, awsCli, meta.ActualModelName) + } + return +} diff --git a/service/aiproxy/relay/adaptor/aws/claude/main.go b/service/aiproxy/relay/adaptor/aws/claude/main.go new file mode 100644 index 00000000000..bc7fb873f5a --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/claude/main.go @@ -0,0 +1,205 @@ +// Package aws provides the AWS adaptor for the relay service. 
+package aws + +import ( + "io" + "net/http" + + json "github.com/json-iterator/go" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types" + "github.com/gin-gonic/gin" + "github.com/jinzhu/copier" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/common/render" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/anthropic" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/utils" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/pkg/errors" +) + +// AwsModelIDMap maps internal model identifiers to AWS model identifiers. +// For more details, see: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html +var AwsModelIDMap = map[string]string{ + "claude-instant-1.2": "anthropic.claude-instant-v1", + "claude-2.0": "anthropic.claude-v2", + "claude-2.1": "anthropic.claude-v2:1", + "claude-3-haiku-20240307": "anthropic.claude-3-haiku-20240307-v1:0", + "claude-3-sonnet-20240229": "anthropic.claude-3-sonnet-20240229-v1:0", + "claude-3-opus-20240229": "anthropic.claude-3-opus-20240229-v1:0", + "claude-3-5-sonnet-20240620": "anthropic.claude-3-5-sonnet-20240620-v1:0", + "claude-3-5-sonnet-20241022": "anthropic.claude-3-5-sonnet-20241022-v2:0", + "claude-3-5-sonnet-latest": "anthropic.claude-3-5-sonnet-20241022-v2:0", + "claude-3-5-haiku-20241022": "anthropic.claude-3-5-haiku-20241022-v1:0", +} + +func awsModelID(requestModel string) (string, error) { + if awsModelID, ok := AwsModelIDMap[requestModel]; ok { + return awsModelID, nil + } + + return "", errors.Errorf("model %s not found", requestModel) +} + +func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName 
string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) { + awsModelID, err := awsModelID(c.GetString(ctxkey.RequestModel)) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "awsModelID")), nil + } + + awsReq := &bedrockruntime.InvokeModelInput{ + ModelId: aws.String(awsModelID), + Accept: aws.String("application/json"), + ContentType: aws.String("application/json"), + } + + convReq, ok := c.Get(ctxkey.ConvertedRequest) + if !ok { + return utils.WrapErr(errors.New("request not found")), nil + } + claudeReq := convReq.(*anthropic.Request) + awsClaudeReq := &Request{ + AnthropicVersion: "bedrock-2023-05-31", + } + if err = copier.Copy(awsClaudeReq, claudeReq); err != nil { + return utils.WrapErr(errors.Wrap(err, "copy request")), nil + } + + awsReq.Body, err = json.Marshal(awsClaudeReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "marshal request")), nil + } + + awsResp, err := awsCli.InvokeModel(c.Request.Context(), awsReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "InvokeModel")), nil + } + + claudeResponse := new(anthropic.Response) + err = json.Unmarshal(awsResp.Body, claudeResponse) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "unmarshal response")), nil + } + + openaiResp := anthropic.ResponseClaude2OpenAI(claudeResponse) + openaiResp.Model = modelName + usage := relaymodel.Usage{ + PromptTokens: claudeResponse.Usage.InputTokens, + CompletionTokens: claudeResponse.Usage.OutputTokens, + TotalTokens: claudeResponse.Usage.InputTokens + claudeResponse.Usage.OutputTokens, + } + openaiResp.Usage = usage + + c.JSON(http.StatusOK, openaiResp) + return nil, &usage +} + +func StreamHandler(c *gin.Context, awsCli *bedrockruntime.Client) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) { + createdTime := helper.GetTimestamp() + awsModelID, err := awsModelID(c.GetString(ctxkey.RequestModel)) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "awsModelID")), nil + } + + awsReq := 
&bedrockruntime.InvokeModelWithResponseStreamInput{ + ModelId: aws.String(awsModelID), + Accept: aws.String("application/json"), + ContentType: aws.String("application/json"), + } + + convReq, ok := c.Get(ctxkey.ConvertedRequest) + if !ok { + return utils.WrapErr(errors.New("request not found")), nil + } + claudeReq := convReq.(*anthropic.Request) + + awsClaudeReq := &Request{ + AnthropicVersion: "bedrock-2023-05-31", + } + if err = copier.Copy(awsClaudeReq, claudeReq); err != nil { + return utils.WrapErr(errors.Wrap(err, "copy request")), nil + } + awsReq.Body, err = json.Marshal(awsClaudeReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "marshal request")), nil + } + + awsResp, err := awsCli.InvokeModelWithResponseStream(c.Request.Context(), awsReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "InvokeModelWithResponseStream")), nil + } + stream := awsResp.GetStream() + defer stream.Close() + + c.Writer.Header().Set("Content-Type", "text/event-stream") + var usage relaymodel.Usage + var id string + var lastToolCallChoice openai.ChatCompletionsStreamResponseChoice + + c.Stream(func(_ io.Writer) bool { + event, ok := <-stream.Events() + if !ok { + render.StringData(c, "[DONE]") + return false + } + + switch v := event.(type) { + case *types.ResponseStreamMemberChunk: + claudeResp := anthropic.StreamResponse{} + err := json.Unmarshal(v.Value.Bytes, &claudeResp) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + return false + } + + response, meta := anthropic.StreamResponseClaude2OpenAI(&claudeResp) + if response == nil { + return true + } + if meta != nil { + usage.PromptTokens += meta.Usage.InputTokens + usage.CompletionTokens += meta.Usage.OutputTokens + if len(meta.ID) > 0 { // only message_start has an id, otherwise it's a finish_reason event. 
+ id = "chatcmpl-" + meta.ID + return true + } + if len(lastToolCallChoice.Delta.ToolCalls) > 0 { + lastArgs := &lastToolCallChoice.Delta.ToolCalls[len(lastToolCallChoice.Delta.ToolCalls)-1].Function + if len(lastArgs.Arguments) == 0 { // compatible with OpenAI sending an empty object `{}` when no arguments. + lastArgs.Arguments = "{}" + response.Choices[len(response.Choices)-1].Delta.Content = nil + response.Choices[len(response.Choices)-1].Delta.ToolCalls = lastToolCallChoice.Delta.ToolCalls + } + } + } + response.ID = id + response.Model = c.GetString(ctxkey.OriginalModel) + response.Created = createdTime + + for _, choice := range response.Choices { + if len(choice.Delta.ToolCalls) > 0 { + lastToolCallChoice = choice + } + } + err = render.ObjectData(c, response) + if err != nil { + logger.SysError("error stream response: " + err.Error()) + return false + } + return true + case *types.UnknownUnionMember: + logger.SysErrorf("unknown tag: %s", v.Tag) + return false + default: + logger.SysErrorf("union is nil or unknown type: %v", v) + return false + } + }) + + return nil, &usage +} diff --git a/service/aiproxy/relay/adaptor/aws/claude/model.go b/service/aiproxy/relay/adaptor/aws/claude/model.go new file mode 100644 index 00000000000..d8a14dd6200 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/claude/model.go @@ -0,0 +1,19 @@ +package aws + +import "github.com/labring/sealos/service/aiproxy/relay/adaptor/anthropic" + +// Request is the request to AWS Claude +// +// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-messages.html +type Request struct { + ToolChoice any `json:"tool_choice,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + AnthropicVersion string `json:"anthropic_version"` + System string `json:"system,omitempty"` + Messages []anthropic.Message `json:"messages"` + StopSequences []string `json:"stop_sequences,omitempty"` + Tools []anthropic.Tool 
`json:"tools,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + TopK int `json:"top_k,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/aws/llama3/adapter.go b/service/aiproxy/relay/adaptor/aws/llama3/adapter.go new file mode 100644 index 00000000000..3fcef4b8fab --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/llama3/adapter.go @@ -0,0 +1,36 @@ +package aws + +import ( + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/utils" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/pkg/errors" +) + +var _ utils.AwsAdapter = new(Adaptor) + +type Adaptor struct{} + +func (a *Adaptor) ConvertRequest(c *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + llamaReq := ConvertRequest(request) + c.Set(ctxkey.RequestModel, request.Model) + c.Set(ctxkey.ConvertedRequest, llamaReq) + return llamaReq, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, awsCli *bedrockruntime.Client, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, awsCli) + } else { + err, usage = Handler(c, awsCli, meta.ActualModelName) + } + return +} diff --git a/service/aiproxy/relay/adaptor/aws/llama3/main.go b/service/aiproxy/relay/adaptor/aws/llama3/main.go new file mode 100644 index 00000000000..10b56fde851 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/llama3/main.go @@ -0,0 +1,233 @@ +// Package aws provides the AWS adaptor for the relay service. 
+package aws + +import ( + "bytes" + "fmt" + "io" + "net/http" + "text/template" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/random" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types" + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/utils" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/pkg/errors" +) + +// AwsModelIDMap maps internal model identifiers to AWS model identifiers. +// It currently supports only llama-3-8b and llama-3-70b instruction models. 
+// For more details, see: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html +var AwsModelIDMap = map[string]string{ + "llama3-8b-8192": "meta.llama3-8b-instruct-v1:0", + "llama3-70b-8192": "meta.llama3-70b-instruct-v1:0", +} + +func awsModelID(requestModel string) (string, error) { + if awsModelID, ok := AwsModelIDMap[requestModel]; ok { + return awsModelID, nil + } + + return "", errors.Errorf("model %s not found", requestModel) +} + +// promptTemplate with range +const promptTemplate = `<|begin_of_text|>{{range .Messages}}<|start_header_id|>{{.Role}}<|end_header_id|>{{.StringContent}}<|eot_id|>{{end}}<|start_header_id|>assistant<|end_header_id|> +` + +var promptTpl = template.Must(template.New("llama3-chat").Parse(promptTemplate)) + +func RenderPrompt(messages []relaymodel.Message) string { + var buf bytes.Buffer + err := promptTpl.Execute(&buf, struct{ Messages []relaymodel.Message }{messages}) + if err != nil { + logger.SysError("error rendering prompt messages: " + err.Error()) + } + return buf.String() +} + +func ConvertRequest(textRequest *relaymodel.GeneralOpenAIRequest) *Request { + llamaRequest := Request{ + MaxGenLen: textRequest.MaxTokens, + Temperature: textRequest.Temperature, + TopP: textRequest.TopP, + } + if llamaRequest.MaxGenLen == 0 { + llamaRequest.MaxGenLen = 2048 + } + prompt := RenderPrompt(textRequest.Messages) + llamaRequest.Prompt = prompt + return &llamaRequest +} + +func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) { + awsModelID, err := awsModelID(c.GetString(ctxkey.RequestModel)) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "awsModelID")), nil + } + + awsReq := &bedrockruntime.InvokeModelInput{ + ModelId: aws.String(awsModelID), + Accept: aws.String("application/json"), + ContentType: aws.String("application/json"), + } + + llamaReq, ok := c.Get(ctxkey.ConvertedRequest) + if !ok { + return utils.WrapErr(errors.New("request 
not found")), nil + } + + awsReq.Body, err = json.Marshal(llamaReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "marshal request")), nil + } + + awsResp, err := awsCli.InvokeModel(c.Request.Context(), awsReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "InvokeModel")), nil + } + + var llamaResponse Response + err = json.Unmarshal(awsResp.Body, &llamaResponse) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "unmarshal response")), nil + } + + openaiResp := ResponseLlama2OpenAI(&llamaResponse) + openaiResp.Model = modelName + usage := relaymodel.Usage{ + PromptTokens: llamaResponse.PromptTokenCount, + CompletionTokens: llamaResponse.GenerationTokenCount, + TotalTokens: llamaResponse.PromptTokenCount + llamaResponse.GenerationTokenCount, + } + openaiResp.Usage = usage + + c.JSON(http.StatusOK, openaiResp) + return nil, &usage +} + +func ResponseLlama2OpenAI(llamaResponse *Response) *openai.TextResponse { + var responseText string + if len(llamaResponse.Generation) > 0 { + responseText = llamaResponse.Generation + } + choice := openai.TextResponseChoice{ + Index: 0, + Message: relaymodel.Message{ + Role: "assistant", + Content: responseText, + Name: nil, + }, + FinishReason: llamaResponse.StopReason, + } + fullTextResponse := openai.TextResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: []openai.TextResponseChoice{choice}, + } + return &fullTextResponse +} + +func StreamHandler(c *gin.Context, awsCli *bedrockruntime.Client) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) { + createdTime := helper.GetTimestamp() + awsModelID, err := awsModelID(c.GetString(ctxkey.RequestModel)) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "awsModelID")), nil + } + + awsReq := &bedrockruntime.InvokeModelWithResponseStreamInput{ + ModelId: aws.String(awsModelID), + Accept: aws.String("application/json"), + ContentType: aws.String("application/json"), + } + + 
llamaReq, ok := c.Get(ctxkey.ConvertedRequest) + if !ok { + return utils.WrapErr(errors.New("request not found")), nil + } + + awsReq.Body, err = json.Marshal(llamaReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "marshal request")), nil + } + + awsResp, err := awsCli.InvokeModelWithResponseStream(c.Request.Context(), awsReq) + if err != nil { + return utils.WrapErr(errors.Wrap(err, "InvokeModelWithResponseStream")), nil + } + stream := awsResp.GetStream() + defer stream.Close() + + c.Writer.Header().Set("Content-Type", "text/event-stream") + var usage relaymodel.Usage + c.Stream(func(_ io.Writer) bool { + event, ok := <-stream.Events() + if !ok { + c.Render(-1, common.CustomEvent{Data: "data: [DONE]"}) + return false + } + + switch v := event.(type) { + case *types.ResponseStreamMemberChunk: + var llamaResp StreamResponse + err := json.Unmarshal(v.Value.Bytes, &llamaResp) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + return false + } + + if llamaResp.PromptTokenCount > 0 { + usage.PromptTokens = llamaResp.PromptTokenCount + } + if llamaResp.StopReason == "stop" { + usage.CompletionTokens = llamaResp.GenerationTokenCount + usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens + } + response := StreamResponseLlama2OpenAI(&llamaResp) + response.ID = "chatcmpl-" + random.GetUUID() + response.Model = c.GetString(ctxkey.OriginalModel) + response.Created = createdTime + err = render.ObjectData(c, response) + if err != nil { + logger.SysError("error stream response: " + err.Error()) + return true + } + return true + case *types.UnknownUnionMember: + fmt.Println("unknown tag:", v.Tag) + return false + default: + fmt.Println("union is nil or unknown type") + return false + } + }) + + return nil, &usage +} + +func StreamResponseLlama2OpenAI(llamaResponse *StreamResponse) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = 
llamaResponse.Generation + choice.Delta.Role = "assistant" + finishReason := llamaResponse.StopReason + if finishReason != "null" { + choice.FinishReason = &finishReason + } + var openaiResponse openai.ChatCompletionsStreamResponse + openaiResponse.Object = "chat.completion.chunk" + openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice} + return &openaiResponse +} diff --git a/service/aiproxy/relay/adaptor/aws/llama3/main_test.go b/service/aiproxy/relay/adaptor/aws/llama3/main_test.go new file mode 100644 index 00000000000..22b7aa1ca7d --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/llama3/main_test.go @@ -0,0 +1,45 @@ +package aws_test + +import ( + "testing" + + aws "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/llama3" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/stretchr/testify/assert" +) + +func TestRenderPrompt(t *testing.T) { + messages := []relaymodel.Message{ + { + Role: "user", + Content: "What's your name?", + }, + } + prompt := aws.RenderPrompt(messages) + expected := `<|begin_of_text|><|start_header_id|>user<|end_header_id|>What's your name?<|eot_id|><|start_header_id|>assistant<|end_header_id|> +` + assert.Equal(t, expected, prompt) + + messages = []relaymodel.Message{ + { + Role: "system", + Content: "Your name is Kat. You are a detective.", + }, + { + Role: "user", + Content: "What's your name?", + }, + { + Role: "assistant", + Content: "Kat", + }, + { + Role: "user", + Content: "What's your job?", + }, + } + prompt = aws.RenderPrompt(messages) + expected = `<|begin_of_text|><|start_header_id|>system<|end_header_id|>Your name is Kat. 
You are a detective.<|eot_id|><|start_header_id|>user<|end_header_id|>What's your name?<|eot_id|><|start_header_id|>assistant<|end_header_id|>Kat<|eot_id|><|start_header_id|>user<|end_header_id|>What's your job?<|eot_id|><|start_header_id|>assistant<|end_header_id|> +` + assert.Equal(t, expected, prompt) +} diff --git a/service/aiproxy/relay/adaptor/aws/llama3/model.go b/service/aiproxy/relay/adaptor/aws/llama3/model.go new file mode 100644 index 00000000000..3d8ab8e0957 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/llama3/model.go @@ -0,0 +1,29 @@ +package aws + +// Request is the request to AWS Llama3 +// +// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html +type Request struct { + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + Prompt string `json:"prompt"` + MaxGenLen int `json:"max_gen_len,omitempty"` +} + +// Response is the response from AWS Llama3 +// +// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-meta.html +type Response struct { + Generation string `json:"generation"` + StopReason string `json:"stop_reason"` + PromptTokenCount int `json:"prompt_token_count"` + GenerationTokenCount int `json:"generation_token_count"` +} + +// {'generation': 'Hi', 'prompt_token_count': 15, 'generation_token_count': 1, 'stop_reason': None} +type StreamResponse struct { + Generation string `json:"generation"` + StopReason string `json:"stop_reason"` + PromptTokenCount int `json:"prompt_token_count"` + GenerationTokenCount int `json:"generation_token_count"` +} diff --git a/service/aiproxy/relay/adaptor/aws/registry.go b/service/aiproxy/relay/adaptor/aws/registry.go new file mode 100644 index 00000000000..32083fad9fa --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/registry.go @@ -0,0 +1,37 @@ +package aws + +import ( + claude "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/claude" + llama3 
"github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/llama3" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/aws/utils" +) + +type ModelType int + +const ( + AwsClaude ModelType = iota + 1 + AwsLlama3 +) + +var adaptors = map[string]ModelType{} + +func init() { + for model := range claude.AwsModelIDMap { + adaptors[model] = AwsClaude + } + for model := range llama3.AwsModelIDMap { + adaptors[model] = AwsLlama3 + } +} + +func GetAdaptor(model string) utils.AwsAdapter { + adaptorType := adaptors[model] + switch adaptorType { + case AwsClaude: + return &claude.Adaptor{} + case AwsLlama3: + return &llama3.Adaptor{} + default: + return nil + } +} diff --git a/service/aiproxy/relay/adaptor/aws/utils/adaptor.go b/service/aiproxy/relay/adaptor/aws/utils/adaptor.go new file mode 100644 index 00000000000..1af4579e967 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/utils/adaptor.go @@ -0,0 +1,51 @@ +package utils + +import ( + "errors" + "io" + "net/http" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/credentials" + "github.com/aws/aws-sdk-go-v2/service/bedrockruntime" + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type AwsAdapter interface { + ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) + DoResponse(c *gin.Context, awsCli *bedrockruntime.Client, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) +} + +type Adaptor struct { + Meta *meta.Meta + AwsClient *bedrockruntime.Client +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.Meta = meta + a.AwsClient = bedrockruntime.New(bedrockruntime.Options{ + Region: meta.Config.Region, + Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(meta.Config.AK, meta.Config.SK, "")), + }) +} + +func (a *Adaptor) GetRequestURL(_ *meta.Meta) (string, error) { + return "", nil +} + +func (a *Adaptor) 
SetupRequestHeader(_ *gin.Context, _ *http.Request, _ *meta.Meta) error { + return nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(_ *gin.Context, _ *meta.Meta, _ io.Reader) (*http.Response, error) { + return nil, nil +} diff --git a/service/aiproxy/relay/adaptor/aws/utils/utils.go b/service/aiproxy/relay/adaptor/aws/utils/utils.go new file mode 100644 index 00000000000..0323f8c2195 --- /dev/null +++ b/service/aiproxy/relay/adaptor/aws/utils/utils.go @@ -0,0 +1,16 @@ +package utils + +import ( + "net/http" + + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func WrapErr(err error) *relaymodel.ErrorWithStatusCode { + return &relaymodel.ErrorWithStatusCode{ + StatusCode: http.StatusInternalServerError, + Error: relaymodel.Error{ + Message: err.Error(), + }, + } +} diff --git a/service/aiproxy/relay/adaptor/baichuan/constants.go b/service/aiproxy/relay/adaptor/baichuan/constants.go new file mode 100644 index 00000000000..cb20a1ffe16 --- /dev/null +++ b/service/aiproxy/relay/adaptor/baichuan/constants.go @@ -0,0 +1,7 @@ +package baichuan + +var ModelList = []string{ + "Baichuan2-Turbo", + "Baichuan2-Turbo-192k", + "Baichuan-Text-Embedding", +} diff --git a/service/aiproxy/relay/adaptor/baidu/adaptor.go b/service/aiproxy/relay/adaptor/baidu/adaptor.go new file mode 100644 index 00000000000..6befae42296 --- /dev/null +++ b/service/aiproxy/relay/adaptor/baidu/adaptor.go @@ -0,0 +1,142 @@ +package baidu + +import ( + "errors" + "fmt" + "io" + "net/http" + "strings" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct{} + +func (a *Adaptor) 
Init(_ *meta.Meta) { +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t + suffix := "chat/" + if strings.HasPrefix(meta.ActualModelName, "Embedding") || + strings.HasPrefix(meta.ActualModelName, "bge-large") || + strings.HasPrefix(meta.ActualModelName, "tao-8k") { + suffix = "embeddings/" + } + switch meta.ActualModelName { + case "ERNIE-4.0-8K", "ERNIE-4.0", "ERNIE-Bot-4": + suffix += "completions_pro" + case "ERNIE-Bot": + suffix += "completions" + case "ERNIE-Bot-turbo": + suffix += "eb-instant" + case "ERNIE-Speed": + suffix += "ernie_speed" + case "ERNIE-3.5-8K": + suffix += "completions" + case "ERNIE-3.5-8K-0205": + suffix += "ernie-3.5-8k-0205" + case "ERNIE-3.5-8K-1222": + suffix += "ernie-3.5-8k-1222" + case "ERNIE-Bot-8K": + suffix += "ernie_bot_8k" + case "ERNIE-3.5-4K-0205": + suffix += "ernie-3.5-4k-0205" + case "ERNIE-Speed-8K": + suffix += "ernie_speed" + case "ERNIE-Speed-128K": + suffix += "ernie-speed-128k" + case "ERNIE-Lite-8K-0922": + suffix += "eb-instant" + case "ERNIE-Lite-8K-0308": + suffix += "ernie-lite-8k" + case "ERNIE-Tiny-8K": + suffix += "ernie-tiny-8k" + case "BLOOMZ-7B": + suffix += "bloomz_7b1" + case "Embedding-V1": + suffix += "embedding-v1" + case "bge-large-zh": + suffix += "bge_large_zh" + case "bge-large-en": + suffix += "bge_large_en" + case "tao-8k": + suffix += "tao_8k" + default: + suffix += strings.ToLower(meta.ActualModelName) + } + fullRequestURL := fmt.Sprintf("%s/rpc/2.0/ai_custom/v1/wenxinworkshop/%s", meta.BaseURL, suffix) + var accessToken string + var err error + if accessToken, err = GetAccessToken(meta.APIKey); err != nil { + return "", err + } + fullRequestURL += "?access_token=" + accessToken + return fullRequestURL, nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) 
+ return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + switch relayMode { + case relaymode.Embeddings: + baiduEmbeddingRequest := ConvertEmbeddingRequest(request) + return baiduEmbeddingRequest, nil + default: + baiduRequest := ConvertRequest(request) + return baiduRequest, nil + } +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + switch meta.Mode { + case relaymode.Embeddings: + err, usage = EmbeddingHandler(c, resp) + default: + err, usage = Handler(c, resp) + } + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "baidu" +} diff --git a/service/aiproxy/relay/adaptor/baidu/constants.go b/service/aiproxy/relay/adaptor/baidu/constants.go new file mode 100644 index 00000000000..f952adc6b90 --- /dev/null +++ b/service/aiproxy/relay/adaptor/baidu/constants.go @@ -0,0 +1,20 @@ +package baidu + +var ModelList = []string{ + "ERNIE-4.0-8K", + "ERNIE-3.5-8K", + "ERNIE-3.5-8K-0205", + "ERNIE-3.5-8K-1222", + "ERNIE-Bot-8K", + "ERNIE-3.5-4K-0205", + "ERNIE-Speed-8K", + "ERNIE-Speed-128K", + "ERNIE-Lite-8K-0922", + 
"ERNIE-Lite-8K-0308", + "ERNIE-Tiny-8K", + "BLOOMZ-7B", + "Embedding-V1", + "bge-large-zh", + "bge-large-en", + "tao-8k", +} diff --git a/service/aiproxy/relay/adaptor/baidu/main.go b/service/aiproxy/relay/adaptor/baidu/main.go new file mode 100644 index 00000000000..b9ddf8b869b --- /dev/null +++ b/service/aiproxy/relay/adaptor/baidu/main.go @@ -0,0 +1,317 @@ +package baidu + +import ( + "bufio" + "context" + "errors" + "fmt" + "net/http" + "strings" + "sync" + "time" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/client" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2 + +type TokenResponse struct { + AccessToken string `json:"access_token"` + ExpiresIn int `json:"expires_in"` +} + +type Message struct { + Role string `json:"role"` + Content string `json:"content"` +} + +type ChatRequest struct { + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + PenaltyScore *float64 `json:"penalty_score,omitempty"` + System string `json:"system,omitempty"` + UserID string `json:"user_id,omitempty"` + Messages []model.Message `json:"messages"` + MaxOutputTokens int `json:"max_output_tokens,omitempty"` + Stream bool `json:"stream,omitempty"` + DisableSearch bool `json:"disable_search,omitempty"` + EnableCitation bool `json:"enable_citation,omitempty"` +} + +type Error struct { + ErrorMsg string `json:"error_msg"` + ErrorCode int `json:"error_code"` +} + +var baiduTokenStore sync.Map + +func ConvertRequest(request 
*model.GeneralOpenAIRequest) *ChatRequest { + baiduRequest := ChatRequest{ + Messages: request.Messages, + Temperature: request.Temperature, + TopP: request.TopP, + Stream: request.Stream, + DisableSearch: false, + EnableCitation: false, + MaxOutputTokens: request.MaxTokens, + UserID: request.User, + } + // Convert frequency penalty to penalty score range [1.0, 2.0] + if request.FrequencyPenalty != nil { + penaltyScore := *request.FrequencyPenalty + if penaltyScore < -2.0 { + penaltyScore = -2.0 + } + if penaltyScore > 2.0 { + penaltyScore = 2.0 + } + // Map [-2.0, 2.0] to [1.0, 2.0] + mappedScore := (penaltyScore+2.0)/4.0 + 1.0 + baiduRequest.PenaltyScore = &mappedScore + } + + for i, message := range request.Messages { + if message.Role == "system" { + baiduRequest.System = message.StringContent() + request.Messages = append(request.Messages[:i], request.Messages[i+1:]...) + break + } + } + return &baiduRequest +} + +func responseBaidu2OpenAI(response *ChatResponse) *openai.TextResponse { + choice := openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: "assistant", + Content: response.Result, + }, + FinishReason: "stop", + } + fullTextResponse := openai.TextResponse{ + ID: response.ID, + Object: "chat.completion", + Created: response.Created, + Choices: []openai.TextResponseChoice{choice}, + Usage: response.Usage, + } + return &fullTextResponse +} + +func streamResponseBaidu2OpenAI(baiduResponse *ChatStreamResponse) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = baiduResponse.Result + if baiduResponse.IsEnd { + choice.FinishReason = &constant.StopFinishReason + } + response := openai.ChatCompletionsStreamResponse{ + ID: baiduResponse.ID, + Object: "chat.completion.chunk", + Created: baiduResponse.Created, + Model: "ernie-bot", + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } + return &response +} + +func ConvertEmbeddingRequest(request 
*model.GeneralOpenAIRequest) *EmbeddingRequest { + return &EmbeddingRequest{ + Input: request.ParseInput(), + } +} + +func embeddingResponseBaidu2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse { + openAIEmbeddingResponse := openai.EmbeddingResponse{ + Object: "list", + Data: make([]openai.EmbeddingResponseItem, 0, len(response.Data)), + Model: "baidu-embedding", + Usage: response.Usage, + } + for _, item := range response.Data { + openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{ + Object: item.Object, + Index: item.Index, + Embedding: item.Embedding, + }) + } + return &openAIEmbeddingResponse +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var usage model.Usage + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + + common.SetEventStreamHeaders(c) + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var baiduResponse ChatStreamResponse + err := json.Unmarshal(data, &baiduResponse) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + if baiduResponse.Usage.TotalTokens != 0 { + usage.TotalTokens = baiduResponse.Usage.TotalTokens + usage.PromptTokens = baiduResponse.Usage.PromptTokens + usage.CompletionTokens = baiduResponse.Usage.TotalTokens - baiduResponse.Usage.PromptTokens + } + response := streamResponseBaidu2OpenAI(&baiduResponse) + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, &usage +} + +func Handler(c *gin.Context, resp *http.Response) 
(*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var baiduResponse ChatResponse + err := json.NewDecoder(resp.Body).Decode(&baiduResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if baiduResponse.ErrorMsg != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: baiduResponse.ErrorMsg, + Type: "baidu_error", + Param: "", + Code: baiduResponse.ErrorCode, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseBaidu2OpenAI(&baiduResponse) + fullTextResponse.Model = "ernie-bot" + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} + +func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var baiduResponse EmbeddingResponse + err := json.NewDecoder(resp.Body).Decode(&baiduResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if baiduResponse.ErrorMsg != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: baiduResponse.ErrorMsg, + Type: "baidu_error", + Param: "", + Code: baiduResponse.ErrorCode, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := embeddingResponseBaidu2OpenAI(&baiduResponse) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return 
nil, &fullTextResponse.Usage +} + +func GetAccessToken(apiKey string) (string, error) { + if val, ok := baiduTokenStore.Load(apiKey); ok { + var accessToken AccessToken + if accessToken, ok = val.(AccessToken); ok { + // soon this will expire + if time.Now().Add(time.Hour).After(accessToken.ExpiresAt) { + go func() { + _, _ = getBaiduAccessTokenHelper(apiKey) + }() + } + return accessToken.AccessToken, nil + } + } + accessToken, err := getBaiduAccessTokenHelper(apiKey) + if err != nil { + return "", err + } + if accessToken == nil { + return "", errors.New("GetAccessToken return a nil token") + } + return accessToken.AccessToken, nil +} + +func getBaiduAccessTokenHelper(apiKey string) (*AccessToken, error) { + parts := strings.Split(apiKey, "|") + if len(parts) != 2 { + return nil, errors.New("invalid baidu apikey") + } + req, err := http.NewRequestWithContext(context.Background(), + http.MethodPost, + fmt.Sprintf("https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials&client_id=%s&client_secret=%s", + parts[0], parts[1]), + nil) + if err != nil { + return nil, err + } + req.Header.Add("Content-Type", "application/json") + req.Header.Add("Accept", "application/json") + res, err := client.ImpatientHTTPClient.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var accessToken AccessToken + err = json.NewDecoder(res.Body).Decode(&accessToken) + if err != nil { + return nil, err + } + if accessToken.Error != "" { + return nil, errors.New(accessToken.Error + ": " + accessToken.ErrorDescription) + } + if accessToken.AccessToken == "" { + return nil, errors.New("getBaiduAccessTokenHelper get empty access token") + } + accessToken.ExpiresAt = time.Now().Add(time.Duration(accessToken.ExpiresIn) * time.Second) + baiduTokenStore.Store(apiKey, accessToken) + return &accessToken, nil +} diff --git a/service/aiproxy/relay/adaptor/baidu/model.go b/service/aiproxy/relay/adaptor/baidu/model.go new file mode 100644 index 
00000000000..0f0d52a298c --- /dev/null +++ b/service/aiproxy/relay/adaptor/baidu/model.go @@ -0,0 +1,51 @@ +package baidu + +import ( + "time" + + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type ChatResponse struct { + ID string `json:"id"` + Object string `json:"object"` + Result string `json:"result"` + Error + Usage model.Usage `json:"usage"` + Created int64 `json:"created"` + IsTruncated bool `json:"is_truncated"` + NeedClearHistory bool `json:"need_clear_history"` +} + +type ChatStreamResponse struct { + ChatResponse + SentenceID int `json:"sentence_id"` + IsEnd bool `json:"is_end"` +} + +type EmbeddingRequest struct { + Input []string `json:"input"` +} + +type EmbeddingData struct { + Object string `json:"object"` + Embedding []float64 `json:"embedding"` + Index int `json:"index"` +} + +type EmbeddingResponse struct { + ID string `json:"id"` + Object string `json:"object"` + Data []EmbeddingData `json:"data"` + Error + Usage model.Usage `json:"usage"` + Created int64 `json:"created"` +} + +type AccessToken struct { + ExpiresAt time.Time `json:"-"` + AccessToken string `json:"access_token"` + Error string `json:"error,omitempty"` + ErrorDescription string `json:"error_description,omitempty"` + ExpiresIn int64 `json:"expires_in,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/cloudflare/adaptor.go b/service/aiproxy/relay/adaptor/cloudflare/adaptor.go new file mode 100644 index 00000000000..694ce44c5c2 --- /dev/null +++ b/service/aiproxy/relay/adaptor/cloudflare/adaptor.go @@ -0,0 +1,108 @@ +package cloudflare + +import ( + "errors" + "fmt" + "io" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +type Adaptor struct { + meta *meta.Meta +} + +// ConvertImageRequest implements 
adaptor.Adaptor. +func (*Adaptor) ConvertImageRequest(_ *model.ImageRequest) (any, error) { + return nil, errors.New("not implemented") +} + +// ConvertImageRequest implements adaptor.Adaptor. + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +// WorkerAI cannot be used across accounts with AIGateWay +// https://developers.cloudflare.com/ai-gateway/providers/workersai/#openai-compatible-endpoints +// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/workers-ai +func (a *Adaptor) isAIGateWay(baseURL string) bool { + return strings.HasPrefix(baseURL, "https://gateway.ai.cloudflare.com") && strings.HasSuffix(baseURL, "/workers-ai") +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + isAIGateWay := a.isAIGateWay(meta.BaseURL) + var urlPrefix string + if isAIGateWay { + urlPrefix = meta.BaseURL + } else { + urlPrefix = fmt.Sprintf("%s/client/v4/accounts/%s/ai", meta.BaseURL, meta.Config.UserID) + } + + switch meta.Mode { + case relaymode.ChatCompletions: + return urlPrefix + "/v1/chat/completions", nil + case relaymode.Embeddings: + return urlPrefix + "/v1/embeddings", nil + default: + if isAIGateWay { + return fmt.Sprintf("%s/%s", urlPrefix, meta.ActualModelName), nil + } + return fmt.Sprintf("%s/run/%s", urlPrefix, meta.ActualModelName), nil + } +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + switch relayMode { + case relaymode.Completions: + return ConvertCompletionsRequest(request), nil + case relaymode.ChatCompletions, relaymode.Embeddings: + return request, nil + default: + return nil, errors.New("not implemented") + } +} + +func (a *Adaptor) 
DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp, meta.PromptTokens, meta.ActualModelName) + } else { + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "cloudflare" +} diff --git a/service/aiproxy/relay/adaptor/cloudflare/constant.go b/service/aiproxy/relay/adaptor/cloudflare/constant.go new file mode 100644 index 00000000000..54052aa6ca1 --- /dev/null +++ b/service/aiproxy/relay/adaptor/cloudflare/constant.go @@ -0,0 +1,37 @@ +package cloudflare + +var ModelList = []string{ + "@cf/meta/llama-3.1-8b-instruct", + "@cf/meta/llama-2-7b-chat-fp16", + "@cf/meta/llama-2-7b-chat-int8", + "@cf/mistral/mistral-7b-instruct-v0.1", + "@hf/thebloke/deepseek-coder-6.7b-base-awq", + "@hf/thebloke/deepseek-coder-6.7b-instruct-awq", + "@cf/deepseek-ai/deepseek-math-7b-base", + "@cf/deepseek-ai/deepseek-math-7b-instruct", + "@cf/thebloke/discolm-german-7b-v1-awq", + "@cf/tiiuae/falcon-7b-instruct", + "@cf/google/gemma-2b-it-lora", + "@hf/google/gemma-7b-it", + "@cf/google/gemma-7b-it-lora", + "@hf/nousresearch/hermes-2-pro-mistral-7b", + "@hf/thebloke/llama-2-13b-chat-awq", + "@cf/meta-llama/llama-2-7b-chat-hf-lora", + "@cf/meta/llama-3-8b-instruct", + "@hf/thebloke/llamaguard-7b-awq", + "@hf/thebloke/mistral-7b-instruct-v0.1-awq", + "@hf/mistralai/mistral-7b-instruct-v0.2", + "@cf/mistral/mistral-7b-instruct-v0.2-lora", + 
"@hf/thebloke/neural-chat-7b-v3-1-awq", + "@cf/openchat/openchat-3.5-0106", + "@hf/thebloke/openhermes-2.5-mistral-7b-awq", + "@cf/microsoft/phi-2", + "@cf/qwen/qwen1.5-0.5b-chat", + "@cf/qwen/qwen1.5-1.8b-chat", + "@cf/qwen/qwen1.5-14b-chat-awq", + "@cf/qwen/qwen1.5-7b-chat-awq", + "@cf/defog/sqlcoder-7b-2", + "@hf/nexusflow/starling-lm-7b-beta", + "@cf/tinyllama/tinyllama-1.1b-chat-v1.0", + "@hf/thebloke/zephyr-7b-beta-awq", +} diff --git a/service/aiproxy/relay/adaptor/cloudflare/main.go b/service/aiproxy/relay/adaptor/cloudflare/main.go new file mode 100644 index 00000000000..87679da7489 --- /dev/null +++ b/service/aiproxy/relay/adaptor/cloudflare/main.go @@ -0,0 +1,106 @@ +package cloudflare + +import ( + "bufio" + "net/http" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func ConvertCompletionsRequest(textRequest *model.GeneralOpenAIRequest) *Request { + p, _ := textRequest.Prompt.(string) + return &Request{ + Prompt: p, + MaxTokens: textRequest.MaxTokens, + Stream: textRequest.Stream, + Temperature: textRequest.Temperature, + } +} + +func StreamHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + + common.SetEventStreamHeaders(c) + id := helper.GetResponseID(c) + responseModel := c.GetString(ctxkey.OriginalModel) + var responseText string + + for scanner.Scan() { + data := 
scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var response openai.ChatCompletionsStreamResponse + err := json.Unmarshal(data, &response) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + for _, v := range response.Choices { + v.Delta.Role = "assistant" + responseText += v.Delta.StringContent() + } + response.ID = id + response.Model = modelName + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + usage := openai.ResponseText2Usage(responseText, responseModel, promptTokens) + return nil, usage +} + +func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var response openai.TextResponse + err := json.NewDecoder(resp.Body).Decode(&response) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + + response.Model = modelName + var responseText string + for _, v := range response.Choices { + responseText += v.Message.Content.(string) + } + usage := openai.ResponseText2Usage(responseText, modelName, promptTokens) + response.Usage = *usage + response.ID = helper.GetResponseID(c) + jsonResponse, err := json.Marshal(response) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, usage +} diff --git a/service/aiproxy/relay/adaptor/cloudflare/model.go 
b/service/aiproxy/relay/adaptor/cloudflare/model.go new file mode 100644 index 00000000000..8d1b480192f --- /dev/null +++ b/service/aiproxy/relay/adaptor/cloudflare/model.go @@ -0,0 +1,13 @@ +package cloudflare + +import "github.com/labring/sealos/service/aiproxy/relay/model" + +type Request struct { + Temperature *float64 `json:"temperature,omitempty"` + Lora string `json:"lora,omitempty"` + Prompt string `json:"prompt,omitempty"` + Messages []model.Message `json:"messages,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + Raw bool `json:"raw,omitempty"` + Stream bool `json:"stream,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/cohere/adaptor.go b/service/aiproxy/relay/adaptor/cohere/adaptor.go new file mode 100644 index 00000000000..525b39ab387 --- /dev/null +++ b/service/aiproxy/relay/adaptor/cohere/adaptor.go @@ -0,0 +1,68 @@ +package cohere + +import ( + "errors" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct{} + +// ConvertImageRequest implements adaptor.Adaptor. 
+func (*Adaptor) ConvertImageRequest(_ *model.ImageRequest) (any, error) { + return nil, errors.New("not implemented") +} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/v1/chat", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return ConvertRequest(request), nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "Cohere" +} diff --git a/service/aiproxy/relay/adaptor/cohere/constant.go b/service/aiproxy/relay/adaptor/cohere/constant.go new file mode 100644 index 00000000000..9e70652ccb9 --- /dev/null +++ b/service/aiproxy/relay/adaptor/cohere/constant.go @@ -0,0 +1,14 @@ +package cohere + +var ModelList = []string{ + "command", "command-nightly", + "command-light", "command-light-nightly", + "command-r", "command-r-plus", +} + +func init() { + num := len(ModelList) + for i := 0; 
i < num; i++ { + ModelList = append(ModelList, ModelList[i]+"-internet") + } +} diff --git a/service/aiproxy/relay/adaptor/cohere/main.go b/service/aiproxy/relay/adaptor/cohere/main.go new file mode 100644 index 00000000000..d40e0056e0a --- /dev/null +++ b/service/aiproxy/relay/adaptor/cohere/main.go @@ -0,0 +1,219 @@ +package cohere + +import ( + "bufio" + "fmt" + "net/http" + "strings" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +var WebSearchConnector = Connector{ID: "web-search"} + +func stopReasonCohere2OpenAI(reason *string) string { + if reason == nil { + return "" + } + switch *reason { + case "COMPLETE": + return "stop" + default: + return *reason + } +} + +func ConvertRequest(textRequest *model.GeneralOpenAIRequest) *Request { + cohereRequest := Request{ + Model: textRequest.Model, + Message: "", + MaxTokens: textRequest.MaxTokens, + Temperature: textRequest.Temperature, + P: textRequest.TopP, + K: textRequest.TopK, + Stream: textRequest.Stream, + FrequencyPenalty: textRequest.FrequencyPenalty, + PresencePenalty: textRequest.PresencePenalty, + Seed: int(textRequest.Seed), + } + if cohereRequest.Model == "" { + cohereRequest.Model = "command-r" + } + if strings.HasSuffix(cohereRequest.Model, "-internet") { + cohereRequest.Model = strings.TrimSuffix(cohereRequest.Model, "-internet") + cohereRequest.Connectors = append(cohereRequest.Connectors, WebSearchConnector) + } + for _, message := range textRequest.Messages { + if message.Role == "user" { + cohereRequest.Message = message.Content.(string) + } else { + var role 
string
+			switch message.Role {
+			case "assistant":
+				role = "CHATBOT"
+			case "system":
+				role = "SYSTEM"
+			default:
+				role = "USER"
+			}
+			cohereRequest.ChatHistory = append(cohereRequest.ChatHistory, ChatMessage{
+				Role:    role,
+				Message: message.StringContent(),
+			})
+		}
+	}
+	return &cohereRequest
+}
+
+func StreamResponseCohere2OpenAI(cohereResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
+	var response *Response
+	var responseText string
+	var finishReason string
+
+	switch cohereResponse.EventType {
+	case "stream-start":
+		return nil, nil
+	case "text-generation":
+		responseText += cohereResponse.Text
+	case "stream-end":
+		usage := cohereResponse.Response.Meta.Tokens
+		response = &Response{
+			Meta: Meta{
+				Tokens: Usage{
+					InputTokens:  usage.InputTokens,
+					OutputTokens: usage.OutputTokens,
+				},
+			},
+		}
+		finishReason = stopReasonCohere2OpenAI(cohereResponse.Response.FinishReason)
+	default:
+		return nil, nil
+	}
+
+	var choice openai.ChatCompletionsStreamResponseChoice
+	choice.Delta.Content = responseText
+	choice.Delta.Role = "assistant"
+	if finishReason != "" {
+		choice.FinishReason = &finishReason
+	}
+	var openaiResponse openai.ChatCompletionsStreamResponse
+	openaiResponse.Object = "chat.completion.chunk"
+	openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
+	return &openaiResponse, response
+}
+
+func ResponseCohere2OpenAI(cohereResponse *Response) *openai.TextResponse {
+	choice := openai.TextResponseChoice{
+		Index: 0,
+		Message: model.Message{
+			Role:    "assistant",
+			Content: cohereResponse.Text,
+			Name:    nil,
+		},
+		FinishReason: stopReasonCohere2OpenAI(cohereResponse.FinishReason),
+	}
+	fullTextResponse := openai.TextResponse{
+		ID:      "chatcmpl-" + cohereResponse.ResponseID,
+		Model:   "model",
+		Object:  "chat.completion",
+		Created: helper.GetTimestamp(),
+		Choices: []openai.TextResponseChoice{choice},
+	}
+	return &fullTextResponse
+}
+
+func StreamHandler(c *gin.Context, resp *http.Response) 
(*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + createdTime := helper.GetTimestamp() + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + + common.SetEventStreamHeaders(c) + var usage model.Usage + + for scanner.Scan() { + data := scanner.Text() + data = strings.TrimSuffix(data, "\r") + + var cohereResponse StreamResponse + err := json.Unmarshal(conv.StringToBytes(data), &cohereResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + continue + } + + response, meta := StreamResponseCohere2OpenAI(&cohereResponse) + if meta != nil { + usage.PromptTokens += meta.Meta.Tokens.InputTokens + usage.CompletionTokens += meta.Meta.Tokens.OutputTokens + continue + } + if response == nil { + continue + } + + response.ID = fmt.Sprintf("chatcmpl-%d", createdTime) + response.Model = c.GetString("original_model") + response.Created = createdTime + + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, &usage +} + +func Handler(c *gin.Context, resp *http.Response, _ int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var cohereResponse Response + err := json.NewDecoder(resp.Body).Decode(&cohereResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if cohereResponse.ResponseID == "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: cohereResponse.Message, + Type: cohereResponse.Message, + Param: "", + Code: resp.StatusCode, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := ResponseCohere2OpenAI(&cohereResponse) + fullTextResponse.Model = modelName + usage := model.Usage{ + PromptTokens: cohereResponse.Meta.Tokens.InputTokens, + 
CompletionTokens: cohereResponse.Meta.Tokens.OutputTokens, + TotalTokens: cohereResponse.Meta.Tokens.InputTokens + cohereResponse.Meta.Tokens.OutputTokens, + } + fullTextResponse.Usage = usage + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &usage +} diff --git a/service/aiproxy/relay/adaptor/cohere/model.go b/service/aiproxy/relay/adaptor/cohere/model.go new file mode 100644 index 00000000000..64e1ccc8f5e --- /dev/null +++ b/service/aiproxy/relay/adaptor/cohere/model.go @@ -0,0 +1,147 @@ +package cohere + +type Request struct { + P *float64 `json:"p,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + PresencePenalty *float64 `json:"presence_penalty,omitempty"` + FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` + Model string `json:"model,omitempty"` + Message string `json:"message" required:"true"` + Preamble string `json:"preamble,omitempty"` + PromptTruncation string `json:"prompt_truncation,omitempty"` + ConversationID string `json:"conversation_id,omitempty"` + StopSequences []string `json:"stop_sequences,omitempty"` + Tools []Tool `json:"tools,omitempty"` + ToolResults []ToolResult `json:"tool_results,omitempty"` + Documents []Document `json:"documents,omitempty"` + Connectors []Connector `json:"connectors,omitempty"` + ChatHistory []ChatMessage `json:"chat_history,omitempty"` + K int `json:"k,omitempty"` + MaxInputTokens int `json:"max_input_tokens,omitempty"` + Seed int `json:"seed,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + Stream bool `json:"stream,omitempty"` +} + +type ChatMessage struct { + Role string `json:"role" required:"true"` + Message string `json:"message" required:"true"` +} + +type Tool struct { + ParameterDefinitions 
map[string]ParameterSpec `json:"parameter_definitions"` + Name string `json:"name" required:"true"` + Description string `json:"description" required:"true"` +} + +type ParameterSpec struct { + Description string `json:"description"` + Type string `json:"type" required:"true"` + Required bool `json:"required"` +} + +type ToolResult struct { + Call ToolCall `json:"call"` + Outputs []map[string]interface{} `json:"outputs"` +} + +type ToolCall struct { + Parameters map[string]interface{} `json:"parameters" required:"true"` + Name string `json:"name" required:"true"` +} + +type StreamResponse struct { + Response *Response `json:"response,omitempty"` + EventType string `json:"event_type"` + GenerationID string `json:"generation_id,omitempty"` + Text string `json:"text,omitempty"` + FinishReason string `json:"finish_reason,omitempty"` + SearchQueries []*SearchQuery `json:"search_queries,omitempty"` + SearchResults []*SearchResult `json:"search_results,omitempty"` + Documents []*Document `json:"documents,omitempty"` + Citations []*Citation `json:"citations,omitempty"` + IsFinished bool `json:"is_finished"` +} + +type SearchQuery struct { + Text string `json:"text"` + GenerationID string `json:"generation_id"` +} + +type SearchResult struct { + SearchQuery *SearchQuery `json:"search_query"` + Connector *Connector `json:"connector"` + DocumentIDs []string `json:"document_ids"` +} + +type Connector struct { + ID string `json:"id"` +} + +type Document struct { + ID string `json:"id"` + Snippet string `json:"snippet"` + Timestamp string `json:"timestamp"` + Title string `json:"title"` + URL string `json:"url"` +} + +type Citation struct { + Text string `json:"text"` + DocumentIDs []string `json:"document_ids"` + Start int `json:"start"` + End int `json:"end"` +} + +type Response struct { + FinishReason *string `json:"finish_reason"` + ResponseID string `json:"response_id"` + Text string `json:"text"` + GenerationID string `json:"generation_id"` + Message string 
`json:"message"` + ChatHistory []*Message `json:"chat_history"` + Citations []*Citation `json:"citations"` + Documents []*Document `json:"documents"` + SearchResults []*SearchResult `json:"search_results"` + SearchQueries []*SearchQuery `json:"search_queries"` + Meta Meta `json:"meta"` +} + +type Message struct { + Role string `json:"role"` + Message string `json:"message"` +} + +type Version struct { + Version string `json:"version"` +} + +type Units struct { + InputTokens int `json:"input_tokens"` + OutputTokens int `json:"output_tokens"` +} + +type ChatEntry struct { + Role string `json:"role"` + Message string `json:"message"` +} + +type Meta struct { + APIVersion APIVersion `json:"api_version"` + BilledUnits BilledUnits `json:"billed_units"` + Tokens Usage `json:"tokens"` +} + +type APIVersion struct { + Version string `json:"version"` +} + +type BilledUnits struct { + InputTokens int `json:"input_tokens"` + OutputTokens int `json:"output_tokens"` +} + +type Usage struct { + InputTokens int `json:"input_tokens"` + OutputTokens int `json:"output_tokens"` +} diff --git a/service/aiproxy/relay/adaptor/common.go b/service/aiproxy/relay/adaptor/common.go new file mode 100644 index 00000000000..d4d369bb2c1 --- /dev/null +++ b/service/aiproxy/relay/adaptor/common.go @@ -0,0 +1,47 @@ +package adaptor + +import ( + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/client" + "github.com/labring/sealos/service/aiproxy/relay/meta" +) + +func SetupCommonRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) { + req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type")) + req.Header.Set("Accept", c.Request.Header.Get("Accept")) + if meta.IsStream && c.Request.Header.Get("Accept") == "" { + req.Header.Set("Accept", "text/event-stream") + } +} + +func DoRequestHelper(a Adaptor, c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + fullRequestURL, err := 
a.GetRequestURL(meta) + if err != nil { + return nil, fmt.Errorf("get request url failed: %w", err) + } + req, err := http.NewRequestWithContext(c.Request.Context(), c.Request.Method, fullRequestURL, requestBody) + if err != nil { + return nil, fmt.Errorf("new request failed: %w", err) + } + err = a.SetupRequestHeader(c, req, meta) + if err != nil { + return nil, fmt.Errorf("setup request header failed: %w", err) + } + resp, err := DoRequest(c, req) + if err != nil { + return nil, fmt.Errorf("do request failed: %w", err) + } + return resp, nil +} + +func DoRequest(_ *gin.Context, req *http.Request) (*http.Response, error) { + resp, err := client.HTTPClient.Do(req) + if err != nil { + return nil, err + } + return resp, nil +} diff --git a/service/aiproxy/relay/adaptor/coze/adaptor.go b/service/aiproxy/relay/adaptor/coze/adaptor.go new file mode 100644 index 00000000000..4bb92b3285d --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/adaptor.go @@ -0,0 +1,83 @@ +package coze + +import ( + "errors" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct { + meta *meta.Meta +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/open_api/v2/chat", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + request.User = a.meta.Config.UserID + return 
ConvertRequest(request), nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + var responseText *string + if meta.IsStream { + err, responseText = StreamHandler(c, resp) + } else { + err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + if responseText != nil { + usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens) + } else { + usage = &model.Usage{} + } + usage.PromptTokens = meta.PromptTokens + usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "coze" +} diff --git a/service/aiproxy/relay/adaptor/coze/constant/contenttype/define.go b/service/aiproxy/relay/adaptor/coze/constant/contenttype/define.go new file mode 100644 index 00000000000..69c876bc4c4 --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/constant/contenttype/define.go @@ -0,0 +1,5 @@ +package contenttype + +const ( + Text = "text" +) diff --git a/service/aiproxy/relay/adaptor/coze/constant/event/define.go b/service/aiproxy/relay/adaptor/coze/constant/event/define.go new file mode 100644 index 00000000000..c03e8c173ec --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/constant/event/define.go @@ -0,0 +1,7 @@ +package event + +const ( + 
Message = "message" + Done = "done" + Error = "error" +) diff --git a/service/aiproxy/relay/adaptor/coze/constant/messagetype/define.go b/service/aiproxy/relay/adaptor/coze/constant/messagetype/define.go new file mode 100644 index 00000000000..6c1c25db4c8 --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/constant/messagetype/define.go @@ -0,0 +1,6 @@ +package messagetype + +const ( + Answer = "answer" + FollowUp = "follow_up" +) diff --git a/service/aiproxy/relay/adaptor/coze/constants.go b/service/aiproxy/relay/adaptor/coze/constants.go new file mode 100644 index 00000000000..d20fd875804 --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/constants.go @@ -0,0 +1,3 @@ +package coze + +var ModelList = []string{} diff --git a/service/aiproxy/relay/adaptor/coze/main.go b/service/aiproxy/relay/adaptor/coze/main.go new file mode 100644 index 00000000000..da8e57e222d --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/main.go @@ -0,0 +1,195 @@ +package coze + +import ( + "bufio" + "net/http" + "strings" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/coze/constant/messagetype" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://www.coze.com/open + +func stopReasonCoze2OpenAI(reason *string) string { + if reason == nil { + return "" + } + switch *reason { + case "end_turn": + return "stop" + case "stop_sequence": + return "stop" + case "max_tokens": + return "length" + default: + return *reason + } +} + +func ConvertRequest(textRequest *model.GeneralOpenAIRequest) *Request { + cozeRequest := Request{ + 
Stream: textRequest.Stream, + User: textRequest.User, + BotID: strings.TrimPrefix(textRequest.Model, "bot-"), + } + for i, message := range textRequest.Messages { + if i == len(textRequest.Messages)-1 { + cozeRequest.Query = message.StringContent() + continue + } + cozeMessage := Message{ + Role: message.Role, + Content: message.StringContent(), + } + cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage) + } + return &cozeRequest +} + +func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) { + var response *Response + var stopReason string + var choice openai.ChatCompletionsStreamResponseChoice + + if cozeResponse.Message != nil { + if cozeResponse.Message.Type != messagetype.Answer { + return nil, nil + } + choice.Delta.Content = cozeResponse.Message.Content + } + choice.Delta.Role = "assistant" + finishReason := stopReasonCoze2OpenAI(&stopReason) + if finishReason != "null" { + choice.FinishReason = &finishReason + } + var openaiResponse openai.ChatCompletionsStreamResponse + openaiResponse.Object = "chat.completion.chunk" + openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice} + openaiResponse.ID = cozeResponse.ConversationID + return &openaiResponse, response +} + +func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse { + var responseText string + for _, message := range cozeResponse.Messages { + if message.Type == messagetype.Answer { + responseText = message.Content + break + } + } + choice := openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: "assistant", + Content: responseText, + Name: nil, + }, + FinishReason: "stop", + } + fullTextResponse := openai.TextResponse{ + ID: "chatcmpl-" + cozeResponse.ConversationID, + Model: "coze-bot", + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: []openai.TextResponseChoice{choice}, + } + return &fullTextResponse +} + +func StreamHandler(c *gin.Context, resp 
*http.Response) (*model.ErrorWithStatusCode, *string) { + defer resp.Body.Close() + + var responseText string + createdTime := helper.GetTimestamp() + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + + common.SetEventStreamHeaders(c) + var modelName string + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var cozeResponse StreamResponse + err := json.Unmarshal(data, &cozeResponse) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + + response, _ := StreamResponseCoze2OpenAI(&cozeResponse) + if response == nil { + continue + } + + for _, choice := range response.Choices { + responseText += conv.AsString(choice.Delta.Content) + } + response.Model = modelName + response.Created = createdTime + + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, &responseText +} + +func Handler(c *gin.Context, resp *http.Response, _ int, modelName string) (*model.ErrorWithStatusCode, *string) { + defer resp.Body.Close() + + var cozeResponse Response + err := json.NewDecoder(resp.Body).Decode(&cozeResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if cozeResponse.Code != 0 { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: cozeResponse.Msg, + Code: cozeResponse.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := ResponseCoze2OpenAI(&cozeResponse) + fullTextResponse.Model = modelName + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, 
"marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + var responseText string + if len(fullTextResponse.Choices) > 0 { + responseText = fullTextResponse.Choices[0].Message.StringContent() + } + return nil, &responseText +} diff --git a/service/aiproxy/relay/adaptor/coze/model.go b/service/aiproxy/relay/adaptor/coze/model.go new file mode 100644 index 00000000000..a43adf8902f --- /dev/null +++ b/service/aiproxy/relay/adaptor/coze/model.go @@ -0,0 +1,38 @@ +package coze + +type Message struct { + Role string `json:"role"` + Type string `json:"type"` + Content string `json:"content"` + ContentType string `json:"content_type"` +} + +type ErrorInformation struct { + Msg string `json:"msg"` + Code int `json:"code"` +} + +type Request struct { + ConversationID string `json:"conversation_id,omitempty"` + BotID string `json:"bot_id"` + User string `json:"user"` + Query string `json:"query"` + ChatHistory []Message `json:"chat_history,omitempty"` + Stream bool `json:"stream"` +} + +type Response struct { + ConversationID string `json:"conversation_id,omitempty"` + Msg string `json:"msg,omitempty"` + Messages []Message `json:"messages,omitempty"` + Code int `json:"code,omitempty"` +} + +type StreamResponse struct { + Message *Message `json:"message,omitempty"` + ErrorInformation *ErrorInformation `json:"error_information,omitempty"` + Event string `json:"event,omitempty"` + ConversationID string `json:"conversation_id,omitempty"` + Index int `json:"index,omitempty"` + IsFinish bool `json:"is_finish,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/deepl/adaptor.go b/service/aiproxy/relay/adaptor/deepl/adaptor.go new file mode 100644 index 00000000000..3973d8bb3c6 --- /dev/null +++ b/service/aiproxy/relay/adaptor/deepl/adaptor.go @@ -0,0 +1,81 @@ +package deepl + +import ( + "errors" + "io" + "net/http" + + 
"github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct { + meta *meta.Meta + promptText string +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/v2/translate", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "DeepL-Auth-Key "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + convertedRequest, text := ConvertRequest(request) + a.promptText = text + return convertedRequest, nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err = StreamHandler(c, resp, meta.ActualModelName) + } else { + err = Handler(c, resp, meta.ActualModelName) + } + promptTokens := len(a.promptText) + usage = &model.Usage{ + PromptTokens: promptTokens, + TotalTokens: promptTokens, + } + return +} + +func (a *Adaptor) GetModelList() []string { + return 
ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "deepl" +} diff --git a/service/aiproxy/relay/adaptor/deepl/constants.go b/service/aiproxy/relay/adaptor/deepl/constants.go new file mode 100644 index 00000000000..6a4f25454ab --- /dev/null +++ b/service/aiproxy/relay/adaptor/deepl/constants.go @@ -0,0 +1,9 @@ +package deepl + +// https://developers.deepl.com/docs/api-reference/glossaries + +var ModelList = []string{ + "deepl-zh", + "deepl-en", + "deepl-ja", +} diff --git a/service/aiproxy/relay/adaptor/deepl/helper.go b/service/aiproxy/relay/adaptor/deepl/helper.go new file mode 100644 index 00000000000..6d3a914b922 --- /dev/null +++ b/service/aiproxy/relay/adaptor/deepl/helper.go @@ -0,0 +1,11 @@ +package deepl + +import "strings" + +func parseLangFromModelName(modelName string) string { + parts := strings.Split(modelName, "-") + if len(parts) == 1 { + return "ZH" + } + return parts[1] +} diff --git a/service/aiproxy/relay/adaptor/deepl/main.go b/service/aiproxy/relay/adaptor/deepl/main.go new file mode 100644 index 00000000000..2ae86e13f97 --- /dev/null +++ b/service/aiproxy/relay/adaptor/deepl/main.go @@ -0,0 +1,117 @@ +package deepl + +import ( + "net/http" + + "github.com/gin-gonic/gin" + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/render" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/constant/finishreason" + "github.com/labring/sealos/service/aiproxy/relay/constant/role" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://developers.deepl.com/docs/getting-started/your-first-api-request + +func ConvertRequest(textRequest *model.GeneralOpenAIRequest) (*Request, string) { + var text string + if len(textRequest.Messages) != 0 { + text = 
textRequest.Messages[len(textRequest.Messages)-1].StringContent() + } + deeplRequest := Request{ + TargetLang: parseLangFromModelName(textRequest.Model), + Text: []string{text}, + } + return &deeplRequest, text +} + +func StreamResponseDeepL2OpenAI(deeplResponse *Response) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + if len(deeplResponse.Translations) != 0 { + choice.Delta.Content = deeplResponse.Translations[0].Text + } + choice.Delta.Role = role.Assistant + choice.FinishReason = &constant.StopFinishReason + openaiResponse := openai.ChatCompletionsStreamResponse{ + Object: constant.StreamObject, + Created: helper.GetTimestamp(), + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } + return &openaiResponse +} + +func ResponseDeepL2OpenAI(deeplResponse *Response) *openai.TextResponse { + var responseText string + if len(deeplResponse.Translations) != 0 { + responseText = deeplResponse.Translations[0].Text + } + choice := openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: role.Assistant, + Content: responseText, + Name: nil, + }, + FinishReason: finishreason.Stop, + } + fullTextResponse := openai.TextResponse{ + Object: constant.NonStreamObject, + Created: helper.GetTimestamp(), + Choices: []openai.TextResponseChoice{choice}, + } + return &fullTextResponse +} + +func StreamHandler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode { + defer resp.Body.Close() + + var deeplResponse Response + err := json.NewDecoder(resp.Body).Decode(&deeplResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError) + } + fullTextResponse := StreamResponseDeepL2OpenAI(&deeplResponse) + fullTextResponse.Model = modelName + fullTextResponse.ID = helper.GetResponseID(c) + common.SetEventStreamHeaders(c) + err = render.ObjectData(c, fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, 
"render_response_body_failed", http.StatusInternalServerError) + } + render.Done(c) + return nil +} + +func Handler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode { + defer resp.Body.Close() + + var deeplResponse Response + err := json.NewDecoder(resp.Body).Decode(&deeplResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError) + } + if deeplResponse.Message != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: deeplResponse.Message, + Code: "deepl_error", + }, + StatusCode: resp.StatusCode, + } + } + fullTextResponse := ResponseDeepL2OpenAI(&deeplResponse) + fullTextResponse.Model = modelName + fullTextResponse.ID = helper.GetResponseID(c) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError) + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil +} diff --git a/service/aiproxy/relay/adaptor/deepl/model.go b/service/aiproxy/relay/adaptor/deepl/model.go new file mode 100644 index 00000000000..4f3a3e01d58 --- /dev/null +++ b/service/aiproxy/relay/adaptor/deepl/model.go @@ -0,0 +1,16 @@ +package deepl + +type Request struct { + TargetLang string `json:"target_lang"` + Text []string `json:"text"` +} + +type Translation struct { + DetectedSourceLanguage string `json:"detected_source_language,omitempty"` + Text string `json:"text,omitempty"` +} + +type Response struct { + Message string `json:"message,omitempty"` + Translations []Translation `json:"translations,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/deepseek/constants.go b/service/aiproxy/relay/adaptor/deepseek/constants.go new file mode 100644 index 00000000000..ad840bc2cc0 --- /dev/null +++ b/service/aiproxy/relay/adaptor/deepseek/constants.go @@ -0,0 +1,6 @@ 
+package deepseek + +var ModelList = []string{ + "deepseek-chat", + "deepseek-coder", +} diff --git a/service/aiproxy/relay/adaptor/doubao/constants.go b/service/aiproxy/relay/adaptor/doubao/constants.go new file mode 100644 index 00000000000..dbe819dd511 --- /dev/null +++ b/service/aiproxy/relay/adaptor/doubao/constants.go @@ -0,0 +1,13 @@ +package doubao + +// https://console.volcengine.com/ark/region:ark+cn-beijing/model + +var ModelList = []string{ + "Doubao-pro-128k", + "Doubao-pro-32k", + "Doubao-pro-4k", + "Doubao-lite-128k", + "Doubao-lite-32k", + "Doubao-lite-4k", + "Doubao-embedding", +} diff --git a/service/aiproxy/relay/adaptor/doubao/main.go b/service/aiproxy/relay/adaptor/doubao/main.go new file mode 100644 index 00000000000..9e3cb858574 --- /dev/null +++ b/service/aiproxy/relay/adaptor/doubao/main.go @@ -0,0 +1,23 @@ +package doubao + +import ( + "fmt" + "strings" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +func GetRequestURL(meta *meta.Meta) (string, error) { + switch meta.Mode { + case relaymode.ChatCompletions: + if strings.HasPrefix(meta.ActualModelName, "bot-") { + return meta.BaseURL + "/api/v3/bots/chat/completions", nil + } + return meta.BaseURL + "/api/v3/chat/completions", nil + case relaymode.Embeddings: + return meta.BaseURL + "/api/v3/embeddings", nil + default: + return "", fmt.Errorf("unsupported relay mode %d for doubao", meta.Mode) + } +} diff --git a/service/aiproxy/relay/adaptor/gemini/adaptor.go b/service/aiproxy/relay/adaptor/gemini/adaptor.go new file mode 100644 index 00000000000..09b9e1296ff --- /dev/null +++ b/service/aiproxy/relay/adaptor/gemini/adaptor.go @@ -0,0 +1,101 @@ +package gemini + +import ( + "errors" + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/helper" + channelhelper 
"github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +type Adaptor struct{} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + version := helper.AssignOrDefault(meta.Config.APIVersion, config.GetGeminiVersion()) + var action string + switch meta.Mode { + case relaymode.Embeddings: + action = "batchEmbedContents" + default: + action = "generateContent" + } + + if meta.IsStream { + action = "streamGenerateContent?alt=sse" + } + return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + channelhelper.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("X-Goog-Api-Key", meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + switch relayMode { + case relaymode.Embeddings: + geminiEmbeddingRequest := ConvertEmbeddingRequest(request) + return geminiEmbeddingRequest, nil + default: + geminiRequest := ConvertRequest(request) + return geminiRequest, nil + } +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return channelhelper.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) 
ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + var responseText string + err, responseText = StreamHandler(c, resp) + usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens) + } else { + switch meta.Mode { + case relaymode.Embeddings: + err, usage = EmbeddingHandler(c, resp) + default: + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "google gemini" +} diff --git a/service/aiproxy/relay/adaptor/gemini/constants.go b/service/aiproxy/relay/adaptor/gemini/constants.go new file mode 100644 index 00000000000..b0f84dfc556 --- /dev/null +++ b/service/aiproxy/relay/adaptor/gemini/constants.go @@ -0,0 +1,7 @@ +package gemini + +// https://ai.google.dev/models/gemini + +var ModelList = []string{ + "gemini-pro", "gemini-1.0-pro", "gemini-1.5-flash", "gemini-1.5-pro", "text-embedding-004", "aqa", +} diff --git a/service/aiproxy/relay/adaptor/gemini/main.go b/service/aiproxy/relay/adaptor/gemini/main.go new file mode 100644 index 00000000000..87922183c80 --- /dev/null +++ b/service/aiproxy/relay/adaptor/gemini/main.go @@ -0,0 +1,405 @@ +package gemini + +import ( + "bufio" + "net/http" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/image" + "github.com/labring/sealos/service/aiproxy/common/logger" + 
"github.com/labring/sealos/service/aiproxy/common/random" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" + + "github.com/gin-gonic/gin" +) + +// https://ai.google.dev/docs/gemini_api_overview?hl=zh-cn + +const ( + VisionMaxImageNum = 16 +) + +var mimeTypeMap = map[string]string{ + "json_object": "application/json", + "text": "text/plain", +} + +// Setting safety to the lowest possible values since Gemini is already powerless enough +func ConvertRequest(textRequest *model.GeneralOpenAIRequest) *ChatRequest { + safetySetting := config.GetGeminiSafetySetting() + geminiRequest := ChatRequest{ + Contents: make([]ChatContent, 0, len(textRequest.Messages)), + SafetySettings: []ChatSafetySettings{ + { + Category: "HARM_CATEGORY_HARASSMENT", + Threshold: safetySetting, + }, + { + Category: "HARM_CATEGORY_HATE_SPEECH", + Threshold: safetySetting, + }, + { + Category: "HARM_CATEGORY_SEXUALLY_EXPLICIT", + Threshold: safetySetting, + }, + { + Category: "HARM_CATEGORY_DANGEROUS_CONTENT", + Threshold: safetySetting, + }, + }, + GenerationConfig: ChatGenerationConfig{ + Temperature: textRequest.Temperature, + TopP: textRequest.TopP, + MaxOutputTokens: textRequest.MaxTokens, + }, + } + if textRequest.ResponseFormat != nil { + if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok { + geminiRequest.GenerationConfig.ResponseMimeType = mimeType + } + if textRequest.ResponseFormat.JSONSchema != nil { + geminiRequest.GenerationConfig.ResponseSchema = textRequest.ResponseFormat.JSONSchema.Schema + geminiRequest.GenerationConfig.ResponseMimeType = mimeTypeMap["json_object"] + } + } + if textRequest.Tools != nil { + functions := make([]model.Function, 0, len(textRequest.Tools)) + for _, tool := range textRequest.Tools { + functions = append(functions, tool.Function) + } + geminiRequest.Tools = []ChatTools{ + { + FunctionDeclarations: 
functions, + }, + } + } else if textRequest.Functions != nil { + geminiRequest.Tools = []ChatTools{ + { + FunctionDeclarations: textRequest.Functions, + }, + } + } + shouldAddDummyModelMessage := false + for _, message := range textRequest.Messages { + content := ChatContent{ + Role: message.Role, + Parts: []Part{ + { + Text: message.StringContent(), + }, + }, + } + openaiContent := message.ParseContent() + var parts []Part + imageNum := 0 + for _, part := range openaiContent { + if part.Type == model.ContentTypeText { + parts = append(parts, Part{ + Text: part.Text, + }) + } else if part.Type == model.ContentTypeImageURL { + imageNum++ + if imageNum > VisionMaxImageNum { + continue + } + mimeType, data, _ := image.GetImageFromURL(part.ImageURL.URL) + parts = append(parts, Part{ + InlineData: &InlineData{ + MimeType: mimeType, + Data: data, + }, + }) + } + } + content.Parts = parts + + // there's no assistant role in gemini and API shall vomit if Role is not user or model + if content.Role == "assistant" { + content.Role = "model" + } + // Converting system prompt to prompt from user for the same reason + if content.Role == "system" { + content.Role = "user" + shouldAddDummyModelMessage = true + } + geminiRequest.Contents = append(geminiRequest.Contents, content) + + // If a system message is the last message, we need to add a dummy model message to make gemini happy + if shouldAddDummyModelMessage { + geminiRequest.Contents = append(geminiRequest.Contents, ChatContent{ + Role: "model", + Parts: []Part{ + { + Text: "Okay", + }, + }, + }) + shouldAddDummyModelMessage = false + } + } + + return &geminiRequest +} + +func ConvertEmbeddingRequest(request *model.GeneralOpenAIRequest) *BatchEmbeddingRequest { + inputs := request.ParseInput() + requests := make([]EmbeddingRequest, len(inputs)) + model := "models/" + request.Model + + for i, input := range inputs { + requests[i] = EmbeddingRequest{ + Model: model, + Content: ChatContent{ + Parts: []Part{ + { + Text: input, 
+ }, + }, + }, + } + } + + return &BatchEmbeddingRequest{ + Requests: requests, + } +} + +type ChatResponse struct { + Candidates []ChatCandidate `json:"candidates"` + PromptFeedback ChatPromptFeedback `json:"promptFeedback"` +} + +func (g *ChatResponse) GetResponseText() string { + if g == nil { + return "" + } + if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 { + return g.Candidates[0].Content.Parts[0].Text + } + return "" +} + +type ChatCandidate struct { + FinishReason string `json:"finishReason"` + Content ChatContent `json:"content"` + SafetyRatings []ChatSafetyRating `json:"safetyRatings"` + Index int64 `json:"index"` +} + +type ChatSafetyRating struct { + Category string `json:"category"` + Probability string `json:"probability"` +} + +type ChatPromptFeedback struct { + SafetyRatings []ChatSafetyRating `json:"safetyRatings"` +} + +func getToolCalls(candidate *ChatCandidate) []model.Tool { + var toolCalls []model.Tool + + item := candidate.Content.Parts[0] + if item.FunctionCall == nil { + return toolCalls + } + argsBytes, err := json.Marshal(item.FunctionCall.Arguments) + if err != nil { + logger.FatalLog("getToolCalls failed: " + err.Error()) + return toolCalls + } + toolCall := model.Tool{ + ID: "call_" + random.GetUUID(), + Type: "function", + Function: model.Function{ + Arguments: conv.BytesToString(argsBytes), + Name: item.FunctionCall.FunctionName, + }, + } + toolCalls = append(toolCalls, toolCall) + return toolCalls +} + +func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse { + fullTextResponse := openai.TextResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: make([]openai.TextResponseChoice, 0, len(response.Candidates)), + } + for i, candidate := range response.Candidates { + choice := openai.TextResponseChoice{ + Index: i, + Message: model.Message{ + Role: "assistant", + }, + FinishReason: constant.StopFinishReason, + } + if 
len(candidate.Content.Parts) > 0 {
+			if candidate.Content.Parts[0].FunctionCall != nil {
+				choice.Message.ToolCalls = getToolCalls(&candidate)
+			} else {
+				choice.Message.Content = candidate.Content.Parts[0].Text
+			}
+		} else {
+			choice.Message.Content = ""
+			choice.FinishReason = candidate.FinishReason
+		}
+		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
+	}
+	return &fullTextResponse
+}
+
+func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+	var choice openai.ChatCompletionsStreamResponseChoice
+	choice.Delta.Content = geminiResponse.GetResponseText()
+	// choice.FinishReason = &constant.StopFinishReason
+	var response openai.ChatCompletionsStreamResponse
+	response.ID = "chatcmpl-" + random.GetUUID()
+	response.Created = helper.GetTimestamp()
+	response.Object = "chat.completion.chunk"
+	response.Model = "gemini"
+	response.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
+	return &response
+}
+
+func embeddingResponseGemini2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
+	openAIEmbeddingResponse := openai.EmbeddingResponse{
+		Object: "list",
+		Data:   make([]openai.EmbeddingResponseItem, 0, len(response.Embeddings)),
+		Model:  "gemini-embedding",
+		Usage:  model.Usage{TotalTokens: 0},
+	}
+	for i, item := range response.Embeddings {
+		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
+			Object:    `embedding`,
+			Index:     i,
+			Embedding: item.Values,
+		})
+	}
+	return &openAIEmbeddingResponse
+}
+
+func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
+	defer resp.Body.Close()
+
+	responseText := ""
+	scanner := bufio.NewScanner(resp.Body)
+	scanner.Split(bufio.ScanLines)
+
+	common.SetEventStreamHeaders(c)
+
+	for scanner.Scan() {
+		data := scanner.Bytes()
+		if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " {
+			continue
+		}
+		data = data[6:]
+
+		if 
conv.BytesToString(data) == "[DONE]" { + break + } + + var geminiResponse ChatResponse + err := json.Unmarshal(data, &geminiResponse) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + + response := streamResponseGeminiChat2OpenAI(&geminiResponse) + if response == nil { + continue + } + + responseText += response.Choices[0].Delta.StringContent() + + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, responseText +} + +func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var geminiResponse ChatResponse + err := json.NewDecoder(resp.Body).Decode(&geminiResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if len(geminiResponse.Candidates) == 0 { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: "No candidates returned", + Type: "server_error", + Param: "", + Code: 500, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse) + fullTextResponse.Model = modelName + completionTokens := openai.CountTokenText(geminiResponse.GetResponseText(), modelName) + usage := model.Usage{ + PromptTokens: promptTokens, + CompletionTokens: completionTokens, + TotalTokens: promptTokens + completionTokens, + } + fullTextResponse.Usage = usage + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = 
c.Writer.Write(jsonResponse) + return nil, &usage +} + +func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var geminiEmbeddingResponse EmbeddingResponse + err := json.NewDecoder(resp.Body).Decode(&geminiEmbeddingResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if geminiEmbeddingResponse.Error != nil { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: geminiEmbeddingResponse.Error.Message, + Type: "gemini_error", + Param: "", + Code: geminiEmbeddingResponse.Error.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := embeddingResponseGemini2OpenAI(&geminiEmbeddingResponse) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} diff --git a/service/aiproxy/relay/adaptor/gemini/model.go b/service/aiproxy/relay/adaptor/gemini/model.go new file mode 100644 index 00000000000..b69352d16d5 --- /dev/null +++ b/service/aiproxy/relay/adaptor/gemini/model.go @@ -0,0 +1,76 @@ +package gemini + +type ChatRequest struct { + Contents []ChatContent `json:"contents"` + SafetySettings []ChatSafetySettings `json:"safety_settings,omitempty"` + Tools []ChatTools `json:"tools,omitempty"` + GenerationConfig ChatGenerationConfig `json:"generation_config,omitempty"` +} + +type EmbeddingRequest struct { + Model string `json:"model"` + TaskType string `json:"taskType,omitempty"` + Title string `json:"title,omitempty"` + Content ChatContent `json:"content"` + OutputDimensionality int `json:"outputDimensionality,omitempty"` +} + +type BatchEmbeddingRequest struct { + Requests 
[]EmbeddingRequest `json:"requests"`
+}
+
+type EmbeddingData struct {
+	Values []float64 `json:"values"`
+}
+
+type EmbeddingResponse struct {
+	Error      *Error          `json:"error,omitempty"`
+	Embeddings []EmbeddingData `json:"embeddings"`
+}
+
+type Error struct {
+	Message string `json:"message,omitempty"`
+	Status  string `json:"status,omitempty"`
+	Code    int    `json:"code,omitempty"`
+}
+
+type InlineData struct {
+	MimeType string `json:"mimeType"`
+	Data     string `json:"data"`
+}
+
+type FunctionCall struct {
+	Arguments    any    `json:"args"`
+	FunctionName string `json:"name"`
+}
+
+type Part struct {
+	InlineData   *InlineData   `json:"inlineData,omitempty"`
+	FunctionCall *FunctionCall `json:"functionCall,omitempty"`
+	Text         string        `json:"text,omitempty"`
+}
+
+type ChatContent struct {
+	Role  string `json:"role,omitempty"`
+	Parts []Part `json:"parts"`
+}
+
+type ChatSafetySettings struct {
+	Category  string `json:"category"`
+	Threshold string `json:"threshold"`
+}
+
+type ChatTools struct {
+	FunctionDeclarations any `json:"function_declarations,omitempty"`
+}
+
+type ChatGenerationConfig struct {
+	ResponseSchema   any      `json:"responseSchema,omitempty"`
+	Temperature      *float64 `json:"temperature,omitempty"`
+	TopP             *float64 `json:"topP,omitempty"`
+	ResponseMimeType string   `json:"responseMimeType,omitempty"`
+	StopSequences    []string `json:"stopSequences,omitempty"`
+	TopK             float64  `json:"topK,omitempty"`
+	MaxOutputTokens  int      `json:"maxOutputTokens,omitempty"`
+	CandidateCount   int      `json:"candidateCount,omitempty"`
+}
diff --git a/service/aiproxy/relay/adaptor/groq/constants.go b/service/aiproxy/relay/adaptor/groq/constants.go
new file mode 100644
index 00000000000..0864ebe75e3
--- /dev/null
+++ b/service/aiproxy/relay/adaptor/groq/constants.go
@@ -0,0 +1,26 @@
+package groq
+
+// https://console.groq.com/docs/models
+
+var ModelList = []string{
+	"gemma-7b-it",
+	"gemma2-9b-it",
+	"llama-3.1-70b-versatile",
+	"llama-3.1-8b-instant",
+	"llama-3.2-11b-text-preview",
+	"llama-3.2-11b-vision-preview",
+	"llama-3.2-1b-preview",
+	"llama-3.2-3b-preview",
+	"llama-3.2-90b-text-preview",
+	"llama-3.2-90b-vision-preview",
+	"llama-guard-3-8b",
+	"llama3-70b-8192",
+	"llama3-8b-8192",
+	"llama3-groq-70b-8192-tool-use-preview",
+	"llama3-groq-8b-8192-tool-use-preview",
+	"llava-v1.5-7b-4096-preview",
+	"mixtral-8x7b-32768",
+	"distil-whisper-large-v3-en",
+	"whisper-large-v3",
+	"whisper-large-v3-turbo",
+}
diff --git a/service/aiproxy/relay/adaptor/interface.go b/service/aiproxy/relay/adaptor/interface.go
new file mode 100644
index 00000000000..2429fa075cb
--- /dev/null
+++ b/service/aiproxy/relay/adaptor/interface.go
@@ -0,0 +1,24 @@
+package adaptor
+
+import (
+	"io"
+	"net/http"
+
+	"github.com/gin-gonic/gin"
+	"github.com/labring/sealos/service/aiproxy/relay/meta"
+	"github.com/labring/sealos/service/aiproxy/relay/model"
+)
+
+type Adaptor interface {
+	Init(meta *meta.Meta)
+	GetRequestURL(meta *meta.Meta) (string, error)
+	SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error
+	ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error)
+	ConvertImageRequest(request *model.ImageRequest) (any, error)
+	ConvertSTTRequest(request *http.Request) (io.ReadCloser, error)
+	ConvertTTSRequest(request *model.TextToSpeechRequest) (any, error)
+	DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error)
+	DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode)
+	GetModelList() []string
+	GetChannelName() string
+}
diff --git a/service/aiproxy/relay/adaptor/lingyiwanwu/constants.go b/service/aiproxy/relay/adaptor/lingyiwanwu/constants.go
new file mode 100644
index 00000000000..30000e9dc83
--- /dev/null
+++ b/service/aiproxy/relay/adaptor/lingyiwanwu/constants.go
@@ -0,0 +1,9 @@
+package lingyiwanwu
+
+// https://platform.lingyiwanwu.com/docs
+
+var ModelList = 
[]string{ + "yi-34b-chat-0205", + "yi-34b-chat-200k", + "yi-vl-plus", +} diff --git a/service/aiproxy/relay/adaptor/minimax/constants.go b/service/aiproxy/relay/adaptor/minimax/constants.go new file mode 100644 index 00000000000..1b2fc10485d --- /dev/null +++ b/service/aiproxy/relay/adaptor/minimax/constants.go @@ -0,0 +1,11 @@ +package minimax + +// https://www.minimaxi.com/document/guides/chat-model/V2?id=65e0736ab2845de20908e2dd + +var ModelList = []string{ + "abab6.5-chat", + "abab6.5s-chat", + "abab6-chat", + "abab5.5-chat", + "abab5.5s-chat", +} diff --git a/service/aiproxy/relay/adaptor/minimax/main.go b/service/aiproxy/relay/adaptor/minimax/main.go new file mode 100644 index 00000000000..13e9bc27c24 --- /dev/null +++ b/service/aiproxy/relay/adaptor/minimax/main.go @@ -0,0 +1,15 @@ +package minimax + +import ( + "fmt" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +func GetRequestURL(meta *meta.Meta) (string, error) { + if meta.Mode == relaymode.ChatCompletions { + return meta.BaseURL + "/v1/text/chatcompletion_v2", nil + } + return "", fmt.Errorf("unsupported relay mode %d for minimax", meta.Mode) +} diff --git a/service/aiproxy/relay/adaptor/mistral/constants.go b/service/aiproxy/relay/adaptor/mistral/constants.go new file mode 100644 index 00000000000..cdb157f5721 --- /dev/null +++ b/service/aiproxy/relay/adaptor/mistral/constants.go @@ -0,0 +1,10 @@ +package mistral + +var ModelList = []string{ + "open-mistral-7b", + "open-mixtral-8x7b", + "mistral-small-latest", + "mistral-medium-latest", + "mistral-large-latest", + "mistral-embed", +} diff --git a/service/aiproxy/relay/adaptor/moonshot/constants.go b/service/aiproxy/relay/adaptor/moonshot/constants.go new file mode 100644 index 00000000000..1b86f0fa6e4 --- /dev/null +++ b/service/aiproxy/relay/adaptor/moonshot/constants.go @@ -0,0 +1,7 @@ +package moonshot + +var ModelList = []string{ + "moonshot-v1-8k", + "moonshot-v1-32k", 
+ "moonshot-v1-128k", +} diff --git a/service/aiproxy/relay/adaptor/novita/constants.go b/service/aiproxy/relay/adaptor/novita/constants.go new file mode 100644 index 00000000000..c6618308e22 --- /dev/null +++ b/service/aiproxy/relay/adaptor/novita/constants.go @@ -0,0 +1,19 @@ +package novita + +// https://novita.ai/llm-api + +var ModelList = []string{ + "meta-llama/llama-3-8b-instruct", + "meta-llama/llama-3-70b-instruct", + "nousresearch/hermes-2-pro-llama-3-8b", + "nousresearch/nous-hermes-llama2-13b", + "mistralai/mistral-7b-instruct", + "cognitivecomputations/dolphin-mixtral-8x22b", + "sao10k/l3-70b-euryale-v2.1", + "sophosympatheia/midnight-rose-70b", + "gryphe/mythomax-l2-13b", + "Nous-Hermes-2-Mixtral-8x7B-DPO", + "lzlv_70b", + "teknium/openhermes-2.5-mistral-7b", + "microsoft/wizardlm-2-8x22b", +} diff --git a/service/aiproxy/relay/adaptor/novita/main.go b/service/aiproxy/relay/adaptor/novita/main.go new file mode 100644 index 00000000000..b33c100aed9 --- /dev/null +++ b/service/aiproxy/relay/adaptor/novita/main.go @@ -0,0 +1,15 @@ +package novita + +import ( + "fmt" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +func GetRequestURL(meta *meta.Meta) (string, error) { + if meta.Mode == relaymode.ChatCompletions { + return meta.BaseURL + "/chat/completions", nil + } + return "", fmt.Errorf("unsupported relay mode %d for novita", meta.Mode) +} diff --git a/service/aiproxy/relay/adaptor/ollama/adaptor.go b/service/aiproxy/relay/adaptor/ollama/adaptor.go new file mode 100644 index 00000000000..2c6f048f61e --- /dev/null +++ b/service/aiproxy/relay/adaptor/ollama/adaptor.go @@ -0,0 +1,88 @@ +package ollama + +import ( + "errors" + "io" + "net/http" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + 
"github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct{} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + // https://github.com/ollama/ollama/blob/main/docs/api.md + fullRequestURL := meta.BaseURL + "/api/chat" + if meta.Mode == relaymode.Embeddings { + fullRequestURL = meta.BaseURL + "/api/embed" + } + return fullRequestURL, nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + switch relayMode { + case relaymode.Embeddings: + ollamaEmbeddingRequest := ConvertEmbeddingRequest(request) + return ollamaEmbeddingRequest, nil + default: + return ConvertRequest(request), nil + } +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + switch meta.Mode { + case relaymode.Embeddings: + err, usage = EmbeddingHandler(c, resp) + default: + err, usage = Handler(c, resp) + } + } + return +} + +func (a *Adaptor) 
GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "ollama" +} diff --git a/service/aiproxy/relay/adaptor/ollama/constants.go b/service/aiproxy/relay/adaptor/ollama/constants.go new file mode 100644 index 00000000000..d9dc72a8a51 --- /dev/null +++ b/service/aiproxy/relay/adaptor/ollama/constants.go @@ -0,0 +1,11 @@ +package ollama + +var ModelList = []string{ + "codellama:7b-instruct", + "llama2:7b", + "llama2:latest", + "llama3:latest", + "phi3:latest", + "qwen:0.5b-chat", + "qwen:7b", +} diff --git a/service/aiproxy/relay/adaptor/ollama/main.go b/service/aiproxy/relay/adaptor/ollama/main.go new file mode 100644 index 00000000000..d3967f6bb2d --- /dev/null +++ b/service/aiproxy/relay/adaptor/ollama/main.go @@ -0,0 +1,250 @@ +package ollama + +import ( + "bufio" + "net/http" + "strings" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/random" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/image" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func ConvertRequest(request *model.GeneralOpenAIRequest) *ChatRequest { + ollamaRequest := ChatRequest{ + Model: request.Model, + Options: &Options{ + Seed: int(request.Seed), + Temperature: request.Temperature, + TopP: request.TopP, + FrequencyPenalty: request.FrequencyPenalty, + PresencePenalty: request.PresencePenalty, + NumPredict: request.MaxTokens, + NumCtx: request.NumCtx, + }, + Stream: request.Stream, + } + for _, message := range request.Messages { + 
openaiContent := message.ParseContent() + var imageUrls []string + var contentText string + for _, part := range openaiContent { + switch part.Type { + case model.ContentTypeText: + contentText = part.Text + case model.ContentTypeImageURL: + _, data, _ := image.GetImageFromURL(part.ImageURL.URL) + imageUrls = append(imageUrls, data) + } + } + ollamaRequest.Messages = append(ollamaRequest.Messages, Message{ + Role: message.Role, + Content: contentText, + Images: imageUrls, + }) + } + return &ollamaRequest +} + +func responseOllama2OpenAI(response *ChatResponse) *openai.TextResponse { + choice := openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: response.Message.Role, + Content: response.Message.Content, + }, + } + if response.Done { + choice.FinishReason = "stop" + } + fullTextResponse := openai.TextResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Model: response.Model, + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: []openai.TextResponseChoice{choice}, + Usage: model.Usage{ + PromptTokens: response.PromptEvalCount, + CompletionTokens: response.EvalCount, + TotalTokens: response.PromptEvalCount + response.EvalCount, + }, + } + return &fullTextResponse +} + +func streamResponseOllama2OpenAI(ollamaResponse *ChatResponse) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Role = ollamaResponse.Message.Role + choice.Delta.Content = ollamaResponse.Message.Content + if ollamaResponse.Done { + choice.FinishReason = &constant.StopFinishReason + } + response := openai.ChatCompletionsStreamResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion.chunk", + Created: helper.GetTimestamp(), + Model: ollamaResponse.Model, + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } + return &response +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var usage 
model.Usage + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := strings.Index(conv.BytesToString(data), "}\n"); i >= 0 { + return i + 2, data[0 : i+1], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + + common.SetEventStreamHeaders(c) + + for scanner.Scan() { + data := scanner.Text() + if strings.HasPrefix(data, "}") { + data = strings.TrimPrefix(data, "}") + "}" + } + + var ollamaResponse ChatResponse + err := json.Unmarshal(conv.StringToBytes(data), &ollamaResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + continue + } + + if ollamaResponse.EvalCount != 0 { + usage.PromptTokens = ollamaResponse.PromptEvalCount + usage.CompletionTokens = ollamaResponse.EvalCount + usage.TotalTokens = ollamaResponse.PromptEvalCount + ollamaResponse.EvalCount + } + + response := streamResponseOllama2OpenAI(&ollamaResponse) + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, &usage +} + +func ConvertEmbeddingRequest(request *model.GeneralOpenAIRequest) *EmbeddingRequest { + return &EmbeddingRequest{ + Model: request.Model, + Input: request.ParseInput(), + Options: &Options{ + Seed: int(request.Seed), + Temperature: request.Temperature, + TopP: request.TopP, + FrequencyPenalty: request.FrequencyPenalty, + PresencePenalty: request.PresencePenalty, + }, + } +} + +func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var ollamaResponse EmbeddingResponse + err := json.NewDecoder(resp.Body).Decode(&ollamaResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", 
http.StatusInternalServerError), nil + } + + if ollamaResponse.Error != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: ollamaResponse.Error, + Type: "ollama_error", + Param: "", + Code: "ollama_error", + }, + StatusCode: resp.StatusCode, + }, nil + } + + fullTextResponse := embeddingResponseOllama2OpenAI(&ollamaResponse) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} + +func embeddingResponseOllama2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse { + openAIEmbeddingResponse := openai.EmbeddingResponse{ + Object: "list", + Data: make([]openai.EmbeddingResponseItem, 0, 1), + Model: response.Model, + Usage: model.Usage{TotalTokens: 0}, + } + + for i, embedding := range response.Embeddings { + openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{ + Object: `embedding`, + Index: i, + Embedding: embedding, + }) + } + return &openAIEmbeddingResponse +} + +func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var ollamaResponse ChatResponse + err := json.NewDecoder(resp.Body).Decode(&ollamaResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if ollamaResponse.Error != "" { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: ollamaResponse.Error, + Type: "ollama_error", + Param: "", + Code: "ollama_error", + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseOllama2OpenAI(&ollamaResponse) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return 
openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} diff --git a/service/aiproxy/relay/adaptor/ollama/model.go b/service/aiproxy/relay/adaptor/ollama/model.go new file mode 100644 index 00000000000..7dc4c773c89 --- /dev/null +++ b/service/aiproxy/relay/adaptor/ollama/model.go @@ -0,0 +1,51 @@ +package ollama + +type Options struct { + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` + PresencePenalty *float64 `json:"presence_penalty,omitempty"` + Seed int `json:"seed,omitempty"` + TopK int `json:"top_k,omitempty"` + NumPredict int `json:"num_predict,omitempty"` + NumCtx int `json:"num_ctx,omitempty"` +} + +type Message struct { + Role string `json:"role,omitempty"` + Content string `json:"content,omitempty"` + Images []string `json:"images,omitempty"` +} + +type ChatRequest struct { + Options *Options `json:"options,omitempty"` + Model string `json:"model,omitempty"` + Messages []Message `json:"messages,omitempty"` + Stream bool `json:"stream"` +} + +type ChatResponse struct { + Model string `json:"model,omitempty"` + CreatedAt string `json:"created_at,omitempty"` + Response string `json:"response,omitempty"` + Error string `json:"error,omitempty"` + Message Message `json:"message,omitempty"` + TotalDuration int `json:"total_duration,omitempty"` + LoadDuration int `json:"load_duration,omitempty"` + PromptEvalCount int `json:"prompt_eval_count,omitempty"` + EvalCount int `json:"eval_count,omitempty"` + EvalDuration int `json:"eval_duration,omitempty"` + Done bool `json:"done,omitempty"` +} + +type EmbeddingRequest struct { + Options *Options `json:"options,omitempty"` + Model string `json:"model"` + Input []string `json:"input"` +} + 
+type EmbeddingResponse struct { + Error string `json:"error,omitempty"` + Model string `json:"model"` + Embeddings [][]float64 `json:"embeddings"` +} diff --git a/service/aiproxy/relay/adaptor/openai/adaptor.go b/service/aiproxy/relay/adaptor/openai/adaptor.go new file mode 100644 index 00000000000..defe23233a9 --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/adaptor.go @@ -0,0 +1,212 @@ +package openai + +import ( + "bytes" + "errors" + "fmt" + "io" + "mime/multipart" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/doubao" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/minimax" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/novita" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +type Adaptor struct { + meta *meta.Meta + contentType string + responseFormat string +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + switch meta.ChannelType { + case channeltype.Azure: + switch meta.Mode { + case relaymode.ImagesGenerations: + // https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api + // https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview + return fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion), nil + case relaymode.AudioTranscription: + // https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api + return 
fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion), nil + case relaymode.AudioSpeech: + // https://learn.microsoft.com/en-us/azure/ai-services/openai/text-to-speech-quickstart?tabs=command-line#rest-api + return fmt.Sprintf("%s/openai/deployments/%s/audio/speech?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion), nil + } + + // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api + requestURL := strings.Split(meta.RequestURLPath, "?")[0] + requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.Config.APIVersion) + task := strings.TrimPrefix(requestURL, "/v1/") + model := meta.ActualModelName + model = strings.Replace(model, ".", "", -1) + // https://github.com/labring/sealos/service/aiproxy/issues/1191 + // {your endpoint}/openai/deployments/{your azure_model}/chat/completions?api-version={api_version} + requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model, task) + return GetFullRequestURL(meta.BaseURL, requestURL, meta.ChannelType), nil + case channeltype.Minimax: + return minimax.GetRequestURL(meta) + case channeltype.Doubao: + return doubao.GetRequestURL(meta) + case channeltype.Novita: + return novita.GetRequestURL(meta) + default: + return GetFullRequestURL(meta.BaseURL, meta.RequestURLPath, meta.ChannelType), nil + } +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + if meta.ChannelType == channeltype.Azure { + req.Header.Set("Api-Key", meta.APIKey) + return nil + } + if a.contentType != "" { + req.Header.Set("Content-Type", a.contentType) + } + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + if meta.ChannelType == channeltype.OpenRouter { + req.Header.Set("Http-Referer", "https://github.com/labring/sealos/service/aiproxy") + req.Header.Set("X-Title", 
"One API") + } + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + if request.Stream { + // always return usage in stream mode + if request.StreamOptions == nil { + request.StreamOptions = &model.StreamOptions{} + } + request.StreamOptions.IncludeUsage = true + } + return request, nil +} + +func (a *Adaptor) ConvertTTSRequest(request *model.TextToSpeechRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + if len(request.Input) > 4096 { + return nil, errors.New("input is too long (over 4096 characters)") + } + return request, nil +} + +func (a *Adaptor) ConvertSTTRequest(request *http.Request) (io.ReadCloser, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + err := request.ParseMultipartForm(1024 * 1024 * 4) + if err != nil { + return nil, err + } + + multipartBody := &bytes.Buffer{} + multipartWriter := multipart.NewWriter(multipartBody) + + for key, values := range request.MultipartForm.Value { + for _, value := range values { + if key == "model" { + err = multipartWriter.WriteField(key, a.meta.ActualModelName) + if err != nil { + return nil, err + } + continue + } + if key == "response_format" { + a.responseFormat = value + } + err = multipartWriter.WriteField(key, value) + if err != nil { + return nil, err + } + } + } + + for key, files := range request.MultipartForm.File { + for _, fileHeader := range files { + file, err := fileHeader.Open() + if err != nil { + return nil, err + } + w, err := multipartWriter.CreateFormFile(key, fileHeader.Filename) + if err != nil { + file.Close() + return nil, err + } + _, err = io.Copy(w, file) + file.Close() + if err != nil { + return nil, err + } + } + } + + multipartWriter.Close() + a.contentType = multipartWriter.FormDataContentType() + return io.NopCloser(multipartBody), nil +} + +func (a *Adaptor) 
ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + var responseText string + err, responseText, usage = StreamHandler(c, resp, meta.Mode) + if usage == nil || usage.TotalTokens == 0 { + usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens) + } + if usage.TotalTokens != 0 && usage.PromptTokens == 0 { // some channels don't return prompt tokens & completion tokens + usage.PromptTokens = meta.PromptTokens + usage.CompletionTokens = usage.TotalTokens - meta.PromptTokens + } + } else { + switch meta.Mode { + case relaymode.ImagesGenerations: + err, _ = ImageHandler(c, resp) + case relaymode.AudioTranscription: + err, usage = STTHandler(c, resp, meta, a.responseFormat) + case relaymode.AudioSpeech: + err, usage = TTSHandler(c, resp, meta) + default: + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + } + return +} + +func (a *Adaptor) GetModelList() []string { + _, modelList := GetCompatibleChannelMeta(a.meta.ChannelType) + return modelList +} + +func (a *Adaptor) GetChannelName() string { + channelName, _ := GetCompatibleChannelMeta(a.meta.ChannelType) + return channelName +} diff --git a/service/aiproxy/relay/adaptor/openai/compatible.go b/service/aiproxy/relay/adaptor/openai/compatible.go new file mode 100644 index 00000000000..401488ddc96 --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/compatible.go @@ -0,0 +1,70 @@ +package openai + +import ( + "github.com/labring/sealos/service/aiproxy/relay/adaptor/ai360" + 
"github.com/labring/sealos/service/aiproxy/relay/adaptor/baichuan" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/deepseek" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/doubao" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/groq" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/lingyiwanwu" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/minimax" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/mistral" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/moonshot" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/novita" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/siliconflow" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/stepfun" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/togetherai" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" +) + +var CompatibleChannels = []int{ + channeltype.Azure, + channeltype.AI360, + channeltype.Moonshot, + channeltype.Baichuan, + channeltype.Minimax, + channeltype.Doubao, + channeltype.Mistral, + channeltype.Groq, + channeltype.LingYiWanWu, + channeltype.StepFun, + channeltype.DeepSeek, + channeltype.TogetherAI, + channeltype.Novita, + channeltype.SiliconFlow, +} + +func GetCompatibleChannelMeta(channelType int) (string, []string) { + switch channelType { + case channeltype.Azure: + return "azure", ModelList + case channeltype.AI360: + return "360", ai360.ModelList + case channeltype.Moonshot: + return "moonshot", moonshot.ModelList + case channeltype.Baichuan: + return "baichuan", baichuan.ModelList + case channeltype.Minimax: + return "minimax", minimax.ModelList + case channeltype.Mistral: + return "mistralai", mistral.ModelList + case channeltype.Groq: + return "groq", groq.ModelList + case channeltype.LingYiWanWu: + return "lingyiwanwu", lingyiwanwu.ModelList + case channeltype.StepFun: + return "stepfun", stepfun.ModelList + case channeltype.DeepSeek: + return "deepseek", 
deepseek.ModelList + case channeltype.TogetherAI: + return "together.ai", togetherai.ModelList + case channeltype.Doubao: + return "doubao", doubao.ModelList + case channeltype.Novita: + return "novita", novita.ModelList + case channeltype.SiliconFlow: + return "siliconflow", siliconflow.ModelList + default: + return "openai", ModelList + } +} diff --git a/service/aiproxy/relay/adaptor/openai/constants.go b/service/aiproxy/relay/adaptor/openai/constants.go new file mode 100644 index 00000000000..aacdba1ad3e --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/constants.go @@ -0,0 +1,23 @@ +package openai + +var ModelList = []string{ + "gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-1106", "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-instruct", + "gpt-4", "gpt-4-0314", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview", + "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-0613", + "gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09", + "gpt-4o", "gpt-4o-2024-05-13", + "gpt-4o-2024-08-06", + "chatgpt-4o-latest", + "gpt-4o-mini", "gpt-4o-mini-2024-07-18", + "gpt-4-vision-preview", + "text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large", + "text-curie-001", "text-babbage-001", "text-ada-001", "text-davinci-002", "text-davinci-003", + "text-moderation-latest", "text-moderation-stable", + "text-davinci-edit-001", + "davinci-002", "babbage-002", + "dall-e-2", "dall-e-3", + "whisper-1", + "tts-1", "tts-1-1106", "tts-1-hd", "tts-1-hd-1106", +} diff --git a/service/aiproxy/relay/adaptor/openai/helper.go b/service/aiproxy/relay/adaptor/openai/helper.go new file mode 100644 index 00000000000..4ba22af5b09 --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/helper.go @@ -0,0 +1,31 @@ +package openai + +import ( + "fmt" + "strings" + + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func 
ResponseText2Usage(responseText string, modeName string, promptTokens int) *model.Usage { + usage := &model.Usage{} + usage.PromptTokens = promptTokens + usage.CompletionTokens = CountTokenText(responseText, modeName) + usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens + return usage +} + +func GetFullRequestURL(baseURL string, requestURL string, channelType int) string { + fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL) + + if strings.HasPrefix(baseURL, "https://gateway.ai.cloudflare.com") { + switch channelType { + case channeltype.OpenAI: + fullRequestURL = fmt.Sprintf("%s%s", baseURL, strings.TrimPrefix(requestURL, "/v1")) + case channeltype.Azure: + fullRequestURL = fmt.Sprintf("%s%s", baseURL, strings.TrimPrefix(requestURL, "/openai/deployments")) + } + } + return fullRequestURL +} diff --git a/service/aiproxy/relay/adaptor/openai/image.go b/service/aiproxy/relay/adaptor/openai/image.go new file mode 100644 index 00000000000..d52435fdba2 --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/image.go @@ -0,0 +1,44 @@ +package openai + +import ( + "bytes" + "io" + "net/http" + + "github.com/gin-gonic/gin" + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func ImageHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + var imageResponse ImageResponse + responseBody, err := io.ReadAll(resp.Body) + if err != nil { + return ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil + } + err = resp.Body.Close() + if err != nil { + return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil + } + err = json.Unmarshal(responseBody, &imageResponse) + if err != nil { + return ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + + resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) + + for k, v := range resp.Header { + c.Writer.Header().Set(k, v[0]) + } + 
c.Writer.WriteHeader(resp.StatusCode) + + _, err = io.Copy(c.Writer, resp.Body) + if err != nil { + return ErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil + } + err = resp.Body.Close() + if err != nil { + return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil + } + return nil, nil +} diff --git a/service/aiproxy/relay/adaptor/openai/main.go b/service/aiproxy/relay/adaptor/openai/main.go new file mode 100644 index 00000000000..d25412d3bb6 --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/main.go @@ -0,0 +1,246 @@ +package openai + +import ( + "bufio" + "bytes" + "fmt" + "io" + "net/http" + "strings" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +const ( + dataPrefix = "data: " + done = "[DONE]" + dataPrefixLength = len(dataPrefix) +) + +func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) { + defer resp.Body.Close() + + responseText := "" + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + var usage *model.Usage + + common.SetEventStreamHeaders(c) + + doneRendered := false + for scanner.Scan() { + data := scanner.Text() + if len(data) < dataPrefixLength { // ignore blank line or wrong format + continue + } + if data[:dataPrefixLength] != dataPrefix && data[:dataPrefixLength] != done { + continue + } + if strings.HasPrefix(data[dataPrefixLength:], done) { + render.StringData(c, data) + doneRendered = true + continue + } + switch relayMode { + case 
relaymode.ChatCompletions: + var streamResponse ChatCompletionsStreamResponse + err := json.Unmarshal(conv.StringToBytes(data[dataPrefixLength:]), &streamResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + render.StringData(c, data) // if error happened, pass the data to client + continue // just ignore the error + } + if len(streamResponse.Choices) == 0 && streamResponse.Usage == nil { + // but for empty choice and no usage, we should not pass it to client, this is for azure + continue // just ignore empty choice + } + render.StringData(c, data) + for _, choice := range streamResponse.Choices { + responseText += conv.AsString(choice.Delta.Content) + } + if streamResponse.Usage != nil { + usage = streamResponse.Usage + } + case relaymode.Completions: + render.StringData(c, data) + var streamResponse CompletionsStreamResponse + err := json.Unmarshal(conv.StringToBytes(data[dataPrefixLength:]), &streamResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + continue + } + for _, choice := range streamResponse.Choices { + responseText += choice.Text + } + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + if !doneRendered { + render.Done(c) + } + + return nil, responseText, usage +} + +func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + var textResponse SlimTextResponse + responseBody, err := io.ReadAll(resp.Body) + _ = resp.Body.Close() + if err != nil { + return ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil + } + err = json.Unmarshal(responseBody, &textResponse) + if err != nil { + return ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if textResponse.Error.Type != "" { + return &model.ErrorWithStatusCode{ + Error: textResponse.Error, + 
StatusCode: resp.StatusCode, + }, nil + } + + if textResponse.Usage.TotalTokens == 0 || (textResponse.Usage.PromptTokens == 0 && textResponse.Usage.CompletionTokens == 0) { + completionTokens := 0 + for _, choice := range textResponse.Choices { + completionTokens += CountTokenText(choice.Message.StringContent(), modelName) + } + textResponse.Usage = model.Usage{ + PromptTokens: promptTokens, + CompletionTokens: completionTokens, + TotalTokens: promptTokens + completionTokens, + } + } + + resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) + defer resp.Body.Close() + + for k, v := range resp.Header { + c.Writer.Header().Set(k, v[0]) + } + c.Writer.WriteHeader(resp.StatusCode) + + _, _ = io.Copy(c.Writer, resp.Body) + return nil, &textResponse.Usage +} + +func TTSHandler(c *gin.Context, resp *http.Response, meta *meta.Meta) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + for k, v := range resp.Header { + c.Writer.Header().Set(k, v[0]) + } + + _, _ = io.Copy(c.Writer, resp.Body) + return nil, &model.Usage{ + PromptTokens: meta.PromptTokens, + CompletionTokens: 0, + TotalTokens: meta.PromptTokens, + } +} + +func STTHandler(c *gin.Context, resp *http.Response, meta *meta.Meta, responseFormat string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + responseBody, err := io.ReadAll(resp.Body) + if err != nil { + return ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil + } + + var openAIErr SlimTextResponse + if err = json.Unmarshal(responseBody, &openAIErr); err == nil { + if openAIErr.Error.Message != "" { + return ErrorWrapper(fmt.Errorf("type %s, code %v, message %s", openAIErr.Error.Type, openAIErr.Error.Code, openAIErr.Error.Message), "request_error", http.StatusInternalServerError), nil + } + } + + var text string + switch responseFormat { + case "text": + text = getTextFromText(responseBody) + case "srt": + text, err = getTextFromSRT(responseBody) + case "verbose_json": + 
text, err = getTextFromVerboseJSON(responseBody) + case "vtt": + text, err = getTextFromVTT(responseBody) + case "json": + fallthrough + default: + text, err = getTextFromJSON(responseBody) + } + if err != nil { + return ErrorWrapper(err, "get_text_from_body_err", http.StatusInternalServerError), nil + } + completionTokens := CountTokenText(text, meta.ActualModelName) + + for k, v := range resp.Header { + c.Writer.Header().Set(k, v[0]) + } + _, _ = c.Writer.Write(responseBody) + + return nil, &model.Usage{ + PromptTokens: 0, + CompletionTokens: completionTokens, + TotalTokens: completionTokens, + } +} + +func getTextFromVTT(body []byte) (string, error) { + return getTextFromSRT(body) +} + +func getTextFromVerboseJSON(body []byte) (string, error) { + var whisperResponse WhisperVerboseJSONResponse + if err := json.Unmarshal(body, &whisperResponse); err != nil { + return "", fmt.Errorf("unmarshal_response_body_failed err :%w", err) + } + return whisperResponse.Text, nil +} + +func getTextFromSRT(body []byte) (string, error) { + scanner := bufio.NewScanner(bytes.NewReader(body)) + var builder strings.Builder + var textLine bool + for scanner.Scan() { + line := scanner.Text() + if textLine { + builder.WriteString(line) + textLine = false + continue + } else if strings.Contains(line, "-->") { + textLine = true + continue + } + } + if err := scanner.Err(); err != nil { + return "", err + } + return builder.String(), nil +} + +func getTextFromText(body []byte) string { + return strings.TrimSuffix(conv.BytesToString(body), "\n") +} + +func getTextFromJSON(body []byte) (string, error) { + var whisperResponse WhisperJSONResponse + if err := json.Unmarshal(body, &whisperResponse); err != nil { + return "", fmt.Errorf("unmarshal_response_body_failed err :%w", err) + } + return whisperResponse.Text, nil +} diff --git a/service/aiproxy/relay/adaptor/openai/model.go b/service/aiproxy/relay/adaptor/openai/model.go new file mode 100644 index 00000000000..9bb9b1fe2a2 --- /dev/null 
+++ b/service/aiproxy/relay/adaptor/openai/model.go @@ -0,0 +1,135 @@ +package openai + +import "github.com/labring/sealos/service/aiproxy/relay/model" + +type TextContent struct { + Type string `json:"type,omitempty"` + Text string `json:"text,omitempty"` +} + +type ImageContent struct { + ImageURL *model.ImageURL `json:"image_url,omitempty"` + Type string `json:"type,omitempty"` +} + +type ChatRequest struct { + Model string `json:"model"` + Messages []model.Message `json:"messages"` + MaxTokens int `json:"max_tokens"` +} + +type TextRequest struct { + Model string `json:"model"` + Prompt string `json:"prompt"` + Messages []model.Message `json:"messages"` + MaxTokens int `json:"max_tokens"` +} + +// ImageRequest docs: https://platform.openai.com/docs/api-reference/images/create +type ImageRequest struct { + Model string `json:"model"` + Prompt string `binding:"required" json:"prompt"` + Size string `json:"size,omitempty"` + Quality string `json:"quality,omitempty"` + ResponseFormat string `json:"response_format,omitempty"` + Style string `json:"style,omitempty"` + User string `json:"user,omitempty"` + N int `json:"n,omitempty"` +} + +type WhisperJSONResponse struct { + Text string `json:"text,omitempty"` +} + +type WhisperVerboseJSONResponse struct { + Task string `json:"task,omitempty"` + Language string `json:"language,omitempty"` + Text string `json:"text,omitempty"` + Segments []Segment `json:"segments,omitempty"` + Duration float64 `json:"duration,omitempty"` +} + +type Segment struct { + Text string `json:"text"` + Tokens []int `json:"tokens"` + ID int `json:"id"` + Seek int `json:"seek"` + Start float64 `json:"start"` + End float64 `json:"end"` + Temperature float64 `json:"temperature"` + AvgLogprob float64 `json:"avg_logprob"` + CompressionRatio float64 `json:"compression_ratio"` + NoSpeechProb float64 `json:"no_speech_prob"` +} + +type UsageOrResponseText struct { + *model.Usage + ResponseText string +} + +type SlimTextResponse struct { + Error 
model.Error `json:"error"` + Choices []TextResponseChoice `json:"choices"` + model.Usage `json:"usage"` +} + +type TextResponseChoice struct { + FinishReason string `json:"finish_reason"` + model.Message `json:"message"` + Index int `json:"index"` +} + +type TextResponse struct { + ID string `json:"id"` + Model string `json:"model,omitempty"` + Object string `json:"object"` + Choices []TextResponseChoice `json:"choices"` + model.Usage `json:"usage"` + Created int64 `json:"created"` +} + +type EmbeddingResponseItem struct { + Object string `json:"object"` + Embedding []float64 `json:"embedding"` + Index int `json:"index"` +} + +type EmbeddingResponse struct { + Object string `json:"object"` + Model string `json:"model"` + Data []EmbeddingResponseItem `json:"data"` + model.Usage `json:"usage"` +} + +type ImageData struct { + URL string `json:"url,omitempty"` + B64Json string `json:"b64_json,omitempty"` + RevisedPrompt string `json:"revised_prompt,omitempty"` +} + +type ImageResponse struct { + Data []ImageData `json:"data"` + Created int64 `json:"created"` +} + +type ChatCompletionsStreamResponseChoice struct { + FinishReason *string `json:"finish_reason,omitempty"` + Delta model.Message `json:"delta"` + Index int `json:"index"` +} + +type ChatCompletionsStreamResponse struct { + Usage *model.Usage `json:"usage,omitempty"` + ID string `json:"id"` + Object string `json:"object"` + Model string `json:"model"` + Choices []ChatCompletionsStreamResponseChoice `json:"choices"` + Created int64 `json:"created"` +} + +type CompletionsStreamResponse struct { + Choices []struct { + Text string `json:"text"` + FinishReason string `json:"finish_reason"` + } `json:"choices"` +} diff --git a/service/aiproxy/relay/adaptor/openai/token.go b/service/aiproxy/relay/adaptor/openai/token.go new file mode 100644 index 00000000000..c9607b0e6d0 --- /dev/null +++ b/service/aiproxy/relay/adaptor/openai/token.go @@ -0,0 +1,224 @@ +package openai + +import ( + "errors" + "fmt" + "math" + 
"strings" + "sync" + "unicode/utf8" + + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/image" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/pkoukk/tiktoken-go" +) + +// tokenEncoderMap won't grow after initialization +var ( + tokenEncoderMap = map[string]*tiktoken.Tiktoken{} + defaultTokenEncoder *tiktoken.Tiktoken + tokenEncoderLock sync.RWMutex +) + +func init() { + gpt35TokenEncoder, err := tiktoken.EncodingForModel("gpt-3.5-turbo") + if err != nil { + logger.FatalLog("failed to get gpt-3.5-turbo token encoder: " + err.Error()) + } + defaultTokenEncoder = gpt35TokenEncoder +} + +func getTokenEncoder(model string) *tiktoken.Tiktoken { + tokenEncoderLock.RLock() + tokenEncoder, ok := tokenEncoderMap[model] + tokenEncoderLock.RUnlock() + + if ok && tokenEncoder != nil { + return tokenEncoder + } + if ok { + tokenEncoder, err := tiktoken.EncodingForModel(model) + if err != nil { + logger.SysError(fmt.Sprintf("failed to get token encoder for model %s: %s, using encoder for gpt-3.5-turbo", model, err.Error())) + tokenEncoder = defaultTokenEncoder + } + tokenEncoderLock.Lock() + tokenEncoderMap[model] = tokenEncoder + tokenEncoderLock.Unlock() + return tokenEncoder + } + return defaultTokenEncoder +} + +func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int { + if config.GetApproximateTokenEnabled() { + return int(float64(len(text)) * 0.38) + } + return len(tokenEncoder.Encode(text, nil, nil)) +} + +func CountTokenMessages(messages []model.Message, model string) int { + tokenEncoder := getTokenEncoder(model) + // Reference: + // https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb + // https://github.com/pkoukk/tiktoken-go/issues/6 + // + // Every message follows <|start|>{role/name}\n{content}<|end|>\n + var tokensPerMessage int + var tokensPerName int + if model 
== "gpt-3.5-turbo-0301" { + tokensPerMessage = 4 + tokensPerName = -1 // If there's a name, the role is omitted + } else { + tokensPerMessage = 3 + tokensPerName = 1 + } + tokenNum := 0 + for _, message := range messages { + tokenNum += tokensPerMessage + switch v := message.Content.(type) { + case string: + tokenNum += getTokenNum(tokenEncoder, v) + case []any: + for _, it := range v { + m := it.(map[string]any) + switch m["type"] { + case "text": + if textValue, ok := m["text"]; ok { + if textString, ok := textValue.(string); ok { + tokenNum += getTokenNum(tokenEncoder, textString) + } + } + case "image_url": + imageURL, ok := m["image_url"].(map[string]any) + if ok { + url := imageURL["url"].(string) + detail := "" + if imageURL["detail"] != nil { + detail = imageURL["detail"].(string) + } + imageTokens, err := countImageTokens(url, detail, model) + if err != nil { + logger.SysError("error counting image tokens: " + err.Error()) + } else { + tokenNum += imageTokens + } + } + } + } + } + tokenNum += getTokenNum(tokenEncoder, message.Role) + if message.Name != nil { + tokenNum += tokensPerName + tokenNum += getTokenNum(tokenEncoder, *message.Name) + } + } + tokenNum += 3 // Every reply is primed with <|start|>assistant<|message|> + return tokenNum +} + +const ( + lowDetailCost = 85 + highDetailCostPerTile = 170 + additionalCost = 85 + // gpt-4o-mini cost higher than other model + gpt4oMiniLowDetailCost = 2833 + gpt4oMiniHighDetailCost = 5667 + gpt4oMiniAdditionalCost = 2833 +) + +// https://platform.openai.com/docs/guides/vision/calculating-costs +// https://github.com/openai/openai-cookbook/blob/05e3f9be4c7a2ae7ecf029a7c32065b024730ebe/examples/How_to_count_tokens_with_tiktoken.ipynb +func countImageTokens(url string, detail string, model string) (_ int, err error) { + fetchSize := true + var width, height int + // Reference: https://platform.openai.com/docs/guides/vision/low-or-high-fidelity-image-understanding + // detail == "auto" is undocumented on how it 
works, it just said the model will use the auto setting which will look at the image input size and decide if it should use the low or high setting. + // According to the official guide, "low" disable the high-res model, + // and only receive low-res 512px x 512px version of the image, indicating + // that image is treated as low-res when size is smaller than 512px x 512px, + // then we can assume that image size larger than 512px x 512px is treated + // as high-res. Then we have the following logic: + // if detail == "" || detail == "auto" { + // width, height, err = image.GetImageSize(url) + // if err != nil { + // return 0, err + // } + // fetchSize = false + // // not sure if this is correct + // if width > 512 || height > 512 { + // detail = "high" + // } else { + // detail = "low" + // } + // } + + // However, in my test, it seems to be always the same as "high". + // The following image, which is 125x50, is still treated as high-res, taken + // 255 tokens in the response of non-stream chat completion api. 
+ // https://upload.wikimedia.org/wikipedia/commons/1/10/18_Infantry_Division_Messina.jpg + if detail == "" || detail == "auto" { + // assume by test, not sure if this is correct + detail = "high" + } + switch detail { + case "low": + if strings.HasPrefix(model, "gpt-4o-mini") { + return gpt4oMiniLowDetailCost, nil + } + return lowDetailCost, nil + case "high": + if fetchSize { + width, height, err = image.GetImageSize(url) + if err != nil { + return 0, err + } + } + if width > 2048 || height > 2048 { // max(width, height) > 2048 + ratio := float64(2048) / math.Max(float64(width), float64(height)) + width = int(float64(width) * ratio) + height = int(float64(height) * ratio) + } + if width > 768 && height > 768 { // min(width, height) > 768 + ratio := float64(768) / math.Min(float64(width), float64(height)) + width = int(float64(width) * ratio) + height = int(float64(height) * ratio) + } + numSquares := int(math.Ceil(float64(width)/512) * math.Ceil(float64(height)/512)) + if strings.HasPrefix(model, "gpt-4o-mini") { + return numSquares*gpt4oMiniHighDetailCost + gpt4oMiniAdditionalCost, nil + } + result := numSquares*highDetailCostPerTile + additionalCost + return result, nil + default: + return 0, errors.New("invalid detail option") + } +} + +func CountTokenInput(input any, model string) int { + switch v := input.(type) { + case string: + return CountTokenText(v, model) + case []string: + text := "" + for _, s := range v { + text += s + } + return CountTokenText(text, model) + } + return 0 +} + +func CountTokenText(text string, model string) int { + if strings.HasPrefix(model, "tts") { + return utf8.RuneCountInString(text) + } + tokenEncoder := getTokenEncoder(model) + return getTokenNum(tokenEncoder, text) +} + +func CountToken(text string) int { + return CountTokenInput(text, "gpt-3.5-turbo") +} diff --git a/service/aiproxy/relay/adaptor/openai/util.go b/service/aiproxy/relay/adaptor/openai/util.go new file mode 100644 index 00000000000..b37dd52571e --- /dev/null 
+++ b/service/aiproxy/relay/adaptor/openai/util.go @@ -0,0 +1,15 @@ +package openai + +import "github.com/labring/sealos/service/aiproxy/relay/model" + +func ErrorWrapper(err error, code string, statusCode int) *model.ErrorWithStatusCode { + Error := model.Error{ + Message: err.Error(), + Type: "aiproxy_error", + Code: code, + } + return &model.ErrorWithStatusCode{ + Error: Error, + StatusCode: statusCode, + } +} diff --git a/service/aiproxy/relay/adaptor/palm/adaptor.go b/service/aiproxy/relay/adaptor/palm/adaptor.go new file mode 100644 index 00000000000..e65dde874a1 --- /dev/null +++ b/service/aiproxy/relay/adaptor/palm/adaptor.go @@ -0,0 +1,73 @@ +package palm + +import ( + "errors" + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct{} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/v1beta2/models/chat-bison-001:generateMessage", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("X-Goog-Api-Key", meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return ConvertRequest(request), nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} 
+ +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + var responseText string + err, responseText = StreamHandler(c, resp) + usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens) + } else { + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "google palm" +} diff --git a/service/aiproxy/relay/adaptor/palm/constants.go b/service/aiproxy/relay/adaptor/palm/constants.go new file mode 100644 index 00000000000..a8349362c25 --- /dev/null +++ b/service/aiproxy/relay/adaptor/palm/constants.go @@ -0,0 +1,5 @@ +package palm + +var ModelList = []string{ + "PaLM-2", +} diff --git a/service/aiproxy/relay/adaptor/palm/model.go b/service/aiproxy/relay/adaptor/palm/model.go new file mode 100644 index 00000000000..5f46f82f485 --- /dev/null +++ b/service/aiproxy/relay/adaptor/palm/model.go @@ -0,0 +1,40 @@ +package palm + +import ( + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type ChatMessage struct { + Author string `json:"author"` + Content string `json:"content"` +} + +type Filter struct { + Reason string `json:"reason"` + Message string `json:"message"` +} + +type Prompt struct { + Messages []ChatMessage `json:"messages"` +} + +type ChatRequest struct { + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"topP,omitempty"` + Prompt Prompt `json:"prompt"` + CandidateCount int `json:"candidateCount,omitempty"` + TopK int `json:"topK,omitempty"` +} + +type Error struct { + Message string `json:"message"` + Status string `json:"status"` + 
Code int `json:"code"` +} + +type ChatResponse struct { + Candidates []ChatMessage `json:"candidates"` + Messages []model.Message `json:"messages"` + Filters []Filter `json:"filters"` + Error Error `json:"error"` +} diff --git a/service/aiproxy/relay/adaptor/palm/palm.go b/service/aiproxy/relay/adaptor/palm/palm.go new file mode 100644 index 00000000000..41921a93894 --- /dev/null +++ b/service/aiproxy/relay/adaptor/palm/palm.go @@ -0,0 +1,147 @@ +package palm + +import ( + "net/http" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/common/random" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#request-body +// https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#response-body + +func ConvertRequest(textRequest *model.GeneralOpenAIRequest) *ChatRequest { + palmRequest := ChatRequest{ + Prompt: Prompt{ + Messages: make([]ChatMessage, 0, len(textRequest.Messages)), + }, + Temperature: textRequest.Temperature, + CandidateCount: textRequest.N, + TopP: textRequest.TopP, + TopK: textRequest.MaxTokens, + } + for _, message := range textRequest.Messages { + palmMessage := ChatMessage{ + Content: message.StringContent(), + } + if message.Role == "user" { + palmMessage.Author = "0" + } else { + palmMessage.Author = "1" + } + palmRequest.Prompt.Messages = append(palmRequest.Prompt.Messages, palmMessage) + } + return &palmRequest +} + +func responsePaLM2OpenAI(response *ChatResponse) *openai.TextResponse { + 
fullTextResponse := openai.TextResponse{ + Choices: make([]openai.TextResponseChoice, 0, len(response.Candidates)), + } + for i, candidate := range response.Candidates { + choice := openai.TextResponseChoice{ + Index: i, + Message: model.Message{ + Role: "assistant", + Content: candidate.Content, + }, + FinishReason: "stop", + } + fullTextResponse.Choices = append(fullTextResponse.Choices, choice) + } + return &fullTextResponse +} + +func streamResponsePaLM2OpenAI(palmResponse *ChatResponse) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + if len(palmResponse.Candidates) > 0 { + choice.Delta.Content = palmResponse.Candidates[0].Content + } + choice.FinishReason = &constant.StopFinishReason + var response openai.ChatCompletionsStreamResponse + response.Object = "chat.completion.chunk" + response.Model = "palm2" + response.Choices = []openai.ChatCompletionsStreamResponseChoice{choice} + return &response +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) { + defer resp.Body.Close() + + responseText := "" + responseID := "chatcmpl-" + random.GetUUID() + createdTime := helper.GetTimestamp() + + var palmResponse ChatResponse + err := json.NewDecoder(resp.Body).Decode(&palmResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), "" + } + + common.SetEventStreamHeaders(c) + + fullTextResponse := streamResponsePaLM2OpenAI(&palmResponse) + fullTextResponse.ID = responseID + fullTextResponse.Created = createdTime + if len(palmResponse.Candidates) > 0 { + responseText = palmResponse.Candidates[0].Content + } + + err = render.ObjectData(c, fullTextResponse) + if err != nil { + logger.SysError("error stream response: " + err.Error()) + return openai.ErrorWrapper(err, "stream_response_failed", http.StatusInternalServerError), "" + } + + 
render.Done(c) + + return nil, responseText +} + +func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var palmResponse ChatResponse + err := json.NewDecoder(resp.Body).Decode(&palmResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if palmResponse.Error.Code != 0 || len(palmResponse.Candidates) == 0 { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: palmResponse.Error.Message, + Type: palmResponse.Error.Status, + Param: "", + Code: palmResponse.Error.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responsePaLM2OpenAI(&palmResponse) + fullTextResponse.Model = modelName + completionTokens := openai.CountTokenText(palmResponse.Candidates[0].Content, modelName) + usage := model.Usage{ + PromptTokens: promptTokens, + CompletionTokens: completionTokens, + TotalTokens: promptTokens + completionTokens, + } + fullTextResponse.Usage = usage + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &usage +} diff --git a/service/aiproxy/relay/adaptor/siliconflow/constants.go b/service/aiproxy/relay/adaptor/siliconflow/constants.go new file mode 100644 index 00000000000..0bf547611a9 --- /dev/null +++ b/service/aiproxy/relay/adaptor/siliconflow/constants.go @@ -0,0 +1,36 @@ +package siliconflow + +// https://docs.siliconflow.cn/docs/getting-started + +var ModelList = []string{ + "deepseek-ai/deepseek-llm-67b-chat", + "Qwen/Qwen1.5-14B-Chat", + "Qwen/Qwen1.5-7B-Chat", + "Qwen/Qwen1.5-110B-Chat", + "Qwen/Qwen1.5-32B-Chat", + "01-ai/Yi-1.5-6B-Chat", + 
"01-ai/Yi-1.5-9B-Chat-16K", + "01-ai/Yi-1.5-34B-Chat-16K", + "THUDM/chatglm3-6b", + "deepseek-ai/DeepSeek-V2-Chat", + "THUDM/glm-4-9b-chat", + "Qwen/Qwen2-72B-Instruct", + "Qwen/Qwen2-7B-Instruct", + "Qwen/Qwen2-57B-A14B-Instruct", + "deepseek-ai/DeepSeek-Coder-V2-Instruct", + "Qwen/Qwen2-1.5B-Instruct", + "internlm/internlm2_5-7b-chat", + "BAAI/bge-large-en-v1.5", + "BAAI/bge-large-zh-v1.5", + "Pro/Qwen/Qwen2-7B-Instruct", + "Pro/Qwen/Qwen2-1.5B-Instruct", + "Pro/Qwen/Qwen1.5-7B-Chat", + "Pro/THUDM/glm-4-9b-chat", + "Pro/THUDM/chatglm3-6b", + "Pro/01-ai/Yi-1.5-9B-Chat-16K", + "Pro/01-ai/Yi-1.5-6B-Chat", + "Pro/google/gemma-2-9b-it", + "Pro/internlm/internlm2_5-7b-chat", + "Pro/meta-llama/Meta-Llama-3-8B-Instruct", + "Pro/mistralai/Mistral-7B-Instruct-v0.2", +} diff --git a/service/aiproxy/relay/adaptor/stepfun/constants.go b/service/aiproxy/relay/adaptor/stepfun/constants.go new file mode 100644 index 00000000000..6a2346cac5b --- /dev/null +++ b/service/aiproxy/relay/adaptor/stepfun/constants.go @@ -0,0 +1,13 @@ +package stepfun + +var ModelList = []string{ + "step-1-8k", + "step-1-32k", + "step-1-128k", + "step-1-256k", + "step-1-flash", + "step-2-16k", + "step-1v-8k", + "step-1v-32k", + "step-1x-medium", +} diff --git a/service/aiproxy/relay/adaptor/tencent/adaptor.go b/service/aiproxy/relay/adaptor/tencent/adaptor.go new file mode 100644 index 00000000000..f900c483589 --- /dev/null +++ b/service/aiproxy/relay/adaptor/tencent/adaptor.go @@ -0,0 +1,96 @@ +package tencent + +import ( + "errors" + "io" + "net/http" + "strconv" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://cloud.tencent.com/document/api/1729/101837 + +type Adaptor struct { + meta *meta.Meta + Sign 
string + Action string + Version string + Timestamp int64 +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.Action = "ChatCompletions" + a.Version = "2023-09-01" + a.Timestamp = helper.GetTimestamp() + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", a.Sign) + req.Header.Set("X-Tc-Action", a.Action) + req.Header.Set("X-Tc-Version", a.Version) + req.Header.Set("X-Tc-Timestamp", strconv.FormatInt(a.Timestamp, 10)) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + _, secretID, secretKey, err := ParseConfig(a.meta.APIKey) + if err != nil { + return nil, err + } + // we have to calculate the sign here + a.Sign = GetSign(request, a, secretID, secretKey) + return request, nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + var responseText string + err, responseText = StreamHandler(c, resp) + usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens) + } else { + err, usage = 
Handler(c, resp) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "tencent" +} diff --git a/service/aiproxy/relay/adaptor/tencent/constants.go b/service/aiproxy/relay/adaptor/tencent/constants.go new file mode 100644 index 00000000000..e8631e5f476 --- /dev/null +++ b/service/aiproxy/relay/adaptor/tencent/constants.go @@ -0,0 +1,9 @@ +package tencent + +var ModelList = []string{ + "hunyuan-lite", + "hunyuan-standard", + "hunyuan-standard-256K", + "hunyuan-pro", + "hunyuan-vision", +} diff --git a/service/aiproxy/relay/adaptor/tencent/main.go b/service/aiproxy/relay/adaptor/tencent/main.go new file mode 100644 index 00000000000..6d81f5a8f05 --- /dev/null +++ b/service/aiproxy/relay/adaptor/tencent/main.go @@ -0,0 +1,221 @@ +package tencent + +import ( + "bufio" + "crypto/hmac" + "crypto/sha256" + "encoding/hex" + "errors" + "fmt" + "net/http" + "strconv" + "strings" + "time" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/common/random" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse { + fullTextResponse := openai.TextResponse{ + Object: "chat.completion", + Created: helper.GetTimestamp(), + Usage: model.Usage{ + PromptTokens: response.Usage.PromptTokens, + CompletionTokens: response.Usage.CompletionTokens, + TotalTokens: response.Usage.TotalTokens, + }, + } + if len(response.Choices) > 0 { + choice := 
openai.TextResponseChoice{ + Index: 0, + Message: model.Message{ + Role: "assistant", + Content: response.Choices[0].Messages.Content, + }, + FinishReason: response.Choices[0].FinishReason, + } + fullTextResponse.Choices = append(fullTextResponse.Choices, choice) + } + return &fullTextResponse +} + +func streamResponseTencent2OpenAI(tencentResponse *ChatResponse) *openai.ChatCompletionsStreamResponse { + response := openai.ChatCompletionsStreamResponse{ + ID: "chatcmpl-" + random.GetUUID(), + Object: "chat.completion.chunk", + Created: helper.GetTimestamp(), + Model: "tencent-hunyuan", + } + if len(tencentResponse.Choices) > 0 { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = tencentResponse.Choices[0].Delta.Content + if tencentResponse.Choices[0].FinishReason == "stop" { + choice.FinishReason = &constant.StopFinishReason + } + response.Choices = append(response.Choices, choice) + } + return &response +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) { + defer resp.Body.Close() + + var responseText string + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + + common.SetEventStreamHeaders(c) + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var tencentResponse ChatResponse + err := json.Unmarshal(data, &tencentResponse) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + + response := streamResponseTencent2OpenAI(&tencentResponse) + if len(response.Choices) != 0 { + responseText += conv.AsString(response.Choices[0].Delta.Content) + } + + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " 
+ err.Error()) + } + + render.Done(c) + + return nil, responseText +} + +func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var responseP ChatResponseP + err := json.NewDecoder(resp.Body).Decode(&responseP) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + + if responseP.Response.Error.Code != 0 { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: responseP.Response.Error.Message, + Code: responseP.Response.Error.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseTencent2OpenAI(&responseP.Response) + fullTextResponse.Model = "hunyuan" + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, err = c.Writer.Write(jsonResponse) + if err != nil { + return openai.ErrorWrapper(err, "write_response_body_failed", http.StatusInternalServerError), nil + } + return nil, &fullTextResponse.Usage +} + +func ParseConfig(config string) (appID int64, secretID string, secretKey string, err error) { + parts := strings.Split(config, "|") + if len(parts) != 3 { + err = errors.New("invalid tencent config") + return + } + appID, err = strconv.ParseInt(parts[0], 10, 64) + secretID = parts[1] + secretKey = parts[2] + return +} + +func sha256hex(s string) string { + b := sha256.Sum256(conv.StringToBytes(s)) + return hex.EncodeToString(b[:]) +} + +func hmacSha256(s, key string) string { + hashed := hmac.New(sha256.New, conv.StringToBytes(key)) + hashed.Write(conv.StringToBytes(s)) + return conv.BytesToString(hashed.Sum(nil)) +} + +func GetSign(req *model.GeneralOpenAIRequest, adaptor *Adaptor, secID, secKey string) string { + // build canonical request string + host 
:= "hunyuan.tencentcloudapi.com" + httpRequestMethod := "POST" + canonicalURI := "/" + canonicalQueryString := "" + canonicalHeaders := fmt.Sprintf("content-type:%s\nhost:%s\nx-tc-action:%s\n", + "application/json", host, strings.ToLower(adaptor.Action)) + signedHeaders := "content-type;host;x-tc-action" + payload, _ := json.Marshal(req) + hashedRequestPayload := sha256hex(conv.BytesToString(payload)) + canonicalRequest := fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s", + httpRequestMethod, + canonicalURI, + canonicalQueryString, + canonicalHeaders, + signedHeaders, + hashedRequestPayload) + // build string to sign + algorithm := "TC3-HMAC-SHA256" + requestTimestamp := strconv.FormatInt(adaptor.Timestamp, 10) + timestamp, _ := strconv.ParseInt(requestTimestamp, 10, 64) + t := time.Unix(timestamp, 0).UTC() + // must be the format 2006-01-02, ref to package time for more info + date := t.Format("2006-01-02") + credentialScope := fmt.Sprintf("%s/%s/tc3_request", date, "hunyuan") + hashedCanonicalRequest := sha256hex(canonicalRequest) + string2sign := fmt.Sprintf("%s\n%s\n%s\n%s", + algorithm, + requestTimestamp, + credentialScope, + hashedCanonicalRequest) + + // sign string + secretDate := hmacSha256(date, "TC3"+secKey) + secretService := hmacSha256("hunyuan", secretDate) + secretKey := hmacSha256("tc3_request", secretService) + signature := hex.EncodeToString(conv.StringToBytes(hmacSha256(string2sign, secretKey))) + + // build authorization + authorization := fmt.Sprintf("%s Credential=%s/%s, SignedHeaders=%s, Signature=%s", + algorithm, + secID, + credentialScope, + signedHeaders, + signature) + return authorization +} diff --git a/service/aiproxy/relay/adaptor/tencent/model.go b/service/aiproxy/relay/adaptor/tencent/model.go new file mode 100644 index 00000000000..1e3f1ae61b1 --- /dev/null +++ b/service/aiproxy/relay/adaptor/tencent/model.go @@ -0,0 +1,34 @@ +package tencent + +import "github.com/labring/sealos/service/aiproxy/relay/model" + +type Error struct { + Message 
string `json:"Message"` + Code int `json:"Code"` +} + +type Usage struct { + PromptTokens int `json:"PromptTokens"` + CompletionTokens int `json:"CompletionTokens"` + TotalTokens int `json:"TotalTokens"` +} + +type ResponseChoices struct { + FinishReason string `json:"FinishReason,omitempty"` // 流式结束标志位,为 stop 则表示尾包 + Messages model.Message `json:"Message,omitempty"` // 内容,同步模式返回内容,流模式为 null 输出 content 内容总数最多支持 1024token。 + Delta model.Message `json:"Delta,omitempty"` // 内容,流模式返回内容,同步模式为 null 输出 content 内容总数最多支持 1024token。 +} + +type ChatResponse struct { + ID string `json:"Id,omitempty"` + Note string `json:"Note,omitempty"` + ReqID string `json:"Req_id,omitempty"` + Choices []ResponseChoices `json:"Choices,omitempty"` + Error Error `json:"Error,omitempty"` + Usage Usage `json:"Usage,omitempty"` + Created int64 `json:"Created,omitempty"` +} + +type ChatResponseP struct { + Response ChatResponse `json:"Response,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/togetherai/constants.go b/service/aiproxy/relay/adaptor/togetherai/constants.go new file mode 100644 index 00000000000..0a79fbdcc5a --- /dev/null +++ b/service/aiproxy/relay/adaptor/togetherai/constants.go @@ -0,0 +1,10 @@ +package togetherai + +// https://docs.together.ai/docs/inference-models + +var ModelList = []string{ + "meta-llama/Llama-3-70b-chat-hf", + "deepseek-ai/deepseek-coder-33b-instruct", + "mistralai/Mixtral-8x22B-Instruct-v0.1", + "Qwen/Qwen1.5-72B-Chat", +} diff --git a/service/aiproxy/relay/adaptor/vertexai/adaptor.go b/service/aiproxy/relay/adaptor/vertexai/adaptor.go new file mode 100644 index 00000000000..829cfc6465d --- /dev/null +++ b/service/aiproxy/relay/adaptor/vertexai/adaptor.go @@ -0,0 +1,123 @@ +package vertexai + +import ( + "errors" + "fmt" + "io" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + channelhelper "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + relaymodel 
"github.com/labring/sealos/service/aiproxy/relay/model" +) + +var _ channelhelper.Adaptor = new(Adaptor) + +const channelName = "vertexai" + +type Adaptor struct{} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*relaymodel.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *relaymodel.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + adaptor := GetAdaptor(request.Model) + if adaptor == nil { + return nil, errors.New("adaptor not found") + } + + return adaptor.ConvertRequest(c, relayMode, request) +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *relaymodel.Usage, err *relaymodel.ErrorWithStatusCode) { + adaptor := GetAdaptor(meta.ActualModelName) + if adaptor == nil { + return nil, &relaymodel.ErrorWithStatusCode{ + StatusCode: http.StatusInternalServerError, + Error: relaymodel.Error{ + Message: "adaptor not found", + }, + } + } + return adaptor.DoResponse(c, resp, meta) +} + +func (a *Adaptor) GetModelList() (models []string) { + models = modelList + return +} + +func (a *Adaptor) GetChannelName() string { + return channelName +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + var suffix string + if strings.HasPrefix(meta.ActualModelName, "gemini") { + if meta.IsStream { + suffix = "streamGenerateContent?alt=sse" + } else { + suffix = "generateContent" + } + } else { + if meta.IsStream { + suffix = "streamRawPredict?alt=sse" + } else { + suffix = "rawPredict" + } + } + + if meta.BaseURL != "" { + return fmt.Sprintf( + "%s/v1/projects/%s/locations/%s/publishers/google/models/%s:%s", + meta.BaseURL, + meta.Config.VertexAIProjectID, + meta.Config.Region, + meta.ActualModelName, + suffix, + ), nil + } + return fmt.Sprintf( + 
"https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:%s", + meta.Config.Region, + meta.Config.VertexAIProjectID, + meta.Config.Region, + meta.ActualModelName, + suffix, + ), nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + channelhelper.SetupCommonRequestHeader(c, req, meta) + token, err := getToken(c, meta.ChannelID, meta.Config.VertexAIADC) + if err != nil { + return err + } + req.Header.Set("Authorization", "Bearer "+token) + return nil +} + +func (a *Adaptor) ConvertImageRequest(request *relaymodel.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return channelhelper.DoRequestHelper(a, c, meta, requestBody) +} diff --git a/service/aiproxy/relay/adaptor/vertexai/claude/adapter.go b/service/aiproxy/relay/adaptor/vertexai/claude/adapter.go new file mode 100644 index 00000000000..bb55f4dbf24 --- /dev/null +++ b/service/aiproxy/relay/adaptor/vertexai/claude/adapter.go @@ -0,0 +1,59 @@ +package vertexai + +import ( + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/anthropic" + "github.com/pkg/errors" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +var ModelList = []string{ + "claude-3-haiku@20240307", + "claude-3-sonnet@20240229", + "claude-3-opus@20240229", + "claude-3-5-sonnet@20240620", + "claude-3-5-sonnet-v2@20241022", + "claude-3-5-haiku@20241022", +} + +const anthropicVersion = "vertex-2023-10-16" + +type Adaptor struct{} + +func (a *Adaptor) ConvertRequest(c *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is 
nil") + } + + claudeReq := anthropic.ConvertRequest(request) + req := Request{ + AnthropicVersion: anthropicVersion, + // Model: claudeReq.Model, + Messages: claudeReq.Messages, + System: claudeReq.System, + MaxTokens: claudeReq.MaxTokens, + Temperature: claudeReq.Temperature, + TopP: claudeReq.TopP, + TopK: claudeReq.TopK, + Stream: claudeReq.Stream, + Tools: claudeReq.Tools, + } + + c.Set(ctxkey.RequestModel, request.Model) + c.Set(ctxkey.ConvertedRequest, req) + return req, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = anthropic.StreamHandler(c, resp) + } else { + err, usage = anthropic.Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} diff --git a/service/aiproxy/relay/adaptor/vertexai/claude/model.go b/service/aiproxy/relay/adaptor/vertexai/claude/model.go new file mode 100644 index 00000000000..eda799ec8f0 --- /dev/null +++ b/service/aiproxy/relay/adaptor/vertexai/claude/model.go @@ -0,0 +1,17 @@ +package vertexai + +import "github.com/labring/sealos/service/aiproxy/relay/adaptor/anthropic" + +type Request struct { + ToolChoice any `json:"tool_choice,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + AnthropicVersion string `json:"anthropic_version"` + System string `json:"system,omitempty"` + Messages []anthropic.Message `json:"messages"` + StopSequences []string `json:"stop_sequences,omitempty"` + Tools []anthropic.Tool `json:"tools,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + TopK int `json:"top_k,omitempty"` + Stream bool `json:"stream,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor/vertexai/gemini/adapter.go b/service/aiproxy/relay/adaptor/vertexai/gemini/adapter.go new file mode 100644 index 00000000000..861accadf5f --- /dev/null +++ b/service/aiproxy/relay/adaptor/vertexai/gemini/adapter.go @@ -0,0 +1,48 @@ 
+package vertexai + +import ( + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/gemini" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" + "github.com/pkg/errors" + + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +var ModelList = []string{ + "gemini-1.5-pro-001", "gemini-1.5-flash-001", "gemini-pro", "gemini-pro-vision", +} + +type Adaptor struct{} + +func (a *Adaptor) ConvertRequest(c *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + + geminiRequest := gemini.ConvertRequest(request) + c.Set(ctxkey.RequestModel, request.Model) + c.Set(ctxkey.ConvertedRequest, geminiRequest) + return geminiRequest, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + var responseText string + err, responseText = gemini.StreamHandler(c, resp) + usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens) + } else { + switch meta.Mode { + case relaymode.Embeddings: + err, usage = gemini.EmbeddingHandler(c, resp) + default: + err, usage = gemini.Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + } + return +} diff --git a/service/aiproxy/relay/adaptor/vertexai/registry.go b/service/aiproxy/relay/adaptor/vertexai/registry.go new file mode 100644 index 00000000000..ee95a19a91d --- /dev/null +++ b/service/aiproxy/relay/adaptor/vertexai/registry.go @@ -0,0 +1,52 @@ +package vertexai + +import ( + "net/http" + + "github.com/gin-gonic/gin" + claude "github.com/labring/sealos/service/aiproxy/relay/adaptor/vertexai/claude" + gemini 
"github.com/labring/sealos/service/aiproxy/relay/adaptor/vertexai/gemini" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type ModelType int + +const ( + VerterAIClaude ModelType = iota + 1 + VerterAIGemini +) + +var ( + modelMapping = map[string]ModelType{} + modelList = []string{} +) + +func init() { + modelList = append(modelList, claude.ModelList...) + for _, model := range claude.ModelList { + modelMapping[model] = VerterAIClaude + } + + modelList = append(modelList, gemini.ModelList...) + for _, model := range gemini.ModelList { + modelMapping[model] = VerterAIGemini + } +} + +type innerAIAdapter interface { + ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) + DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) +} + +func GetAdaptor(model string) innerAIAdapter { + adaptorType := modelMapping[model] + switch adaptorType { + case VerterAIClaude: + return &claude.Adaptor{} + case VerterAIGemini: + return &gemini.Adaptor{} + default: + return nil + } +} diff --git a/service/aiproxy/relay/adaptor/vertexai/token.go b/service/aiproxy/relay/adaptor/vertexai/token.go new file mode 100644 index 00000000000..77b64ba9db3 --- /dev/null +++ b/service/aiproxy/relay/adaptor/vertexai/token.go @@ -0,0 +1,64 @@ +package vertexai + +import ( + "context" + "fmt" + "time" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + + credentials "cloud.google.com/go/iam/credentials/apiv1" + "cloud.google.com/go/iam/credentials/apiv1/credentialspb" + "github.com/patrickmn/go-cache" + "google.golang.org/api/option" +) + +type ApplicationDefaultCredentials struct { + Type string `json:"type"` + ProjectID string `json:"project_id"` + PrivateKeyID string `json:"private_key_id"` + PrivateKey string `json:"private_key"` + ClientEmail string `json:"client_email"` + 
ClientID string `json:"client_id"` + AuthURI string `json:"auth_uri"` + TokenURI string `json:"token_uri"` + AuthProviderX509CertURL string `json:"auth_provider_x509_cert_url"` + ClientX509CertURL string `json:"client_x509_cert_url"` + UniverseDomain string `json:"universe_domain"` +} + +var Cache = cache.New(50*time.Minute, 55*time.Minute) + +const defaultScope = "https://www.googleapis.com/auth/cloud-platform" + +func getToken(ctx context.Context, channelID int, adcJSON string) (string, error) { + cacheKey := fmt.Sprintf("vertexai-token-%d", channelID) + if token, found := Cache.Get(cacheKey); found { + return token.(string), nil + } + adc := &ApplicationDefaultCredentials{} + if err := json.Unmarshal(conv.StringToBytes(adcJSON), adc); err != nil { + return "", fmt.Errorf("failed to decode credentials file: %w", err) + } + + c, err := credentials.NewIamCredentialsClient(ctx, option.WithCredentialsJSON(conv.StringToBytes(adcJSON))) + if err != nil { + return "", fmt.Errorf("failed to create client: %w", err) + } + defer c.Close() + + req := &credentialspb.GenerateAccessTokenRequest{ + // See https://pkg.go.dev/cloud.google.com/go/iam/credentials/apiv1/credentialspb#GenerateAccessTokenRequest. 
+ Name: "projects/-/serviceAccounts/" + adc.ClientEmail, + Scope: []string{defaultScope}, + } + resp, err := c.GenerateAccessToken(ctx, req) + if err != nil { + return "", fmt.Errorf("failed to generate access token: %w", err) + } + _ = resp + + Cache.Set(cacheKey, resp.AccessToken, cache.DefaultExpiration) + return resp.AccessToken, nil +} diff --git a/service/aiproxy/relay/adaptor/xunfei/adaptor.go b/service/aiproxy/relay/adaptor/xunfei/adaptor.go new file mode 100644 index 00000000000..0e6b917e8b6 --- /dev/null +++ b/service/aiproxy/relay/adaptor/xunfei/adaptor.go @@ -0,0 +1,76 @@ +package xunfei + +import ( + "io" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Adaptor struct { + meta *meta.Meta +} + +func (a *Adaptor) Init(meta *meta.Meta) { + a.meta = meta +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { + return meta.BaseURL + "/v1/chat/completions", nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + req.Header.Set("Authorization", "Bearer "+meta.APIKey) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, _ int, request *model.GeneralOpenAIRequest) (any, error) { + domain, err := getXunfeiDomain(request.Model) + if err != nil { + return nil, err + } + request.Model = domain + return request, nil +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + domain, err := getXunfeiDomain(request.Model) + if err != nil { + return nil, err + } + request.Model = domain + return request, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) 
ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, usage = StreamHandler(c, resp, meta.PromptTokens, meta.ActualModelName) + } else { + err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "xunfei" +} diff --git a/service/aiproxy/relay/adaptor/xunfei/constants.go b/service/aiproxy/relay/adaptor/xunfei/constants.go new file mode 100644 index 00000000000..f39f5515260 --- /dev/null +++ b/service/aiproxy/relay/adaptor/xunfei/constants.go @@ -0,0 +1,10 @@ +package xunfei + +var ModelList = []string{ + "SparkDesk-Lite", + "SparkDesk-Pro", + "SparkDesk-Pro-128K", + "SparkDesk-Max", + "SparkDesk-Max-32k", + "SparkDesk-4.0-Ultra", +} diff --git a/service/aiproxy/relay/adaptor/xunfei/main.go b/service/aiproxy/relay/adaptor/xunfei/main.go new file mode 100644 index 00000000000..e14f4342197 --- /dev/null +++ b/service/aiproxy/relay/adaptor/xunfei/main.go @@ -0,0 +1,131 @@ +package xunfei + +import ( + "bufio" + "errors" + "net/http" + "strings" + + json "github.com/json-iterator/go" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/common/render" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://console.xfyun.cn/services/cbm 
+// https://www.xfyun.cn/doc/spark/HTTP%E8%B0%83%E7%94%A8%E6%96%87%E6%A1%A3.html + +func StreamHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + scanner := bufio.NewScanner(resp.Body) + scanner.Split(bufio.ScanLines) + + common.SetEventStreamHeaders(c) + id := helper.GetResponseID(c) + responseModel := c.GetString(ctxkey.OriginalModel) + var responseText string + + var usage *model.Usage + + for scanner.Scan() { + data := scanner.Bytes() + if len(data) < 6 || conv.BytesToString(data[:6]) != "data: " { + continue + } + data = data[6:] + + if conv.BytesToString(data) == "[DONE]" { + break + } + + var response openai.ChatCompletionsStreamResponse + err := json.Unmarshal(data, &response) + if err != nil { + logger.SysErrorf("error unmarshalling stream response: %s, data: %s", err.Error(), conv.BytesToString(data)) + continue + } + + if response.Usage != nil { + usage = response.Usage + } + + for _, v := range response.Choices { + v.Delta.Role = "assistant" + responseText += v.Delta.StringContent() + } + response.ID = id + response.Model = modelName + err = render.ObjectData(c, response) + if err != nil { + logger.SysError(err.Error()) + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + if usage == nil { + usage = openai.ResponseText2Usage(responseText, responseModel, promptTokens) + } + return nil, usage +} + +func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var response openai.TextResponse + err := json.NewDecoder(resp.Body).Decode(&response) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + + response.Model = modelName + var responseText string + for _, v := range response.Choices { + 
responseText += v.Message.Content.(string) + } + usage := openai.ResponseText2Usage(responseText, modelName, promptTokens) + response.Usage = *usage + response.ID = helper.GetResponseID(c) + jsonResponse, err := json.Marshal(response) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, usage +} + +func getXunfeiDomain(modelName string) (string, error) { + _, s, ok := strings.Cut(modelName, "-") + if !ok { + return "", errors.New("invalid model name") + } + switch strings.ToLower(s) { + case "lite": + return "lite", nil + case "pro": + return "generalv3", nil + case "pro-128k": + return "pro-128k", nil + case "max": + return "generalv3.5", nil + case "max-32k": + return "max-32k", nil + case "4.0-ultra": + return "4.0Ultra", nil + } + return "", errors.New("invalid model name") +} diff --git a/service/aiproxy/relay/adaptor/zhipu/adaptor.go b/service/aiproxy/relay/adaptor/zhipu/adaptor.go new file mode 100644 index 00000000000..968d40ff3b7 --- /dev/null +++ b/service/aiproxy/relay/adaptor/zhipu/adaptor.go @@ -0,0 +1,157 @@ +package zhipu + +import ( + "errors" + "fmt" + "io" + "math" + "net/http" + "strings" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +type Adaptor struct { + APIVersion string +} + +func (a *Adaptor) Init(_ *meta.Meta) { +} + +func (a *Adaptor) SetVersionByModeName(modelName string) { + if strings.HasPrefix(modelName, "glm-") { + a.APIVersion = "v4" + } else { + a.APIVersion = "v3" + } +} + +func (a *Adaptor) GetRequestURL(meta *meta.Meta) 
(string, error) { + switch meta.Mode { + case relaymode.ImagesGenerations: + return meta.BaseURL + "/api/paas/v4/images/generations", nil + case relaymode.Embeddings: + return meta.BaseURL + "/api/paas/v4/embeddings", nil + } + a.SetVersionByModeName(meta.ActualModelName) + if a.APIVersion == "v4" { + return meta.BaseURL + "/api/paas/v4/chat/completions", nil + } + method := "invoke" + if meta.IsStream { + method = "sse-invoke" + } + return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", meta.BaseURL, meta.ActualModelName, method), nil +} + +func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { + adaptor.SetupCommonRequestHeader(c, req, meta) + token := GetToken(meta.APIKey) + req.Header.Set("Authorization", token) + return nil +} + +func (a *Adaptor) ConvertRequest(_ *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + switch relayMode { + case relaymode.Embeddings: + baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request) + return baiduEmbeddingRequest, err + default: + // TopP (0.0, 1.0) + if request.TopP != nil { + *request.TopP = math.Min(0.99, *request.TopP) + *request.TopP = math.Max(0.01, *request.TopP) + } + + // Temperature (0.0, 1.0) + if request.Temperature != nil { + *request.Temperature = math.Min(0.99, *request.Temperature) + *request.Temperature = math.Max(0.01, *request.Temperature) + } + a.SetVersionByModeName(request.Model) + if a.APIVersion == "v4" { + return request, nil + } + return ConvertRequest(request), nil + } +} + +func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) { + if request == nil { + return nil, errors.New("request is nil") + } + newRequest := ImageRequest{ + Model: request.Model, + Prompt: request.Prompt, + UserID: request.User, + } + return newRequest, nil +} + +func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) 
(*http.Response, error) { + return adaptor.DoRequestHelper(a, c, meta, requestBody) +} + +func (a *Adaptor) ConvertSTTRequest(*http.Request) (io.ReadCloser, error) { + return nil, nil +} + +func (a *Adaptor) ConvertTTSRequest(*model.TextToSpeechRequest) (any, error) { + return nil, nil +} + +func (a *Adaptor) DoResponseV4(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + if meta.IsStream { + err, _, usage = openai.StreamHandler(c, resp, meta.Mode) + } else { + err, usage = openai.Handler(c, resp, meta.PromptTokens, meta.ActualModelName) + } + return +} + +func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) { + switch meta.Mode { + case relaymode.Embeddings: + err, usage = EmbeddingsHandler(c, resp) + return + case relaymode.ImagesGenerations: + err, usage = openai.ImageHandler(c, resp) + return + } + if a.APIVersion == "v4" { + return a.DoResponseV4(c, resp, meta) + } + if meta.IsStream { + err, usage = StreamHandler(c, resp) + } else { + if meta.Mode == relaymode.Embeddings { + err, usage = EmbeddingsHandler(c, resp) + } else { + err, usage = Handler(c, resp) + } + } + return +} + +func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) (*EmbeddingRequest, error) { + return &EmbeddingRequest{ + Model: request.Model, + Input: request.Input, + }, nil +} + +func (a *Adaptor) GetModelList() []string { + return ModelList +} + +func (a *Adaptor) GetChannelName() string { + return "zhipu" +} diff --git a/service/aiproxy/relay/adaptor/zhipu/constants.go b/service/aiproxy/relay/adaptor/zhipu/constants.go new file mode 100644 index 00000000000..e11921230cd --- /dev/null +++ b/service/aiproxy/relay/adaptor/zhipu/constants.go @@ -0,0 +1,7 @@ +package zhipu + +var ModelList = []string{ + "chatglm_turbo", "chatglm_pro", "chatglm_std", "chatglm_lite", + "glm-4", "glm-4v", "glm-3-turbo", "embedding-2", + "cogview-3", +} diff 
--git a/service/aiproxy/relay/adaptor/zhipu/main.go b/service/aiproxy/relay/adaptor/zhipu/main.go new file mode 100644 index 00000000000..5924e3a9acd --- /dev/null +++ b/service/aiproxy/relay/adaptor/zhipu/main.go @@ -0,0 +1,276 @@ +package zhipu + +import ( + "bufio" + "net/http" + "slices" + "strings" + "sync" + "time" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/render" + + "github.com/gin-gonic/gin" + "github.com/golang-jwt/jwt" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/helper" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/constant" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +// https://open.bigmodel.cn/doc/api#chatglm_std +// chatglm_std, chatglm_lite +// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke +// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke + +var ( + zhipuTokens sync.Map + expSeconds int64 = 24 * 3600 +) + +func GetToken(apikey string) string { + data, ok := zhipuTokens.Load(apikey) + if ok { + td := data.(tokenData) + if time.Now().Before(td.ExpiryTime) { + return td.Token + } + } + + split := strings.Split(apikey, ".") + if len(split) != 2 { + logger.SysError("invalid zhipu key: " + apikey) + return "" + } + + id := split[0] + secret := split[1] + + expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6 + expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second) + + timestamp := time.Now().UnixNano() / 1e6 + + payload := jwt.MapClaims{ + "api_key": id, + "exp": expMillis, + "timestamp": timestamp, + } + + token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload) + + token.Header["alg"] = "HS256" + token.Header["sign_type"] = "SIGN" + + tokenString, 
err := token.SignedString(conv.StringToBytes(secret)) + if err != nil { + return "" + } + + zhipuTokens.Store(apikey, tokenData{ + Token: tokenString, + ExpiryTime: expiryTime, + }) + + return tokenString +} + +func ConvertRequest(request *model.GeneralOpenAIRequest) *Request { + return &Request{ + Prompt: request.Messages, + Temperature: request.Temperature, + TopP: request.TopP, + Incremental: false, + } +} + +func responseZhipu2OpenAI(response *Response) *openai.TextResponse { + fullTextResponse := openai.TextResponse{ + ID: response.Data.TaskID, + Object: "chat.completion", + Created: helper.GetTimestamp(), + Choices: make([]openai.TextResponseChoice, 0, len(response.Data.Choices)), + Usage: response.Data.Usage, + } + for i, choice := range response.Data.Choices { + openaiChoice := openai.TextResponseChoice{ + Index: i, + Message: model.Message{ + Role: choice.Role, + Content: strings.Trim(choice.Content.(string), "\""), + }, + FinishReason: "", + } + if i == len(response.Data.Choices)-1 { + openaiChoice.FinishReason = "stop" + } + fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice) + } + return &fullTextResponse +} + +func streamResponseZhipu2OpenAI(zhipuResponse string) *openai.ChatCompletionsStreamResponse { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = zhipuResponse + response := openai.ChatCompletionsStreamResponse{ + Object: "chat.completion.chunk", + Created: helper.GetTimestamp(), + Model: "chatglm", + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } + return &response +} + +func streamMetaResponseZhipu2OpenAI(zhipuResponse *StreamMetaResponse) (*openai.ChatCompletionsStreamResponse, *model.Usage) { + var choice openai.ChatCompletionsStreamResponseChoice + choice.Delta.Content = "" + choice.FinishReason = &constant.StopFinishReason + response := openai.ChatCompletionsStreamResponse{ + ID: zhipuResponse.RequestID, + Object: "chat.completion.chunk", + Created: 
helper.GetTimestamp(), + Model: "chatglm", + Choices: []openai.ChatCompletionsStreamResponseChoice{choice}, + } + return &response, &zhipuResponse.Usage +} + +func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var usage *model.Usage + scanner := bufio.NewScanner(resp.Body) + scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { + if atEOF && len(data) == 0 { + return 0, nil, nil + } + if i := strings.Index(conv.BytesToString(data), "\n\n"); i >= 0 && slices.Contains(data, ':') { + return i + 2, data[0:i], nil + } + if atEOF { + return len(data), data, nil + } + return 0, nil, nil + }) + + common.SetEventStreamHeaders(c) + + for scanner.Scan() { + data := scanner.Text() + lines := strings.Split(data, "\n") + for i, line := range lines { + if len(line) < 6 { + continue + } + if strings.HasPrefix(line, "data: ") { + dataSegment := line[6:] + if i != len(lines)-1 { + dataSegment += "\n" + } + response := streamResponseZhipu2OpenAI(dataSegment) + err := render.ObjectData(c, response) + if err != nil { + logger.SysError("error marshalling stream response: " + err.Error()) + } + } else if strings.HasPrefix(line, "meta: ") { + metaSegment := line[6:] + var zhipuResponse StreamMetaResponse + err := json.Unmarshal(conv.StringToBytes(metaSegment), &zhipuResponse) + if err != nil { + logger.SysError("error unmarshalling stream response: " + err.Error()) + continue + } + response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse) + err = render.ObjectData(c, response) + if err != nil { + logger.SysError("error marshalling stream response: " + err.Error()) + } + usage = zhipuUsage + } + } + } + + if err := scanner.Err(); err != nil { + logger.SysError("error reading stream: " + err.Error()) + } + + render.Done(c) + + return nil, usage +} + +func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() 
+ + var zhipuResponse Response + err := json.NewDecoder(resp.Body).Decode(&zhipuResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + if !zhipuResponse.Success { + return &model.ErrorWithStatusCode{ + Error: model.Error{ + Message: zhipuResponse.Msg, + Type: "zhipu_error", + Param: "", + Code: zhipuResponse.Code, + }, + StatusCode: resp.StatusCode, + }, nil + } + fullTextResponse := responseZhipu2OpenAI(&zhipuResponse) + fullTextResponse.Model = "chatglm" + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} + +func EmbeddingsHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) { + defer resp.Body.Close() + + var zhipuResponse EmbeddingResponse + err := json.NewDecoder(resp.Body).Decode(&zhipuResponse) + if err != nil { + return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil + } + fullTextResponse := embeddingResponseZhipu2OpenAI(&zhipuResponse) + jsonResponse, err := json.Marshal(fullTextResponse) + if err != nil { + return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil + } + c.Writer.Header().Set("Content-Type", "application/json") + c.Writer.WriteHeader(resp.StatusCode) + _, _ = c.Writer.Write(jsonResponse) + return nil, &fullTextResponse.Usage +} + +func embeddingResponseZhipu2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse { + openAIEmbeddingResponse := openai.EmbeddingResponse{ + Object: "list", + Data: make([]openai.EmbeddingResponseItem, 0, len(response.Embeddings)), + Model: response.Model, + Usage: model.Usage{ + PromptTokens: 
response.PromptTokens, + CompletionTokens: response.CompletionTokens, + TotalTokens: response.Usage.TotalTokens, + }, + } + + for _, item := range response.Embeddings { + openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{ + Object: `embedding`, + Index: item.Index, + Embedding: item.Embedding, + }) + } + return &openAIEmbeddingResponse +} diff --git a/service/aiproxy/relay/adaptor/zhipu/model.go b/service/aiproxy/relay/adaptor/zhipu/model.go new file mode 100644 index 00000000000..e773812cc5b --- /dev/null +++ b/service/aiproxy/relay/adaptor/zhipu/model.go @@ -0,0 +1,66 @@ +package zhipu + +import ( + "time" + + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type Request struct { + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + RequestID string `json:"request_id,omitempty"` + Prompt []model.Message `json:"prompt"` + Incremental bool `json:"incremental,omitempty"` +} + +type ResponseData struct { + TaskID string `json:"task_id"` + RequestID string `json:"request_id"` + TaskStatus string `json:"task_status"` + Choices []model.Message `json:"choices"` + model.Usage `json:"usage"` +} + +type Response struct { + Msg string `json:"msg"` + Data ResponseData `json:"data"` + Code int `json:"code"` + Success bool `json:"success"` +} + +type StreamMetaResponse struct { + RequestID string `json:"request_id"` + TaskID string `json:"task_id"` + TaskStatus string `json:"task_status"` + model.Usage `json:"usage"` +} + +type tokenData struct { + ExpiryTime time.Time + Token string +} + +type EmbeddingRequest struct { + Input any `json:"input"` + Model string `json:"model"` +} + +type EmbeddingResponse struct { + Model string `json:"model"` + Object string `json:"object"` + Embeddings []EmbeddingData `json:"data"` + model.Usage `json:"usage"` +} + +type EmbeddingData struct { + Object string `json:"object"` + Embedding []float64 `json:"embedding"` + Index int `json:"index"` 
+} + +type ImageRequest struct { + Model string `json:"model"` + Prompt string `json:"prompt"` + UserID string `json:"user_id,omitempty"` +} diff --git a/service/aiproxy/relay/adaptor_test.go b/service/aiproxy/relay/adaptor_test.go new file mode 100644 index 00000000000..14c7eb92cdc --- /dev/null +++ b/service/aiproxy/relay/adaptor_test.go @@ -0,0 +1,17 @@ +package relay + +import ( + "testing" + + "github.com/labring/sealos/service/aiproxy/relay/apitype" + "github.com/smartystreets/goconvey/convey" +) + +func TestGetAdaptor(t *testing.T) { + convey.Convey("get adaptor", t, func() { + for i := 0; i < apitype.Dummy; i++ { + a := GetAdaptor(i) + convey.So(a, convey.ShouldNotBeNil) + } + }) +} diff --git a/service/aiproxy/relay/apitype/define.go b/service/aiproxy/relay/apitype/define.go new file mode 100644 index 00000000000..212a1b6b1c3 --- /dev/null +++ b/service/aiproxy/relay/apitype/define.go @@ -0,0 +1,23 @@ +package apitype + +const ( + OpenAI = iota + Anthropic + PaLM + Baidu + Zhipu + Ali + Xunfei + AIProxyLibrary + Tencent + Gemini + Ollama + AwsClaude + Coze + Cohere + Cloudflare + DeepL + VertexAI + + Dummy // this one is only for count, do not add any channel after this +) diff --git a/service/aiproxy/relay/channeltype/define.go b/service/aiproxy/relay/channeltype/define.go new file mode 100644 index 00000000000..cf82655c7dc --- /dev/null +++ b/service/aiproxy/relay/channeltype/define.go @@ -0,0 +1,50 @@ +package channeltype + +const ( + Unknown = iota + OpenAI + API2D + Azure + CloseAI + OpenAISB + OpenAIMax + OhMyGPT + Custom + Ails + AIProxy + PaLM + API2GPT + AIGC2D + Anthropic + Baidu + Zhipu + Ali + Xunfei + AI360 + OpenRouter + AIProxyLibrary + FastGPT + Tencent + Gemini + Moonshot + Baichuan + Minimax + Mistral + Groq + Ollama + LingYiWanWu + StepFun + AwsClaude + Coze + Cohere + DeepSeek + Cloudflare + DeepL + TogetherAI + Doubao + Novita + VertextAI + SiliconFlow + + Dummy +) diff --git a/service/aiproxy/relay/channeltype/helper.go 
b/service/aiproxy/relay/channeltype/helper.go new file mode 100644 index 00000000000..87ad194a4c9 --- /dev/null +++ b/service/aiproxy/relay/channeltype/helper.go @@ -0,0 +1,42 @@ +package channeltype + +import "github.com/labring/sealos/service/aiproxy/relay/apitype" + +func ToAPIType(channelType int) int { + switch channelType { + case Anthropic: + return apitype.Anthropic + case Baidu: + return apitype.Baidu + case PaLM: + return apitype.PaLM + case Zhipu: + return apitype.Zhipu + case Ali: + return apitype.Ali + case Xunfei: + return apitype.Xunfei + case AIProxyLibrary: + return apitype.AIProxyLibrary + case Tencent: + return apitype.Tencent + case Gemini: + return apitype.Gemini + case Ollama: + return apitype.Ollama + case AwsClaude: + return apitype.AwsClaude + case Coze: + return apitype.Coze + case Cohere: + return apitype.Cohere + case Cloudflare: + return apitype.Cloudflare + case DeepL: + return apitype.DeepL + case VertextAI: + return apitype.VertexAI + default: + return apitype.OpenAI + } +} diff --git a/service/aiproxy/relay/channeltype/url.go b/service/aiproxy/relay/channeltype/url.go new file mode 100644 index 00000000000..5a485df485b --- /dev/null +++ b/service/aiproxy/relay/channeltype/url.go @@ -0,0 +1,53 @@ +package channeltype + +var ChannelBaseURLs = map[int]string{ + OpenAI: "https://api.openai.com", + API2D: "https://oa.api2d.net", + Azure: "", + CloseAI: "https://api.closeai-proxy.xyz", + OpenAISB: "https://api.openai-sb.com", + OpenAIMax: "https://api.openaimax.com", + OhMyGPT: "https://api.ohmygpt.com", + Custom: "", + Ails: "https://api.caipacity.com", + AIProxy: "https://api.aiproxy.io", + PaLM: "https://generativelanguage.googleapis.com", + API2GPT: "https://api.api2gpt.com", + AIGC2D: "https://api.aigc2d.com", + Anthropic: "https://api.anthropic.com", + Baidu: "https://aip.baidubce.com", + Zhipu: "https://open.bigmodel.cn", + Ali: "https://dashscope.aliyuncs.com", + Xunfei: "https://spark-api-open.xf-yun.com", + AI360: 
"https://ai.360.cn", + OpenRouter: "https://openrouter.ai/api", + AIProxyLibrary: "https://api.aiproxy.io", + FastGPT: "https://fastgpt.run/api/openapi", + Tencent: "https://hunyuan.tencentcloudapi.com", + Gemini: "https://generativelanguage.googleapis.com", + Moonshot: "https://api.moonshot.cn", + Baichuan: "https://api.baichuan-ai.com", + Minimax: "https://api.minimax.chat", + Mistral: "https://api.mistral.ai", + Groq: "https://api.groq.com/openai", + Ollama: "http://localhost:11434", + LingYiWanWu: "https://api.lingyiwanwu.com", + StepFun: "https://api.stepfun.com", + AwsClaude: "", + Coze: "https://api.coze.com", + Cohere: "https://api.cohere.ai", + DeepSeek: "https://api.deepseek.com", + Cloudflare: "https://api.cloudflare.com", + DeepL: "https://api-free.deepl.com", + TogetherAI: "https://api.together.xyz", + Doubao: "https://ark.cn-beijing.volces.com", + Novita: "https://api.novita.ai/v3/openai", + VertextAI: "", + SiliconFlow: "https://api.siliconflow.cn", +} + +func init() { + if len(ChannelBaseURLs) != Dummy-1 { + panic("channel base urls length not match") + } +} diff --git a/service/aiproxy/relay/channeltype/url_test.go b/service/aiproxy/relay/channeltype/url_test.go new file mode 100644 index 00000000000..9406d8d912f --- /dev/null +++ b/service/aiproxy/relay/channeltype/url_test.go @@ -0,0 +1,13 @@ +package channeltype + +import ( + "testing" + + "github.com/smartystreets/goconvey/convey" +) + +func TestChannelBaseURLs(t *testing.T) { + convey.Convey("channel base urls", t, func() { + // The URL map has one entry per channel, excluding the Unknown + // sentinel (0) and the Dummy counter, hence Dummy-1 entries — + // this must agree with the init() panic check in url.go. + convey.So(len(ChannelBaseURLs), convey.ShouldEqual, Dummy-1) + }) +} diff --git a/service/aiproxy/relay/constant/common.go b/service/aiproxy/relay/constant/common.go new file mode 100644 index 00000000000..03544fd3c93 --- /dev/null +++ b/service/aiproxy/relay/constant/common.go @@ -0,0 +1,7 @@ +package constant + +var ( + StopFinishReason = "stop" + StreamObject = "chat.completion.chunk" + NonStreamObject = "chat.completion" +) diff --git 
a/service/aiproxy/relay/constant/finishreason/define.go b/service/aiproxy/relay/constant/finishreason/define.go new file mode 100644 index 00000000000..1ed9c425533 --- /dev/null +++ b/service/aiproxy/relay/constant/finishreason/define.go @@ -0,0 +1,5 @@ +package finishreason + +const ( + Stop = "stop" +) diff --git a/service/aiproxy/relay/constant/role/define.go b/service/aiproxy/relay/constant/role/define.go new file mode 100644 index 00000000000..972488c5c9d --- /dev/null +++ b/service/aiproxy/relay/constant/role/define.go @@ -0,0 +1,5 @@ +package role + +const ( + Assistant = "assistant" +) diff --git a/service/aiproxy/relay/controller/audio.go b/service/aiproxy/relay/controller/audio.go new file mode 100644 index 00000000000..fdae8522bfb --- /dev/null +++ b/service/aiproxy/relay/controller/audio.go @@ -0,0 +1,114 @@ +package controller + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "net/http" + + json "github.com/json-iterator/go" + "github.com/shopspring/decimal" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/balance" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/relay" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + billingprice "github.com/labring/sealos/service/aiproxy/relay/price" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode { + meta := meta.GetByContext(c) + + channelType := c.GetInt(ctxkey.Channel) + group := c.GetString(ctxkey.Group) + + adaptor := relay.GetAdaptor(meta.APIType) + if adaptor == nil { + return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest) + } + 
adaptor.Init(meta) + + meta.ActualModelName, _ = getMappedModelName(meta.OriginModelName, c.GetStringMapString(ctxkey.ModelMapping)) + + price, ok := billingprice.GetModelPrice(meta.OriginModelName, meta.ActualModelName, channelType) + if !ok { + return openai.ErrorWrapper(fmt.Errorf("model price not found: %s", meta.OriginModelName), "model_price_not_found", http.StatusInternalServerError) + } + // Completion price must come from GetCompletionPrice (as in RelayTextHelper); + // calling GetModelPrice twice was a copy-paste error that billed completions + // at the prompt rate. + completionPrice, ok := billingprice.GetCompletionPrice(meta.OriginModelName, meta.ActualModelName, channelType) + if !ok { + return openai.ErrorWrapper(fmt.Errorf("completion price not found: %s", meta.OriginModelName), "completion_price_not_found", http.StatusInternalServerError) + } + + var body io.ReadCloser + switch relayMode { + case relaymode.AudioSpeech: + var ttsRequest relaymodel.TextToSpeechRequest + err := common.UnmarshalBodyReusable(c, &ttsRequest) + if err != nil { + return openai.ErrorWrapper(err, "invalid_json", http.StatusBadRequest) + } + ttsRequest.Model = meta.ActualModelName + data, err := adaptor.ConvertTTSRequest(&ttsRequest) + if err != nil { + return openai.ErrorWrapper(err, "convert_tts_request_failed", http.StatusBadRequest) + } + jsonBody, err := json.Marshal(data) + if err != nil { + return openai.ErrorWrapper(err, "marshal_request_body_failed", http.StatusInternalServerError) + } + body = io.NopCloser(bytes.NewReader(jsonBody)) + meta.PromptTokens = openai.CountTokenText(ttsRequest.Input, meta.ActualModelName) + case relaymode.AudioTranscription: + var err error + body, err = adaptor.ConvertSTTRequest(c.Request) + if err != nil { + return openai.ErrorWrapper(err, "convert_stt_request_failed", http.StatusBadRequest) + } + default: + return openai.ErrorWrapper(fmt.Errorf("invalid relay mode: %d", relayMode), "invalid_relay_mode", http.StatusBadRequest) + } + + groupRemainBalance, postGroupConsumer, err := balance.Default.GetGroupRemainBalance(c.Request.Context(), group) + if err != nil { + return openai.ErrorWrapper(err, "get_group_balance_failed", 
http.StatusInternalServerError) + } + + preConsumedAmount := decimal.NewFromInt(int64(meta.PromptTokens)). + Mul(decimal.NewFromFloat(price)). + Div(decimal.NewFromInt(billingprice.PriceUnit)). + InexactFloat64() + // Check if group balance is enough + if groupRemainBalance < preConsumedAmount { + return openai.ErrorWrapper(errors.New("group balance is not enough"), "insufficient_group_balance", http.StatusForbidden) + } + + resp, err := adaptor.DoRequest(c, meta, body) + if err != nil { + return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError) + } + + if resp.StatusCode != http.StatusOK { + err := RelayErrorHandler(resp) + go postConsumeAmount(context.Background(), postGroupConsumer, resp.StatusCode, c.Request.URL.Path, &relaymodel.Usage{ + PromptTokens: 0, + CompletionTokens: 0, + }, meta, price, completionPrice, err.Message) + return err + } + + usage, respErr := adaptor.DoResponse(c, resp, meta) + if respErr != nil { + return respErr + } + + go postConsumeAmount(context.Background(), postGroupConsumer, resp.StatusCode, c.Request.URL.Path, usage, meta, price, completionPrice, "") + + return nil +} diff --git a/service/aiproxy/relay/controller/error.go b/service/aiproxy/relay/controller/error.go new file mode 100644 index 00000000000..1b268c424f1 --- /dev/null +++ b/service/aiproxy/relay/controller/error.go @@ -0,0 +1,95 @@ +package controller + +import ( + "fmt" + "net/http" + "strconv" + + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/config" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay/model" +) + +type GeneralErrorResponse struct { + Error model.Error `json:"error"` + Message string `json:"message"` + Msg string `json:"msg"` + Err string `json:"err"` + ErrorMsg string `json:"error_msg"` + Header struct { + Message string `json:"message"` + } `json:"header"` + Response struct { + Error struct { + Message string 
`json:"message"` + } `json:"error"` + } `json:"response"` +} + +func (e GeneralErrorResponse) ToMessage() string { + if e.Error.Message != "" { + return e.Error.Message + } + if e.Message != "" { + return e.Message + } + if e.Msg != "" { + return e.Msg + } + if e.Err != "" { + return e.Err + } + if e.ErrorMsg != "" { + return e.ErrorMsg + } + if e.Header.Message != "" { + return e.Header.Message + } + if e.Response.Error.Message != "" { + return e.Response.Error.Message + } + return "" +} + +func RelayErrorHandler(resp *http.Response) *model.ErrorWithStatusCode { + if resp == nil { + return &model.ErrorWithStatusCode{ + StatusCode: 500, + Error: model.Error{ + Message: "resp is nil", + Type: "upstream_error", + Code: "bad_response", + }, + } + } + defer resp.Body.Close() + + ErrorWithStatusCode := &model.ErrorWithStatusCode{ + StatusCode: resp.StatusCode, + Error: model.Error{ + Message: "", + Type: "upstream_error", + Code: "bad_response_status_code", + Param: strconv.Itoa(resp.StatusCode), + }, + } + var errResponse GeneralErrorResponse + err := json.NewDecoder(resp.Body).Decode(&errResponse) + if err != nil { + return ErrorWithStatusCode + } + if config.DebugEnabled { + logger.SysLogf("error happened, status code: %d, response: \n%+v", resp.StatusCode, errResponse) + } + if errResponse.Error.Message != "" { + // OpenAI format error, so we override the default one + ErrorWithStatusCode.Error = errResponse.Error + } else { + ErrorWithStatusCode.Error.Message = errResponse.ToMessage() + } + if ErrorWithStatusCode.Error.Message == "" { + ErrorWithStatusCode.Error.Message = fmt.Sprintf("bad response status code %d", resp.StatusCode) + } + return ErrorWithStatusCode +} diff --git a/service/aiproxy/relay/controller/helper.go b/service/aiproxy/relay/controller/helper.go new file mode 100644 index 00000000000..9986f40ef93 --- /dev/null +++ b/service/aiproxy/relay/controller/helper.go @@ -0,0 +1,141 @@ +package controller + +import ( + "context" + "net/http" + "strings" + 
+ "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + "github.com/labring/sealos/service/aiproxy/common/balance" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/controller/validator" + "github.com/labring/sealos/service/aiproxy/relay/meta" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + billingprice "github.com/labring/sealos/service/aiproxy/relay/price" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" + "github.com/shopspring/decimal" +) + +func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.GeneralOpenAIRequest, error) { + textRequest := &relaymodel.GeneralOpenAIRequest{} + err := common.UnmarshalBodyReusable(c, textRequest) + if err != nil { + return nil, err + } + if relayMode == relaymode.Moderations && textRequest.Model == "" { + textRequest.Model = "text-moderation-latest" + } + if relayMode == relaymode.Embeddings && textRequest.Model == "" { + textRequest.Model = c.Param("model") + } + err = validator.ValidateTextRequest(textRequest, relayMode) + if err != nil { + return nil, err + } + return textRequest, nil +} + +func getPromptTokens(textRequest *relaymodel.GeneralOpenAIRequest, relayMode int) int { + switch relayMode { + case relaymode.ChatCompletions: + return openai.CountTokenMessages(textRequest.Messages, textRequest.Model) + case relaymode.Completions: + return openai.CountTokenInput(textRequest.Prompt, textRequest.Model) + case relaymode.Moderations: + return openai.CountTokenInput(textRequest.Input, textRequest.Model) + } + return 0 +} + +func getPreConsumedAmount(textRequest *relaymodel.GeneralOpenAIRequest, promptTokens int, price float64) float64 { + preConsumedTokens := int64(promptTokens) + if 
textRequest.MaxTokens != 0 { + preConsumedTokens += int64(textRequest.MaxTokens) + } + return decimal. + NewFromInt(preConsumedTokens). + Mul(decimal.NewFromFloat(price)). + Div(decimal.NewFromInt(billingprice.PriceUnit)). + InexactFloat64() +} + +func preCheckGroupBalance(ctx context.Context, textRequest *relaymodel.GeneralOpenAIRequest, promptTokens int, price float64, meta *meta.Meta) (bool, balance.PostGroupConsumer, *relaymodel.ErrorWithStatusCode) { + preConsumedAmount := getPreConsumedAmount(textRequest, promptTokens, price) + + groupRemainBalance, postGroupConsumer, err := balance.Default.GetGroupRemainBalance(ctx, meta.Group) + if err != nil { + return false, nil, openai.ErrorWrapper(err, "get_group_quota_failed", http.StatusInternalServerError) + } + if groupRemainBalance < preConsumedAmount { + return false, nil, nil + } + return true, postGroupConsumer, nil +} + +func postConsumeAmount(ctx context.Context, postGroupConsumer balance.PostGroupConsumer, code int, endpoint string, usage *relaymodel.Usage, meta *meta.Meta, price, completionPrice float64, content string) { + if usage == nil { + err := model.BatchRecordConsume(ctx, meta.Group, code, meta.ChannelID, 0, 0, meta.OriginModelName, meta.TokenID, meta.TokenName, 0, price, completionPrice, endpoint, content) + if err != nil { + logger.Error(ctx, "error batch record consume: "+err.Error()) + } + return + } + promptTokens := usage.PromptTokens + completionTokens := usage.CompletionTokens + var amount float64 + totalTokens := promptTokens + completionTokens + if totalTokens != 0 { + // amount = (float64(promptTokens)*price + float64(completionTokens)*completionPrice) / billingPrice.PriceUnit + promptAmount := decimal.NewFromInt(int64(promptTokens)).Mul(decimal.NewFromFloat(price)).Div(decimal.NewFromInt(billingprice.PriceUnit)) + completionAmount := decimal.NewFromInt(int64(completionTokens)).Mul(decimal.NewFromFloat(completionPrice)).Div(decimal.NewFromInt(billingprice.PriceUnit)) + amount = 
promptAmount.Add(completionAmount).InexactFloat64() + if amount > 0 { + _amount, err := postGroupConsumer.PostGroupConsume(ctx, meta.TokenName, amount) + if err != nil { + logger.Error(ctx, "error consuming token remain amount: "+err.Error()) + err = model.CreateConsumeError(meta.Group, meta.TokenName, meta.OriginModelName, err.Error(), amount, meta.TokenID) + if err != nil { + logger.Error(ctx, "failed to create consume error: "+err.Error()) + } + } else { + amount = _amount + } + } + } + err := model.BatchRecordConsume(ctx, meta.Group, code, meta.ChannelID, promptTokens, completionTokens, meta.OriginModelName, meta.TokenID, meta.TokenName, amount, price, completionPrice, endpoint, content) + if err != nil { + logger.Error(ctx, "error batch record consume: "+err.Error()) + } +} + +func getMappedModelName(modelName string, mapping map[string]string) (string, bool) { + if mapping == nil { + return modelName, false + } + mappedModelName := mapping[modelName] + if mappedModelName != "" { + return mappedModelName, true + } + return modelName, false +} + +func isErrorHappened(meta *meta.Meta, resp *http.Response) bool { + if resp == nil { + return meta.ChannelType != channeltype.AwsClaude + } + if resp.StatusCode != http.StatusOK { + return true + } + if meta.ChannelType == channeltype.DeepL { + // skip stream check for deepl + return false + } + if meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json") { + return true + } + return false +} diff --git a/service/aiproxy/relay/controller/image.go b/service/aiproxy/relay/controller/image.go new file mode 100644 index 00000000000..40a25b4d91d --- /dev/null +++ b/service/aiproxy/relay/controller/image.go @@ -0,0 +1,192 @@ +package controller + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "net/http" + + json "github.com/json-iterator/go" + + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common" + 
"github.com/labring/sealos/service/aiproxy/common/balance" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/relay" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/meta" + relaymodel "github.com/labring/sealos/service/aiproxy/relay/model" + billingprice "github.com/labring/sealos/service/aiproxy/relay/price" + "github.com/shopspring/decimal" +) + +func getImageRequest(c *gin.Context, _ int) (*relaymodel.ImageRequest, error) { + imageRequest := &relaymodel.ImageRequest{} + err := common.UnmarshalBodyReusable(c, imageRequest) + if err != nil { + return nil, err + } + if imageRequest.N == 0 { + imageRequest.N = 1 + } + if imageRequest.Size == "" { + imageRequest.Size = "1024x1024" + } + if imageRequest.Model == "" { + imageRequest.Model = "dall-e-2" + } + return imageRequest, nil +} + +func validateImageRequest(imageRequest *relaymodel.ImageRequest, _ *meta.Meta) *relaymodel.ErrorWithStatusCode { + // check prompt length + if imageRequest.Prompt == "" { + return openai.ErrorWrapper(errors.New("prompt is required"), "prompt_missing", http.StatusBadRequest) + } + + // model validation + if !billingprice.IsValidImageSize(imageRequest.Model, imageRequest.Size) { + return openai.ErrorWrapper(errors.New("size not supported for this image model"), "size_not_supported", http.StatusBadRequest) + } + + if !billingprice.IsValidImagePromptLength(imageRequest.Model, len(imageRequest.Prompt)) { + return openai.ErrorWrapper(errors.New("prompt is too long"), "prompt_too_long", http.StatusBadRequest) + } + + // Number of generated images validation + if !billingprice.IsWithinRange(imageRequest.Model, imageRequest.N) { + return openai.ErrorWrapper(errors.New("invalid value of n"), "n_not_within_range", http.StatusBadRequest) + } + return nil +} + 
+func getImageCostPrice(imageRequest *relaymodel.ImageRequest) (float64, error) { + if imageRequest == nil { + return 0, errors.New("imageRequest is nil") + } + imageCostPrice := billingprice.GetImageSizePrice(imageRequest.Model, imageRequest.Size) + if imageRequest.Quality == "hd" && imageRequest.Model == "dall-e-3" { + if imageRequest.Size == "1024x1024" { + imageCostPrice *= 2 + } else { + imageCostPrice *= 1.5 + } + } + return imageCostPrice, nil +} + +func RelayImageHelper(c *gin.Context, _ int) *relaymodel.ErrorWithStatusCode { + ctx := c.Request.Context() + meta := meta.GetByContext(c) + imageRequest, err := getImageRequest(c, meta.Mode) + if err != nil { + logger.Errorf(ctx, "getImageRequest failed: %s", err.Error()) + return openai.ErrorWrapper(err, "invalid_image_request", http.StatusBadRequest) + } + + // map model name + var isModelMapped bool + meta.OriginModelName = imageRequest.Model + imageRequest.Model, isModelMapped = getMappedModelName(imageRequest.Model, meta.ModelMapping) + meta.ActualModelName = imageRequest.Model + + // model validation + bizErr := validateImageRequest(imageRequest, meta) + if bizErr != nil { + return bizErr + } + + imageCostPrice, err := getImageCostPrice(imageRequest) + if err != nil { + return openai.ErrorWrapper(err, "get_image_cost_price_failed", http.StatusInternalServerError) + } + + // Convert the original image model + imageRequest.Model, _ = getMappedModelName(imageRequest.Model, billingprice.GetImageOriginModelName()) + c.Set("response_format", imageRequest.ResponseFormat) + + adaptor := relay.GetAdaptor(meta.APIType) + if adaptor == nil { + return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest) + } + adaptor.Init(meta) + + var requestBody io.Reader + switch meta.ChannelType { + case channeltype.Ali, + channeltype.Baidu, + channeltype.Zhipu: + finalRequest, err := adaptor.ConvertImageRequest(imageRequest) + if err != nil { + return 
openai.ErrorWrapper(err, "convert_image_request_failed", http.StatusInternalServerError) + } + jsonStr, err := json.Marshal(finalRequest) + if err != nil { + return openai.ErrorWrapper(err, "marshal_image_request_failed", http.StatusInternalServerError) + } + requestBody = bytes.NewReader(jsonStr) + default: + if isModelMapped || meta.ChannelType == channeltype.Azure { // make Azure channel request body + jsonStr, err := json.Marshal(imageRequest) + if err != nil { + return openai.ErrorWrapper(err, "marshal_image_request_failed", http.StatusInternalServerError) + } + requestBody = bytes.NewReader(jsonStr) + } else { + requestBody = c.Request.Body + } + } + + groupRemainBalance, postGroupConsumer, err := balance.Default.GetGroupRemainBalance(ctx, meta.Group) + if err != nil { + return openai.ErrorWrapper(err, "get_group_remain_balance_failed", http.StatusInternalServerError) + } + + amount := decimal.NewFromFloat(imageCostPrice).Mul(decimal.NewFromInt(int64(imageRequest.N))).InexactFloat64() + + if groupRemainBalance-amount < 0 { + return openai.ErrorWrapper(errors.New("group balance is not enough"), "insufficient_group_balance", http.StatusForbidden) + } + + // do request + resp, err := adaptor.DoRequest(c, meta, requestBody) + if err != nil { + logger.Errorf(ctx, "DoRequest failed: %s", err.Error()) + return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError) + } + + defer func(ctx context.Context) { + if resp == nil || resp.StatusCode != http.StatusOK { + _ = model.RecordConsumeLog(ctx, meta.Group, resp.StatusCode, meta.ChannelID, imageRequest.N, 0, imageRequest.Model, meta.TokenID, meta.TokenName, 0, imageCostPrice, 0, c.Request.URL.Path, imageRequest.Size) + return + } + + _amount, err := postGroupConsumer.PostGroupConsume(ctx, meta.TokenName, amount) + if err != nil { + logger.Error(ctx, "error consuming token remain balance: "+err.Error()) + err = model.CreateConsumeError(meta.Group, meta.TokenName, imageRequest.Model, err.Error(), 
amount, meta.TokenID) + if err != nil { + logger.Error(ctx, "failed to create consume error: "+err.Error()) + } + } else { + amount = _amount + } + err = model.BatchRecordConsume(ctx, meta.Group, resp.StatusCode, meta.ChannelID, imageRequest.N, 0, imageRequest.Model, meta.TokenID, meta.TokenName, amount, imageCostPrice, 0, c.Request.URL.Path, imageRequest.Size) + if err != nil { + logger.Error(ctx, "failed to record consume log: "+err.Error()) + } + }(c.Request.Context()) + + // do response + _, respErr := adaptor.DoResponse(c, resp, meta) + if respErr != nil { + logger.Errorf(ctx, "respErr is not nil: %+v", respErr) + return respErr + } + + return nil +} diff --git a/service/aiproxy/relay/controller/text.go b/service/aiproxy/relay/controller/text.go new file mode 100644 index 00000000000..92cbb7e5758 --- /dev/null +++ b/service/aiproxy/relay/controller/text.go @@ -0,0 +1,108 @@ +package controller + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" + json "github.com/json-iterator/go" + "github.com/labring/sealos/service/aiproxy/common/logger" + "github.com/labring/sealos/service/aiproxy/relay" + "github.com/labring/sealos/service/aiproxy/relay/adaptor" + "github.com/labring/sealos/service/aiproxy/relay/adaptor/openai" + "github.com/labring/sealos/service/aiproxy/relay/meta" + "github.com/labring/sealos/service/aiproxy/relay/model" + billingprice "github.com/labring/sealos/service/aiproxy/relay/price" +) + +func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode { + ctx := c.Request.Context() + meta := meta.GetByContext(c) + // get & validate textRequest + textRequest, err := getAndValidateTextRequest(c, meta.Mode) + if err != nil { + logger.Errorf(ctx, "getAndValidateTextRequest failed: %s", err.Error()) + return openai.ErrorWrapper(err, "invalid_text_request", http.StatusBadRequest) + } + meta.IsStream = textRequest.Stream + + // map model name + meta.OriginModelName = textRequest.Model + 
textRequest.Model, _ = getMappedModelName(textRequest.Model, meta.ModelMapping) + meta.ActualModelName = textRequest.Model + + // get model price + price, ok := billingprice.GetModelPrice(meta.OriginModelName, meta.ActualModelName, meta.ChannelType) + if !ok { + return openai.ErrorWrapper(fmt.Errorf("model price not found: %s", meta.OriginModelName), "model_price_not_found", http.StatusInternalServerError) + } + completionPrice, ok := billingprice.GetCompletionPrice(meta.OriginModelName, meta.ActualModelName, meta.ChannelType) + if !ok { + return openai.ErrorWrapper(fmt.Errorf("completion price not found: %s", meta.OriginModelName), "completion_price_not_found", http.StatusInternalServerError) + } + // pre-consume balance + promptTokens := getPromptTokens(textRequest, meta.Mode) + meta.PromptTokens = promptTokens + ok, postGroupConsume, bizErr := preCheckGroupBalance(ctx, textRequest, promptTokens, price, meta) + if bizErr != nil { + logger.Warnf(ctx, "preConsumeAmount failed: %+v", *bizErr) + return bizErr + } + if !ok { + return openai.ErrorWrapper(errors.New("group balance is not enough"), "insufficient_group_balance", http.StatusForbidden) + } + + adaptor := relay.GetAdaptor(meta.APIType) + if adaptor == nil { + return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest) + } + adaptor.Init(meta) + + // get request body + requestBody, err := getRequestBody(c, meta, textRequest, adaptor) + if err != nil { + return openai.ErrorWrapper(err, "convert_request_failed", http.StatusInternalServerError) + } + + // do request + resp, err := adaptor.DoRequest(c, meta, requestBody) + if err != nil { + logger.Errorf(ctx, "DoRequest failed: %s", err.Error()) + return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError) + } + if isErrorHappened(meta, resp) { + err := RelayErrorHandler(resp) + go postConsumeAmount(context.Background(), postGroupConsume, resp.StatusCode, c.Request.URL.Path, nil, 
meta, price, completionPrice, err.Error.Message) + return err + } + + // do response + usage, respErr := adaptor.DoResponse(c, resp, meta) + if respErr != nil { + logger.Errorf(ctx, "respErr is not nil: %+v", respErr) + go postConsumeAmount(context.Background(), postGroupConsume, respErr.StatusCode, c.Request.URL.Path, usage, meta, price, completionPrice, respErr.Error.Message) + return respErr + } + // post-consume amount + go postConsumeAmount(context.Background(), postGroupConsume, resp.StatusCode, c.Request.URL.Path, usage, meta, price, completionPrice, "") + return nil +} + +func getRequestBody(c *gin.Context, meta *meta.Meta, textRequest *model.GeneralOpenAIRequest, adaptor adaptor.Adaptor) (io.Reader, error) { + convertedRequest, err := adaptor.ConvertRequest(c, meta.Mode, textRequest) + if err != nil { + logger.Debugf(c.Request.Context(), "converted request failed: %s\n", err.Error()) + return nil, err + } + jsonData, err := json.Marshal(convertedRequest) + if err != nil { + logger.Debugf(c.Request.Context(), "converted request json_marshal_failed: %s\n", err.Error()) + return nil, err + } + logger.Debugf(c.Request.Context(), "converted request: \n%s", jsonData) + return bytes.NewReader(jsonData), nil +} diff --git a/service/aiproxy/relay/controller/validator/validation.go b/service/aiproxy/relay/controller/validator/validation.go new file mode 100644 index 00000000000..4f29c84a86d --- /dev/null +++ b/service/aiproxy/relay/controller/validator/validation.go @@ -0,0 +1,38 @@ +package validator + +import ( + "errors" + "math" + + "github.com/labring/sealos/service/aiproxy/relay/model" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +func ValidateTextRequest(textRequest *model.GeneralOpenAIRequest, relayMode int) error { + if textRequest.MaxTokens < 0 || textRequest.MaxTokens > math.MaxInt32/2 { + return errors.New("max_tokens is invalid") + } + if textRequest.Model == "" { + return errors.New("model is required") + } + switch relayMode { + 
case relaymode.Completions: + if textRequest.Prompt == "" { + return errors.New("field prompt is required") + } + case relaymode.ChatCompletions: + if len(textRequest.Messages) == 0 { + return errors.New("field messages is required") + } + case relaymode.Embeddings: + case relaymode.Moderations: + if textRequest.Input == "" { + return errors.New("field input is required") + } + case relaymode.Edits: + if textRequest.Instruction == "" { + return errors.New("field instruction is required") + } + } + return nil +} diff --git a/service/aiproxy/relay/meta/relay_meta.go b/service/aiproxy/relay/meta/relay_meta.go new file mode 100644 index 00000000000..f07c862636d --- /dev/null +++ b/service/aiproxy/relay/meta/relay_meta.go @@ -0,0 +1,53 @@ +package meta + +import ( + "github.com/gin-gonic/gin" + "github.com/labring/sealos/service/aiproxy/common/ctxkey" + "github.com/labring/sealos/service/aiproxy/model" + "github.com/labring/sealos/service/aiproxy/relay/channeltype" + "github.com/labring/sealos/service/aiproxy/relay/relaymode" +) + +type Meta struct { + ModelMapping map[string]string + Config model.ChannelConfig + APIKey string + OriginModelName string + TokenName string + Group string + RequestURLPath string + BaseURL string + ActualModelName string + ChannelID int + ChannelType int + APIType int + Mode int + TokenID int + PromptTokens int + IsStream bool +} + +func GetByContext(c *gin.Context) *Meta { + meta := Meta{ + Mode: relaymode.GetByPath(c.Request.URL.Path), + ChannelType: c.GetInt(ctxkey.Channel), + ChannelID: c.GetInt(ctxkey.ChannelID), + TokenID: c.GetInt(ctxkey.TokenID), + TokenName: c.GetString(ctxkey.TokenName), + Group: c.GetString(ctxkey.Group), + ModelMapping: c.GetStringMapString(ctxkey.ModelMapping), + OriginModelName: c.GetString(ctxkey.RequestModel), + BaseURL: c.GetString(ctxkey.BaseURL), + APIKey: c.GetString(ctxkey.APIKey), + RequestURLPath: c.Request.URL.String(), + } + cfg, ok := c.Get(ctxkey.Config) + if ok { + meta.Config = 
cfg.(model.ChannelConfig) + } + if meta.BaseURL == "" { + meta.BaseURL = channeltype.ChannelBaseURLs[meta.ChannelType] + } + meta.APIType = channeltype.ToAPIType(meta.ChannelType) + return &meta +} diff --git a/service/aiproxy/relay/model/constant.go b/service/aiproxy/relay/model/constant.go new file mode 100644 index 00000000000..c9d6d645c69 --- /dev/null +++ b/service/aiproxy/relay/model/constant.go @@ -0,0 +1,7 @@ +package model + +const ( + ContentTypeText = "text" + ContentTypeImageURL = "image_url" + ContentTypeInputAudio = "input_audio" +) diff --git a/service/aiproxy/relay/model/general.go b/service/aiproxy/relay/model/general.go new file mode 100644 index 00000000000..8038f5ab751 --- /dev/null +++ b/service/aiproxy/relay/model/general.go @@ -0,0 +1,91 @@ +package model + +type ResponseFormat struct { + JSONSchema *JSONSchema `json:"json_schema,omitempty"` + Type string `json:"type,omitempty"` +} + +type JSONSchema struct { + Schema map[string]interface{} `json:"schema,omitempty"` + Strict *bool `json:"strict,omitempty"` + Description string `json:"description,omitempty"` + Name string `json:"name"` +} + +type Audio struct { + Voice string `json:"voice,omitempty"` + Format string `json:"format,omitempty"` +} + +type StreamOptions struct { + IncludeUsage bool `json:"include_usage,omitempty"` +} + +type GeneralOpenAIRequest struct { + Prediction any `json:"prediction,omitempty"` + Prompt any `json:"prompt,omitempty"` + Input any `json:"input,omitempty"` + Metadata any `json:"metadata,omitempty"` + Functions any `json:"functions,omitempty"` + LogitBias any `json:"logit_bias,omitempty"` + FunctionCall any `json:"function_call,omitempty"` + ToolChoice any `json:"tool_choice,omitempty"` + Stop any `json:"stop,omitempty"` + MaxCompletionTokens *int `json:"max_completion_tokens,omitempty"` + TopLogprobs *int `json:"top_logprobs,omitempty"` + Style *string `json:"style,omitempty"` + Quality *string `json:"quality,omitempty"` + Audio *Audio `json:"audio,omitempty"` + 
PresencePenalty *float64 `json:"presence_penalty,omitempty"` + ResponseFormat *ResponseFormat `json:"response_format,omitempty"` + Store *bool `json:"store,omitempty"` + ServiceTier *string `json:"service_tier,omitempty"` + FrequencyPenalty *float64 `json:"frequency_penalty,omitempty"` + Logprobs *bool `json:"logprobs,omitempty"` + StreamOptions *StreamOptions `json:"stream_options,omitempty"` + Temperature *float64 `json:"temperature,omitempty"` + TopP *float64 `json:"top_p,omitempty"` + ParallelTooCalls *bool `json:"parallel_tool_calls,omitempty"` + EncodingFormat string `json:"encoding_format,omitempty"` + Model string `json:"model,omitempty"` + Instruction string `json:"instruction,omitempty"` + User string `json:"user,omitempty"` + Size string `json:"size,omitempty"` + Modalities []string `json:"modalities,omitempty"` + Messages []Message `json:"messages,omitempty"` + Tools []Tool `json:"tools,omitempty"` + N int `json:"n,omitempty"` + Dimensions int `json:"dimensions,omitempty"` + Seed float64 `json:"seed,omitempty"` + MaxTokens int `json:"max_tokens,omitempty"` + TopK int `json:"top_k,omitempty"` + NumCtx int `json:"num_ctx,omitempty"` + Stream bool `json:"stream,omitempty"` +} + +func (r GeneralOpenAIRequest) ParseInput() []string { + if r.Input == nil { + return nil + } + var input []string + switch v := r.Input.(type) { + case string: + input = []string{v} + case []any: + input = make([]string, 0, len(v)) + for _, item := range v { + if str, ok := item.(string); ok { + input = append(input, str) + } + } + } + return input +} + +type TextToSpeechRequest struct { + Model string `binding:"required" json:"model"` + Input string `binding:"required" json:"input"` + Voice string `binding:"required" json:"voice"` + ResponseFormat string `json:"response_format"` + Speed float64 `json:"speed"` +} diff --git a/service/aiproxy/relay/model/image.go b/service/aiproxy/relay/model/image.go new file mode 100644 index 00000000000..1ba51218c36 --- /dev/null +++ 
b/service/aiproxy/relay/model/image.go @@ -0,0 +1,12 @@ +package model + +type ImageRequest struct { + Model string `json:"model"` + Prompt string `binding:"required" json:"prompt"` + Size string `json:"size,omitempty"` + Quality string `json:"quality,omitempty"` + ResponseFormat string `json:"response_format,omitempty"` + Style string `json:"style,omitempty"` + User string `json:"user,omitempty"` + N int `json:"n,omitempty"` +} diff --git a/service/aiproxy/relay/model/message.go b/service/aiproxy/relay/model/message.go new file mode 100644 index 00000000000..4e5def601e7 --- /dev/null +++ b/service/aiproxy/relay/model/message.go @@ -0,0 +1,90 @@ +package model + +type Message struct { + Content any `json:"content,omitempty"` + Name *string `json:"name,omitempty"` + Role string `json:"role,omitempty"` + ToolCallID string `json:"tool_call_id,omitempty"` + ToolCalls []Tool `json:"tool_calls,omitempty"` +} + +func (m Message) IsStringContent() bool { + _, ok := m.Content.(string) + return ok +} + +func (m Message) StringContent() string { + content, ok := m.Content.(string) + if ok { + return content + } + contentList, ok := m.Content.([]any) + if ok { + var contentStr string + for _, contentItem := range contentList { + contentMap, ok := contentItem.(map[string]any) + if !ok { + continue + } + if contentMap["type"] == ContentTypeText { + if subStr, ok := contentMap["text"].(string); ok { + contentStr += subStr + } + } + } + return contentStr + } + return "" +} + +func (m Message) ParseContent() []MessageContent { + var contentList []MessageContent + content, ok := m.Content.(string) + if ok { + contentList = append(contentList, MessageContent{ + Type: ContentTypeText, + Text: content, + }) + return contentList + } + anyList, ok := m.Content.([]any) + if ok { + for _, contentItem := range anyList { + contentMap, ok := contentItem.(map[string]any) + if !ok { + continue + } + switch contentMap["type"] { + case ContentTypeText: + if subStr, ok := 
contentMap["text"].(string); ok { + contentList = append(contentList, MessageContent{ + Type: ContentTypeText, + Text: subStr, + }) + } + case ContentTypeImageURL: + if subObj, ok := contentMap["image_url"].(map[string]any); ok { + contentList = append(contentList, MessageContent{ + Type: ContentTypeImageURL, + ImageURL: &ImageURL{ + URL: subObj["url"].(string), + }, + }) + } + } + } + return contentList + } + return nil +} + +type ImageURL struct { + URL string `json:"url,omitempty"` + Detail string `json:"detail,omitempty"` +} + +type MessageContent struct { + ImageURL *ImageURL `json:"image_url,omitempty"` + Type string `json:"type,omitempty"` + Text string `json:"text"` +} diff --git a/service/aiproxy/relay/model/misc.go b/service/aiproxy/relay/model/misc.go new file mode 100644 index 00000000000..1007d97e237 --- /dev/null +++ b/service/aiproxy/relay/model/misc.go @@ -0,0 +1,19 @@ +package model + +type Usage struct { + PromptTokens int `json:"prompt_tokens"` + CompletionTokens int `json:"completion_tokens"` + TotalTokens int `json:"total_tokens"` +} + +type Error struct { + Code any `json:"code"` + Message string `json:"message"` + Type string `json:"type"` + Param string `json:"param"` +} + +type ErrorWithStatusCode struct { + Error + StatusCode int `json:"status_code"` +} diff --git a/service/aiproxy/relay/model/tool.go b/service/aiproxy/relay/model/tool.go new file mode 100644 index 00000000000..5a25e419dc9 --- /dev/null +++ b/service/aiproxy/relay/model/tool.go @@ -0,0 +1,14 @@ +package model + +type Tool struct { + ID string `json:"id,omitempty"` + Type string `json:"type,omitempty"` // when splicing claude tools stream messages, it is empty + Function Function `json:"function"` +} + +type Function struct { + Parameters any `json:"parameters,omitempty"` + Arguments string `json:"arguments,omitempty"` + Description string `json:"description,omitempty"` + Name string `json:"name,omitempty"` +} diff --git a/service/aiproxy/relay/price/image.go 
b/service/aiproxy/relay/price/image.go new file mode 100644 index 00000000000..cd40bc01db7 --- /dev/null +++ b/service/aiproxy/relay/price/image.go @@ -0,0 +1,108 @@ +package price + +// 单个图片的价格 +var imageSizePrices = map[string]map[string]float64{ + "dall-e-2": { + "256x256": 1, + "512x512": 1.125, + "1024x1024": 1.25, + }, + "dall-e-3": { + "1024x1024": 1, + "1024x1792": 2, + "1792x1024": 2, + }, + "ali-stable-diffusion-xl": { + "512x1024": 1, + "1024x768": 1, + "1024x1024": 1, + "576x1024": 1, + "1024x576": 1, + }, + "ali-stable-diffusion-v1.5": { + "512x1024": 1, + "1024x768": 1, + "1024x1024": 1, + "576x1024": 1, + "1024x576": 1, + }, + "wanx-v1": { + "1024x1024": 1, + "720x1280": 1, + "1280x720": 1, + }, + "step-1x-medium": { + "256x256": 1, + "512x512": 1, + "768x768": 1, + "1024x1024": 1, + "1280x800": 1, + "800x1280": 1, + }, +} + +var imageGenerationAmounts = map[string][2]int{ + "dall-e-2": {1, 10}, + "dall-e-3": {1, 1}, // OpenAI allows n=1 currently. + "ali-stable-diffusion-xl": {1, 4}, // Ali + "ali-stable-diffusion-v1.5": {1, 4}, // Ali + "wanx-v1": {1, 4}, // Ali + "cogview-3": {1, 1}, + "step-1x-medium": {1, 1}, +} + +var imagePromptLengthLimitations = map[string]int{ + "dall-e-2": 1000, + "dall-e-3": 4000, + "ali-stable-diffusion-xl": 4000, + "ali-stable-diffusion-v1.5": 4000, + "wanx-v1": 4000, + "cogview-3": 833, + "step-1x-medium": 4000, +} + +var imageOriginModelName = map[string]string{ + "ali-stable-diffusion-xl": "stable-diffusion-xl", + "ali-stable-diffusion-v1.5": "stable-diffusion-v1.5", +} + +func GetImageOriginModelName() map[string]string { + return imageOriginModelName +} + +func IsValidImageSize(model string, size string) bool { + if !GetBillingEnabled() { + return true + } + if model == "cogview-3" || imageSizePrices[model] == nil { + return true + } + _, ok := imageSizePrices[model][size] + return ok +} + +func IsValidImagePromptLength(model string, promptLength int) bool { + if !GetBillingEnabled() { + return true + } + 
maxPromptLength, ok := imagePromptLengthLimitations[model] + return !ok || promptLength <= maxPromptLength +} + +func IsWithinRange(element string, value int) bool { + if !GetBillingEnabled() { + return true + } + amounts, ok := imageGenerationAmounts[element] + return !ok || (value >= amounts[0] && value <= amounts[1]) +} + +func GetImageSizePrice(model string, size string) float64 { + if !GetBillingEnabled() { + return 0 + } + if price, ok := imageSizePrices[model][size]; ok { + return price + } + return 1 +} diff --git a/service/aiproxy/relay/price/model.go b/service/aiproxy/relay/price/model.go new file mode 100644 index 00000000000..ed6104a5c35 --- /dev/null +++ b/service/aiproxy/relay/price/model.go @@ -0,0 +1,206 @@ +package price + +import ( + "fmt" + "sync" + "sync/atomic" + + json "github.com/json-iterator/go" + + "github.com/labring/sealos/service/aiproxy/common/conv" + "github.com/labring/sealos/service/aiproxy/common/logger" +) + +const ( + // /1K tokens + PriceUnit = 1000 +) + +// ModelPrice +// https://platform.openai.com/docs/models/model-endpoint-compatibility +// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf +// https://openai.com/pricing +// 价格单位:人民币/1K tokens +var ( + modelPrice = map[string]float64{} + completionPrice = map[string]float64{} + modelPriceMu sync.RWMutex + completionPriceMu sync.RWMutex +) + +var ( + DefaultModelPrice map[string]float64 + DefaultCompletionPrice map[string]float64 +) + +func init() { + DefaultModelPrice = make(map[string]float64) + modelPriceMu.RLock() + for k, v := range modelPrice { + DefaultModelPrice[k] = v + } + modelPriceMu.RUnlock() + + DefaultCompletionPrice = make(map[string]float64) + completionPriceMu.RLock() + for k, v := range completionPrice { + DefaultCompletionPrice[k] = v + } + completionPriceMu.RUnlock() +} + +func AddNewMissingPrice(oldPrice string) string { + newPrice := make(map[string]float64) + err := json.Unmarshal(conv.StringToBytes(oldPrice), &newPrice) + if err != nil { + 
logger.SysError("error unmarshalling old price: " + err.Error()) + return oldPrice + } + for k, v := range DefaultModelPrice { + if _, ok := newPrice[k]; !ok { + newPrice[k] = v + } + } + jsonBytes, err := json.Marshal(newPrice) + if err != nil { + logger.SysError("error marshalling new price: " + err.Error()) + return oldPrice + } + return conv.BytesToString(jsonBytes) +} + +func ModelPrice2JSONString() string { + modelPriceMu.RLock() + jsonBytes, err := json.Marshal(modelPrice) + modelPriceMu.RUnlock() + if err != nil { + logger.SysError("error marshalling model price: " + err.Error()) + } + return conv.BytesToString(jsonBytes) +} + +var billingEnabled atomic.Bool + +func init() { + billingEnabled.Store(true) +} + +func GetBillingEnabled() bool { + return billingEnabled.Load() +} + +func SetBillingEnabled(enabled bool) { + billingEnabled.Store(enabled) +} + +func UpdateModelPriceByJSONString(jsonStr string) error { + newModelPrice := make(map[string]float64) + err := json.Unmarshal(conv.StringToBytes(jsonStr), &newModelPrice) + if err != nil { + logger.SysError("error unmarshalling model price: " + err.Error()) + return err + } + modelPriceMu.Lock() + modelPrice = newModelPrice + modelPriceMu.Unlock() + return nil +} + +func GetModelPrice(mapedName string, reqModel string, channelType int) (float64, bool) { + if !GetBillingEnabled() { + return 0, true + } + price, ok := getModelPrice(mapedName, channelType) + if !ok && reqModel != "" { + price, ok = getModelPrice(reqModel, channelType) + } + return price, ok +} + +func getModelPrice(modelName string, channelType int) (float64, bool) { + model := fmt.Sprintf("%s(%d)", modelName, channelType) + modelPriceMu.RLock() + defer modelPriceMu.RUnlock() + price, ok := modelPrice[model] + if ok { + return price, true + } + if price, ok := DefaultModelPrice[model]; ok { + return price, true + } + price, ok = modelPrice[modelName] + if ok { + return price, true + } + if price, ok := DefaultModelPrice[modelName]; ok { + return 
price, true + } + return 0, false +} + +func CompletionPrice2JSONString() string { + completionPriceMu.RLock() + jsonBytes, err := json.Marshal(completionPrice) + completionPriceMu.RUnlock() + if err != nil { + logger.SysError("error marshalling completion price: " + err.Error()) + } + return conv.BytesToString(jsonBytes) +} + +func UpdateCompletionPriceByJSONString(jsonStr string) error { + newCompletionPrice := make(map[string]float64) + err := json.Unmarshal(conv.StringToBytes(jsonStr), &newCompletionPrice) + if err != nil { + logger.SysError("error unmarshalling completion price: " + err.Error()) + return err + } + completionPriceMu.Lock() + completionPrice = newCompletionPrice + completionPriceMu.Unlock() + return nil +} + +func GetCompletionPrice(name string, reqModel string, channelType int) (float64, bool) { + if !GetBillingEnabled() { + return 0, true + } + price, ok := getCompletionPrice(name, channelType) + if !ok && reqModel != "" { + price, ok = getCompletionPrice(reqModel, channelType) + } + return price, ok +} + +func getCompletionPrice(name string, channelType int) (float64, bool) { + model := fmt.Sprintf("%s(%d)", name, channelType) + completionPriceMu.RLock() + defer completionPriceMu.RUnlock() + price, ok := completionPrice[model] + if ok { + return price, true + } + if price, ok := DefaultCompletionPrice[model]; ok { + return price, true + } + price, ok = completionPrice[name] + if ok { + return price, true + } + if price, ok := DefaultCompletionPrice[name]; ok { + return price, true + } + return getModelPrice(name, channelType) +} + +func GetModelPriceMap() map[string]float64 { + modelPriceMu.RLock() + defer modelPriceMu.RUnlock() + return modelPrice +} + +func GetCompletionPriceMap() map[string]float64 { + completionPriceMu.RLock() + defer completionPriceMu.RUnlock() + return completionPrice +} diff --git a/service/aiproxy/relay/relaymode/define.go b/service/aiproxy/relay/relaymode/define.go new file mode 100644 index 00000000000..96d094382ca 
--- /dev/null +++ b/service/aiproxy/relay/relaymode/define.go @@ -0,0 +1,14 @@ +package relaymode + +const ( + Unknown = iota + ChatCompletions + Completions + Embeddings + Moderations + ImagesGenerations + Edits + AudioSpeech + AudioTranscription + AudioTranslation +) diff --git a/service/aiproxy/relay/relaymode/helper.go b/service/aiproxy/relay/relaymode/helper.go new file mode 100644 index 00000000000..5dc188b3f47 --- /dev/null +++ b/service/aiproxy/relay/relaymode/helper.go @@ -0,0 +1,28 @@ +package relaymode + +import "strings" + +func GetByPath(path string) int { + switch { + case strings.HasPrefix(path, "/v1/chat/completions"): + return ChatCompletions + case strings.HasPrefix(path, "/v1/completions"): + return Completions + case strings.HasSuffix(path, "embeddings"): + return Embeddings + case strings.HasPrefix(path, "/v1/moderations"): + return Moderations + case strings.HasPrefix(path, "/v1/images/generations"): + return ImagesGenerations + case strings.HasPrefix(path, "/v1/edits"): + return Edits + case strings.HasPrefix(path, "/v1/audio/speech"): + return AudioSpeech + case strings.HasPrefix(path, "/v1/audio/transcriptions"): + return AudioTranscription + case strings.HasPrefix(path, "/v1/audio/translations"): + return AudioTranslation + default: + return Unknown + } +} diff --git a/service/aiproxy/router/api.go b/service/aiproxy/router/api.go new file mode 100644 index 00000000000..9888da543cd --- /dev/null +++ b/service/aiproxy/router/api.go @@ -0,0 +1,102 @@ +package router + +import ( + "github.com/gin-contrib/gzip" + "github.com/labring/sealos/service/aiproxy/common/env" + "github.com/labring/sealos/service/aiproxy/controller" + "github.com/labring/sealos/service/aiproxy/middleware" + + "github.com/gin-gonic/gin" +) + +func SetAPIRouter(router *gin.Engine) { + apiRouter := router.Group("/api") + if env.Bool("GZIP_ENABLED", false) { + apiRouter.Use(gzip.Gzip(gzip.DefaultCompression)) + } + apiRouter.Use(middleware.AdminAuth) + { + 
apiRouter.GET("/status", controller.GetStatus) + apiRouter.GET("/models", controller.BuiltinModels) + apiRouter.GET("/models/price", controller.ModelPrice) + apiRouter.GET("/models/enabled", controller.EnabledModels) + apiRouter.GET("/models/enabled/price", controller.EnabledModelsAndPrice) + apiRouter.GET("/models/enabled/channel", controller.EnabledType2Models) + apiRouter.GET("/models/enabled/channel/price", controller.EnabledType2ModelsAndPrice) + apiRouter.GET("/models/enabled/default", controller.ChannelDefaultModels) + apiRouter.GET("/models/enabled/default/:type", controller.ChannelDefaultModelsByType) + + groupsRoute := apiRouter.Group("/groups") + { + groupsRoute.GET("/", controller.GetGroups) + groupsRoute.GET("/search", controller.SearchGroups) + } + groupRoute := apiRouter.Group("/group") + { + groupRoute.POST("/", controller.CreateGroup) + groupRoute.GET("/:id", controller.GetGroup) + groupRoute.DELETE("/:id", controller.DeleteGroup) + groupRoute.POST("/:id/status", controller.UpdateGroupStatus) + groupRoute.POST("/:id/qpm", controller.UpdateGroupQPM) + } + optionRoute := apiRouter.Group("/option") + { + optionRoute.GET("/", controller.GetOptions) + optionRoute.PUT("/", controller.UpdateOption) + optionRoute.PUT("/batch", controller.UpdateOptions) + } + channelsRoute := apiRouter.Group("/channels") + { + channelsRoute.GET("/", controller.GetChannels) + channelsRoute.GET("/all", controller.GetAllChannels) + channelsRoute.POST("/", controller.AddChannels) + channelsRoute.GET("/search", controller.SearchChannels) + channelsRoute.GET("/test", controller.TestChannels) + channelsRoute.GET("/update_balance", controller.UpdateAllChannelsBalance) + } + channelRoute := apiRouter.Group("/channel") + { + channelRoute.GET("/:id", controller.GetChannel) + channelRoute.POST("/", controller.AddChannel) + channelRoute.PUT("/", controller.UpdateChannel) + channelRoute.POST("/:id/status", controller.UpdateChannelStatus) + channelRoute.DELETE("/:id", 
controller.DeleteChannel) + channelRoute.GET("/test/:id", controller.TestChannel) + channelRoute.GET("/update_balance/:id", controller.UpdateChannelBalance) + } + tokensRoute := apiRouter.Group("/tokens") + { + tokensRoute.GET("/", controller.GetTokens) + tokensRoute.GET("/:id", controller.GetToken) + tokensRoute.PUT("/:id", controller.UpdateToken) + tokensRoute.POST("/:id/status", controller.UpdateTokenStatus) + tokensRoute.POST("/:id/name", controller.UpdateTokenName) + tokensRoute.DELETE("/:id", controller.DeleteToken) + tokensRoute.GET("/search", controller.SearchTokens) + } + tokenRoute := apiRouter.Group("/token") + { + tokenRoute.GET("/:group/search", controller.SearchGroupTokens) + tokenRoute.GET("/:group", controller.GetGroupTokens) + tokenRoute.GET("/:group/:id", controller.GetGroupToken) + tokenRoute.POST("/:group", controller.AddToken) + tokenRoute.PUT("/:group/:id", controller.UpdateGroupToken) + tokenRoute.POST("/:group/:id/status", controller.UpdateGroupTokenStatus) + tokenRoute.POST("/:group/:id/name", controller.UpdateGroupTokenName) + tokenRoute.DELETE("/:group/:id", controller.DeleteGroupToken) + } + logsRoute := apiRouter.Group("/logs") + { + logsRoute.GET("/", controller.GetLogs) + logsRoute.DELETE("/", controller.DeleteHistoryLogs) + logsRoute.GET("/stat", controller.GetLogsStat) + logsRoute.GET("/search", controller.SearchLogs) + logsRoute.GET("/consume_error", controller.SearchConsumeError) + } + logRoute := apiRouter.Group("/log") + { + logRoute.GET("/:group/search", controller.SearchGroupLogs) + logRoute.GET("/:group", controller.GetGroupLogs) + } + } +} diff --git a/service/aiproxy/router/main.go b/service/aiproxy/router/main.go new file mode 100644 index 00000000000..a704ab8ecf2 --- /dev/null +++ b/service/aiproxy/router/main.go @@ -0,0 +1,10 @@ +package router + +import ( + "github.com/gin-gonic/gin" +) + +func SetRouter(router *gin.Engine) { + SetAPIRouter(router) + SetRelayRouter(router) +} diff --git a/service/aiproxy/router/relay.go 
b/service/aiproxy/router/relay.go new file mode 100644 index 00000000000..f1ff1c85e7f --- /dev/null +++ b/service/aiproxy/router/relay.go @@ -0,0 +1,79 @@ +package router + +import ( + "github.com/labring/sealos/service/aiproxy/controller" + "github.com/labring/sealos/service/aiproxy/middleware" + + "github.com/gin-gonic/gin" +) + +func SetRelayRouter(router *gin.Engine) { + router.Use(middleware.CORS()) + router.Use(middleware.GlobalAPIRateLimit) + // https://platform.openai.com/docs/api-reference/introduction + modelsRouter := router.Group("/v1/models") + modelsRouter.Use(middleware.TokenAuth) + { + modelsRouter.GET("", controller.ListModels) + modelsRouter.GET("/:model", controller.RetrieveModel) + } + dashboardRouter := router.Group("/v1/dashboard") + dashboardRouter.Use(middleware.TokenAuth) + { + dashboardRouter.GET("/billing/subscription", controller.GetSubscription) + dashboardRouter.GET("/billing/usage", controller.GetUsage) + } + relayV1Router := router.Group("/v1") + relayV1Router.Use(middleware.RelayPanicRecover, middleware.TokenAuth, middleware.Distribute) + { + relayV1Router.POST("/completions", controller.Relay) + relayV1Router.POST("/chat/completions", controller.Relay) + relayV1Router.POST("/edits", controller.Relay) + relayV1Router.POST("/images/generations", controller.Relay) + relayV1Router.POST("/images/edits", controller.RelayNotImplemented) + relayV1Router.POST("/images/variations", controller.RelayNotImplemented) + relayV1Router.POST("/embeddings", controller.Relay) + relayV1Router.POST("/engines/:model/embeddings", controller.Relay) + relayV1Router.POST("/audio/transcriptions", controller.Relay) + relayV1Router.POST("/audio/translations", controller.Relay) + relayV1Router.POST("/audio/speech", controller.Relay) + relayV1Router.GET("/files", controller.RelayNotImplemented) + relayV1Router.POST("/files", controller.RelayNotImplemented) + relayV1Router.DELETE("/files/:id", controller.RelayNotImplemented) + relayV1Router.GET("/files/:id", 
controller.RelayNotImplemented) + relayV1Router.GET("/files/:id/content", controller.RelayNotImplemented) + relayV1Router.POST("/fine_tuning/jobs", controller.RelayNotImplemented) + relayV1Router.GET("/fine_tuning/jobs", controller.RelayNotImplemented) + relayV1Router.GET("/fine_tuning/jobs/:id", controller.RelayNotImplemented) + relayV1Router.POST("/fine_tuning/jobs/:id/cancel", controller.RelayNotImplemented) + relayV1Router.GET("/fine_tuning/jobs/:id/events", controller.RelayNotImplemented) + relayV1Router.DELETE("/models/:model", controller.RelayNotImplemented) + relayV1Router.POST("/moderations", controller.Relay) + relayV1Router.POST("/assistants", controller.RelayNotImplemented) + relayV1Router.GET("/assistants/:id", controller.RelayNotImplemented) + relayV1Router.POST("/assistants/:id", controller.RelayNotImplemented) + relayV1Router.DELETE("/assistants/:id", controller.RelayNotImplemented) + relayV1Router.GET("/assistants", controller.RelayNotImplemented) + relayV1Router.POST("/assistants/:id/files", controller.RelayNotImplemented) + relayV1Router.GET("/assistants/:id/files/:fileId", controller.RelayNotImplemented) + relayV1Router.DELETE("/assistants/:id/files/:fileId", controller.RelayNotImplemented) + relayV1Router.GET("/assistants/:id/files", controller.RelayNotImplemented) + relayV1Router.POST("/threads", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id", controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id", controller.RelayNotImplemented) + relayV1Router.DELETE("/threads/:id", controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id/messages", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/messages/:messageId", controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id/messages/:messageId", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/messages/:messageId/files/:filesId", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/messages/:messageId/files", 
controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id/runs", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/runs/:runsId", controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id/runs/:runsId", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/runs", controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id/runs/:runsId/submit_tool_outputs", controller.RelayNotImplemented) + relayV1Router.POST("/threads/:id/runs/:runsId/cancel", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/runs/:runsId/steps/:stepId", controller.RelayNotImplemented) + relayV1Router.GET("/threads/:id/runs/:runsId/steps", controller.RelayNotImplemented) + } +} diff --git a/service/go.work b/service/go.work index e9b0e93f801..1978383b13f 100644 --- a/service/go.work +++ b/service/go.work @@ -1,15 +1,16 @@ -go 1.22 +go 1.22.7 use ( - ./database - ./pay + . ./account - ./launchpad + ./aiproxy + ./database ./exceptionmonitor - . + ./launchpad + ./pay ) replace ( github.com/labring/sealos/controllers/account => ../controllers/account github.com/labring/sealos/controllers/user => ../controllers/user -) \ No newline at end of file +) diff --git a/service/go.work.sum b/service/go.work.sum index 577fc5c95c1..893f229853f 100644 --- a/service/go.work.sum +++ b/service/go.work.sum @@ -1,6 +1,24 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cel.dev/expr v0.16.1/go.mod h1:AsGA5zb3WruAEQeQng1RZdGEXmBj0jvMWh6l5SnNuC8= cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod 
h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= cloud.google.com/go v0.97.0 h1:3DXvAyifywvq64LfkKaMOmkWPS1CikIQdMe2lY9vxU8= cloud.google.com/go v0.110.0 h1:Zc8gqp3+a9/Eyph2KDmcGaPtbKRIoqq4YTlL4NMD0Ys= cloud.google.com/go v0.110.0/go.mod h1:SJnCLqQ0FCFGSZMUNUf84MV3Aia54kn7pi8st7tMzaY= @@ -10,6 +28,8 @@ cloud.google.com/go v0.110.6/go.mod h1:+EYjdK8e5RME/VY/qLCAtuyALQ9q67dvuum8i+H5x cloud.google.com/go v0.110.8/go.mod h1:Iz8AkXJf1qmxC3Oxoep8R1T36w8B92yU29PcBhHO5fk= cloud.google.com/go v0.111.0 h1:YHLKNupSD1KqjDbQ3+LVdQ81h/UJbJyZG203cEfnQgM= cloud.google.com/go v0.111.0/go.mod h1:0mibmpKP1TyOOFYQY5izo0LnT+ecvOQ0Sg3OdmMiNRU= +cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE= +cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U= 
cloud.google.com/go/accessapproval v1.6.0 h1:x0cEHro/JFPd7eS4BlEWNTMecIj2HdXjOVB5BtvwER0= cloud.google.com/go/accessapproval v1.6.0/go.mod h1:R0EiYnwV5fsRFiKZkPHr6mwyk2wxUJ30nL4j2pcFY2E= cloud.google.com/go/accessapproval v1.7.1/go.mod h1:JYczztsHRMK7NTXb6Xw+dwbs/WnOJxbo/2mTI+Kgg68= @@ -92,6 +112,12 @@ cloud.google.com/go/beyondcorp v0.5.0/go.mod h1:uFqj9X+dSfrheVp7ssLTaRHd2EHqSL4Q cloud.google.com/go/beyondcorp v1.0.0/go.mod h1:YhxDWw946SCbmcWo3fAhw3V4XZMSpQ/VYfcKGAEU8/4= cloud.google.com/go/beyondcorp v1.0.3 h1:VXf9SnrnSmj2BF2cHkoTHvOUp8gjsz1KJFOMW7czdsY= cloud.google.com/go/beyondcorp v1.0.3/go.mod h1:HcBvnEd7eYr+HGDd5ZbuVmBYX019C6CEXBonXbCVwJo= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/bigquery v1.50.0 h1:RscMV6LbnAmhAzD893Lv9nXXy2WCaJmbxYPWDLbGqNQ= cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU= cloud.google.com/go/bigquery v1.53.0/go.mod h1:3b/iXjRQGU4nKa87cXeg6/gogLjO8C6PmuM8i5Bi/u4= @@ -139,6 +165,7 @@ cloud.google.com/go/compute v1.19.1/go.mod h1:6ylj3a05WF8leseCdIf77NK0g1ey+nj5IK cloud.google.com/go/compute v1.19.3/go.mod h1:qxvISKp/gYnXkSAD1ppcSOveRAmzxicEv/JlizULFrI= cloud.google.com/go/compute v1.20.1 h1:6aKEtlUiwEpJzM001l0yFkpXmUVXaN8W+fbkb2AZNbg= cloud.google.com/go/compute v1.20.1/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= +cloud.google.com/go/compute v1.21.0 h1:JNBsyXVoOoNJtTQcnEY5uYpZIbeCTYIeDe0Xh1bySMk= cloud.google.com/go/compute v1.21.0/go.mod 
h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= cloud.google.com/go/compute v1.23.1/go.mod h1:CqB3xpmPKKt3OJpW2ndFIXnA9A4xAy/F3Xp1ixncW78= @@ -148,6 +175,8 @@ cloud.google.com/go/compute/metadata v0.2.0 h1:nBbNSZyDpkNlo3DepaaLKVuO7ClyifSAm cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= +cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= +cloud.google.com/go/compute/metadata v0.5.0/go.mod h1:aHnloV2TPI38yx4s9+wAZhHykWvVCfu7hQbF+9CWoiY= cloud.google.com/go/contactcenterinsights v1.6.0 h1:jXIpfcH/VYSE1SYcPzO0n1VVb+sAamiLOgCw45JbOQk= cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= cloud.google.com/go/contactcenterinsights v1.10.0/go.mod h1:bsg/R7zGLYMVxFFzfh9ooLTruLRCG9fnzhH9KznHhbM= @@ -203,6 +232,8 @@ cloud.google.com/go/dataqna v0.7.0/go.mod h1:Lx9OcIIeqCrw1a6KdO3/5KMP1wAmTc0slZW cloud.google.com/go/dataqna v0.8.1/go.mod h1:zxZM0Bl6liMePWsHA8RMGAfmTG34vJMapbHAxQ5+WA8= cloud.google.com/go/dataqna v0.8.4 h1:NJnu1kAPamZDs/if3bJ3+Wb6tjADHKL83NUWsaIp2zg= cloud.google.com/go/dataqna v0.8.4/go.mod h1:mySRKjKg5Lz784P6sCov3p1QD+RZQONRMRjzGNcFd0c= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/datastore v1.11.0 h1:iF6I/HaLs3Ado8uRKMvZRvF/ZLkWaWE9i8AiHzbC774= cloud.google.com/go/datastore v1.11.0/go.mod h1:TvGxBIHCS50u8jzG+AW/ppf87v1of8nwzFNgEZU1D3c= cloud.google.com/go/datastore v1.13.0/go.mod h1:KjdB88W897MRITkvWWJrg2OUtrR5XVj1EoLgSp6/N70= @@ -260,6 +291,7 @@ cloud.google.com/go/filestore 
v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466d cloud.google.com/go/filestore v1.7.1/go.mod h1:y10jsorq40JJnjR/lQ8AfFbbcGlw3g+Dp8oN7i7FjV4= cloud.google.com/go/filestore v1.8.0 h1:/+wUEGwk3x3Kxomi2cP5dsR8+SIXxo7M0THDjreFSYo= cloud.google.com/go/filestore v1.8.0/go.mod h1:S5JCxIbFjeBhWMTfIYH2Jx24J6BqjwpkkPl+nBA5DlI= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= cloud.google.com/go/firestore v1.9.0 h1:IBlRyxgGySXu5VuW0RgGFlTtLukSnNkpDiEOMkQkmpA= cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE= cloud.google.com/go/firestore v1.11.0/go.mod h1:b38dKhgzlmNNGTNZZwe7ZRFEuRab1Hay3/DBsIGKKy4= @@ -349,6 +381,7 @@ cloud.google.com/go/longrunning v0.5.1/go.mod h1:spvimkwdz6SPWKEt/XBij79E9fiTkHS cloud.google.com/go/longrunning v0.5.2/go.mod h1:nqo6DQbNV2pXhGDbDMoN2bWz68MjZUzqv2YttZiveCs= cloud.google.com/go/longrunning v0.5.4 h1:w8xEcbZodnA2BbW6sVirkkoC+1gP8wS57EUUgGS0GVg= cloud.google.com/go/longrunning v0.5.4/go.mod h1:zqNVncI0BOP8ST6XQD1+VcvuShMmq7+xFSzOL++V0dI= +cloud.google.com/go/longrunning v0.6.1/go.mod h1:nHISoOZpBcmlwbJmiVk5oDRz0qG/ZxPynEGs1iZ79s0= cloud.google.com/go/managedidentities v1.5.0 h1:ZRQ4k21/jAhrHBVKl/AY7SjgzeJwG1iZa+mJ82P+VNg= cloud.google.com/go/managedidentities v1.5.0/go.mod h1:+dWcZ0JlUmpuxpIDfyP5pP5y0bLdRwOS4Lp7gMni/LA= cloud.google.com/go/managedidentities v1.6.1/go.mod h1:h/irGhTN2SkZ64F43tfGPMbHnypMbu4RB3yl8YcuEak= @@ -439,6 +472,10 @@ cloud.google.com/go/privatecatalog v0.8.0/go.mod h1:nQ6pfaegeDAq/Q5lrfCQzQLhubPi cloud.google.com/go/privatecatalog v0.9.1/go.mod h1:0XlDXW2unJXdf9zFz968Hp35gl/bhF4twwpXZAW50JA= cloud.google.com/go/privatecatalog v0.9.4 h1:Vo10IpWKbNvc/z/QZPVXgCiwfjpWoZ/wbgful4Uh/4E= cloud.google.com/go/privatecatalog v0.9.4/go.mod h1:SOjm93f+5hp/U3PqMZAHTtBtluqLygrDrVO8X8tYtG0= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod 
h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= cloud.google.com/go/pubsub v1.30.0 h1:vCge8m7aUKBJYOgrZp7EsNDf6QMd2CAlXZqWTn3yq6s= cloud.google.com/go/pubsub v1.30.0/go.mod h1:qWi1OPS0B+b5L+Sg6Gmc9zD1Y+HaM0MdUr7LsupY1P4= cloud.google.com/go/pubsub v1.33.0 h1:6SPCPvWav64tj0sVX/+npCBKhUi/UjJehy9op/V3p2g= @@ -535,6 +572,11 @@ cloud.google.com/go/speech v1.15.0/go.mod h1:y6oH7GhqCaZANH7+Oe0BhgIogsNInLlz542 cloud.google.com/go/speech v1.19.0/go.mod h1:8rVNzU43tQvxDaGvqOhpDqgkJTFowBpDvCJ14kGlJYo= cloud.google.com/go/speech v1.21.0 h1:qkxNao58oF8ghAHE1Eghen7XepawYEN5zuZXYWaUTA4= cloud.google.com/go/speech v1.21.0/go.mod h1:wwolycgONvfz2EDU8rKuHRW3+wc9ILPsAWoikBEWavY= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.29.0 h1:6weCgzRvMg7lzuUurI4697AqIRPU1SvzHhynwpW31jI= cloud.google.com/go/storage v1.29.0/go.mod h1:4puEjyTKnku6gfKoTfNOU/W+a9JyuVNxjpS5GBrB8h4= cloud.google.com/go/storage v1.30.1 h1:uOdMxAs8HExqBlnLtnQyP0YkvbiDpdGShGKtx6U/oNM= @@ -569,6 +611,7 @@ cloud.google.com/go/translate v1.7.0/go.mod h1:lMGRudH1pu7I3n3PETiOB2507gf3HnfLV cloud.google.com/go/translate v1.8.2/go.mod h1:d1ZH5aaOA0CNhWeXeC8ujd4tdCFw8XoNWRljklu5RHs= cloud.google.com/go/translate v1.10.0 h1:tncNaKmlZnayMMRX/mMM2d5AJftecznnxVBD4w070NI= cloud.google.com/go/translate v1.10.0/go.mod h1:Kbq9RggWsbqZ9W5YpM94Q1Xv4dshw/gr/SHfsl5yCZ0= +cloud.google.com/go/translate v1.10.3/go.mod 
h1:GW0vC1qvPtd3pgtypCv4k4U8B7EdgK9/QEF2aJEUovs= cloud.google.com/go/video v1.15.0 h1:upIbnGI0ZgACm58HPjAeBMleW3sl5cT84AbYQ8PWOgM= cloud.google.com/go/video v1.15.0/go.mod h1:SkgaXwT+lIIAKqWAJfktHT/RbgjSuY6DobxEp0C5yTQ= cloud.google.com/go/video v1.19.0/go.mod h1:9qmqPqw/Ib2tLqaeHgtakU+l5TcJxCJbhFXM7UJjVzU= @@ -616,6 +659,7 @@ cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcP cloud.google.com/go/workflows v1.11.1/go.mod h1:Z+t10G1wF7h8LgdY/EmRcQY8ptBD/nvofaL6FqlET6g= cloud.google.com/go/workflows v1.12.3 h1:qocsqETmLAl34mSa01hKZjcqAvt699gaoFbooGGMvaM= cloud.google.com/go/workflows v1.12.3/go.mod h1:fmOUeeqEwPzIU81foMjTRQIdwQHADi/vEr1cx9R1m5g= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= @@ -631,9 +675,9 @@ github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZ github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8= github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio 
v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/hcsshim v0.12.0-rc.0 h1:wX/F5huJxH9APBkhKSEAqaiZsuBvbbDnyBROZAqsSaY= @@ -678,25 +722,38 @@ github.com/apache/thrift v0.16.0 h1:qEy6UW60iVOlUy+b9ZR0d5WzUWYGOo4HfopoyBaNmoY= github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU= github.com/apecloud/kubeblocks v0.8.4 h1:8esK2e9iiziPXTlGXmX2uFTU/YGFXFvyvqnCBODqWM4= github.com/apecloud/kubeblocks v0.8.4/go.mod h1:xQpzfMy4V+WJI5IKBWB02qsKAlVR3nAE71CPkAs2uOs= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/astaxie/beego v1.12.3 h1:SAQkdD2ePye+v8Gn1r4X6IKZM1wd28EyUOVQ3PDSOOQ= github.com/astaxie/beego v1.12.3/go.mod h1:p3qIm0Ryx7zeBHLljmd7omloyca1s4yu1a8kM1FkpIA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19/go.mod h1:zminj5ucw7w0r65bP6nhyOd3xL6veAUMc3ElGMoLVb4= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4/go.mod h1:4GQbF1vJzG60poZqWatZlhP31y8PGCCVTvIGPdaaYJ0= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.5/go.mod h1:wrMCEwjFPms+V86TCQQeOxQF/If4vT44FGIOFiMC2ck= 
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4/go.mod h1:Tp/ly1cTjRLGBBmNccFumbZ8oqpZlpdhFf80SrRh4is= +github.com/aws/aws-sdk-go-v2/service/sts v1.32.4/go.mod h1:9XEUty5v5UAsMiFOBJrNibZgwCeOma73jgGwwhgffa8= +github.com/bazelbuild/rules_go v0.49.0/go.mod h1:Dhcz716Kqg1RHNWos+N6MlXNkjNP2EwZQ0LukRKJfMs= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4= +github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g= 
github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= @@ -706,6 +763,9 @@ github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/checkpoint-restore/go-criu/v5 v5.3.0 h1:wpFFOoomK3389ue2lAb0Boag6XPht5QYpipxmSNL4d8= github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= +github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= +github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= +github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/logex v1.2.1 h1:XHDu3E6q+gdHgsdTPH6ImJMIp436vR6MPtH8gP05QzM= github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= @@ -720,8 +780,7 @@ github.com/cilium/ebpf v0.7.0/go.mod h1:/oI2+1shJiTGAMgl6/RgJr36Eo1jzrRcAWbcXO2u github.com/clbanning/mxj/v2 v2.5.7 h1:7q5lvUpaPF/WOkqgIDiwjBJaznaLCCBd78pi8ZyAnE0= github.com/clbanning/mxj/v2 v2.5.7/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403 h1:cqQfy1jclcSy/FwLjemeg3SR1yaINm74aQyupQ0Bl8M= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod 
h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= @@ -736,6 +795,7 @@ github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+g github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 h1:7To3pQ+pZo0i3dsWEbinPNFs5gPSBOsJtx3wTT94VBY= github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20240905190251-b4127c9b8d78/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/containerd/cgroups/v3 v3.0.2 h1:f5WFqIVSgo5IZmtTT3qVBo6TzI1ON6sycSBKkymb9L0= github.com/containerd/cgroups/v3 v3.0.2/go.mod h1:JUgITrzdFqp42uI2ryGA+ge0ap/nxzYgkGmIcetmErE= github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw= @@ -751,12 +811,15 @@ github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmf github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.3.1 h1:yi21YpKnrx1gt5R+la8n5WgS0kCrsPp33dmEyHReZr4= github.com/coreos/go-semver v0.3.1/go.mod h1:irMmmIw/7yzSRPWryHsK7EYSg09caPQL03VsM8rvUec= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.4.0 h1:y9YHcjnjynCd/DVbg5j9L/33jQM3MxJlbj/zWskzfGU= github.com/coreos/go-systemd/v22 v22.4.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= github.com/cpuguy83/go-md2man/v2 
v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9 h1:uDmaGzcdjhF4i/plgjmEsriH11Y0o7RKapEf/LDaM3w= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= @@ -770,9 +833,7 @@ github.com/emicklei/go-restful/v3 v3.8.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry github.com/emicklei/go-restful/v3 v3.9.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/emicklei/go-restful/v3 v3.10.2 h1:hIovbnmBTLjHXkqEBUz3HGpXZdM7ZrE9fJIZIqlJLqE= github.com/emicklei/go-restful/v3 v3.10.2/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0 h1:dulLQAYQFYtG5MTplgNGHWuV2D+OBD+Z8lmDBmbLg+s= @@ -782,21 +843,28 @@ github.com/envoyproxy/go-control-plane v0.11.1-0.20230524094728-9239064ad72f h1: github.com/envoyproxy/go-control-plane v0.11.1-0.20230524094728-9239064ad72f/go.mod h1:sfYdkwUW4BA3PbKjySwjJy+O4Pu0h62rlqCMHNk+K+Q= 
github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM= github.com/envoyproxy/go-control-plane v0.11.1/go.mod h1:uhMcXKCQMEJHiAb0w+YGefQLaTEw+YhGluxZkrTmD0g= +github.com/envoyproxy/go-control-plane v0.13.0/go.mod h1:GRaKG3dwvFoTg4nj7aXdZnvMg4d7nvT/wl9WgVXn3Q8= github.com/envoyproxy/protoc-gen-validate v0.1.0 h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.10.1 h1:c0g45+xCJhdgFGw7a5QAfdS4byAbud7miNWJ1WwEVf8= github.com/envoyproxy/protoc-gen-validate v0.10.1/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA= github.com/envoyproxy/protoc-gen-validate v1.0.2/go.mod h1:GpiZQP3dDbg4JouG/NNS7QWXpgx6x8QiMKdmN72jogE= +github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk= github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/flowstack/go-jsonschema v0.1.1 h1:dCrjGJRXIlbDsLAgTJZTjhwUJnnxVWl1OgNyYh5nyDc= github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= 
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gomail/gomail v0.0.0-20160411212932-81ebce5c23df h1:Bao6dhmbTA1KFVxmJ6nBoMuOJit2yjEgLJpIMYpop0E= github.com/go-gomail/gomail v0.0.0-20160411212932-81ebce5c23df/go.mod h1:GJr+FCSXshIwgHBtLglIg9M2l2kQSi6QjVAngtzI08Y= github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU= @@ -805,43 +873,46 @@ github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNV github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= -github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= -github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-logr/zapr v1.2.4 h1:QHVo+6stLbfJmYGkQ7uGHUCu5hnAFAj6mDe6Ea0SeOo= github.com/go-logr/zapr v1.2.4/go.mod h1:FyHWQIzQORZ0QVE1BtVHv3cKtNLuXsbNLtpuhNapBOA= github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/jsonreference v0.20.1/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod 
h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.6 h1:mkgN1ofwASrYnJ5W6U/BxG15eXXXjirgZc7CLqkcaro= github.com/godbus/dbus/v5 v5.0.6/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.1.0 h1:/d3pCKDPWNnvIWe0vVUpNP32qc8U3PDVxySP/y360qE= github.com/golang/glog v1.1.0/go.mod h1:pfYeQZ3JWZoXTV5sFc986z3HTpwQs9At6P4ImfuP3NQ= github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/glog v1.2.2/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1 
h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= 
+github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= github.com/google/cel-go v0.12.6 h1:kjeKudqV0OygrAqA9fX6J55S8gj+Jre2tckIm5RoG4M= @@ -853,33 +924,48 @@ github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6 github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ= github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.8/go.mod 
h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-intervals v0.0.2 h1:FGrVEiUnTRKR8yE04qzXYaJMtnIYqobR5QbblK3ixcM= github.com/google/go-intervals v0.0.2/go.mod h1:MkaR3LNRfeKLPmqgJYs4E66z5InYjmCjbbr4TQlcT6Y= github.com/google/go-pkcs11 v0.2.1-0.20230907215043-c6f79328ddf9 h1:OF1IPgv+F4NmqmJ98KTjdN97Vs1JxDPB3vbmYzV2dpk= github.com/google/go-pkcs11 v0.2.1-0.20230907215043-c6f79328ddf9/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY= +github.com/google/go-pkcs11 v0.3.0/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw= github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/s2a-go v0.1.0/go.mod h1:OJpEgntRZo8ugHpF9hkoLJbS5dSI20XZeXJ9JVywLlM= github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.2.3 h1:yk9/cqRKtT9wXZSsRH9aurXEpJX+U6FLtpYTdC3R06k= github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.7.1 
h1:gF4c0zjUP2H/s/hEGyLA3I0fA2ZWjzYiONAD6cvPr8A= github.com/googleapis/gax-go/v2 v2.7.1/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= github.com/googleapis/gax-go/v2 v2.8.0/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= @@ -905,62 +991,99 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3 h1:lLT7ZLSzGLI08vc9cpd+tYmNWjd github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= 
+github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM= github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20220517205856-0058ec4f073c h1:rwmN+hgiyp8QyBqzdEX43lTjKAxaqCrYHaU5op5P9J8= github.com/ianlancetaylor/demangle v0.0.0-20220517205856-0058ec4f073c/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/ianlancetaylor/demangle 
v0.0.0-20240312041847-bd984b5ce465/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw= github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgx/v5 v5.5.4 h1:Xp2aQS8uXButQdnCMWNmvx6UysWQQC+u1EoizjguY+8= github.com/jackc/pgx/v5 v5.5.4/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A= +github.com/jackc/pgx/v5 v5.5.5/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiwgm1A= github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jonboulle/clockwork v0.2.2 h1:UOGuzwb1PwsrDAObMuhUnj0p5ULPj8V/xJ7Kx9qUBdQ= github.com/jonboulle/clockwork v0.2.2/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod 
h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.5.0 h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw= github.com/labring/operator-sdk v1.0.1 h1:JS+j9nF0lihkPJnMYJBZrH7Kfp/dKB2cnbBRMfkmE+g= github.com/labring/operator-sdk v1.0.1/go.mod h1:velfQ6SyrLXBeAShetQyR7q1zJNd8vGO6jjzbKcofj8= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= github.com/lufia/plan9stats v0.0.0-20230110061619-bbe2e5e100de h1:V53FWzU6KAZVi1tPp5UIsMoUWJ2/PNwYIDXnu7QuBCE= github.com/lufia/plan9stats v0.0.0-20230110061619-bbe2e5e100de/go.mod 
h1:JKx41uQRwqlTZabZc+kILPrO/3jlKnQ2Z8b7YiVw5cE= github.com/lyft/protoc-gen-star/v2 v2.0.1 h1:keaAo8hRuAT0O3DfJ/wM3rufbAjGeJ1lAtWZHDjKGB0= github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/lyft/protoc-gen-star/v2 v2.0.3 h1:/3+/2sWyXeMLzKd1bX+ixWKgEMsULrIivpDsuaF441o= github.com/lyft/protoc-gen-star/v2 v2.0.3/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= +github.com/lyft/protoc-gen-star/v2 v2.0.4-0.20230330145011-496ad1ac90a4/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/matoous/go-nanoid v1.5.0 h1:VRorl6uCngneC4oUQqOYtO3S0H5QKFtKuKycFG3euek= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-shellwords v1.0.12 h1:M2zGm7EW6UQJvDeQxo4T51eKPurbeFbe8WtebGE2xrk= github.com/mattn/go-shellwords v1.0.12/go.mod h1:EZzvwXDESEeg03EKmM+RmDnNOPKG4lLtQsUlTZDWQ8Y= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod 
h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= @@ -975,7 +1098,14 @@ github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dz github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= github.com/mistifyio/go-zfs/v3 v3.0.1 h1:YaoXgBePoMA12+S1u/ddkv+QqxcfiZK4prI6HPnkFiU= github.com/mistifyio/go-zfs/v3 v3.0.1/go.mod h1:CzVgeB0RvF2EGzQnytKVvVSDwmKJXxkOTUGbNrTja/k= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.5.1-0.20220423185008-bf980b35cac4 h1:BpfhmLKZf+SjVanKKhCgf3bg+511DmU9eDQTen7LLbY= @@ -993,6 +1123,8 @@ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= github.com/mxk/go-flowrate 
v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= +github.com/neelance/sourcemap v0.0.0-20200213170602-2833bce08e4c/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= @@ -1015,6 +1147,11 @@ github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bl github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/selinux v1.11.0 h1:+5Zbo97w3Lbmb3PeqQtpmTkMwsW5nRI3YaLpt7tQ7oU= github.com/opencontainers/selinux v1.11.0/go.mod h1:E5dMC3VPuVvVHDYmi78qvhJp8+M586T4DlDRYpFkyec= +github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw= @@ -1022,6 +1159,10 @@ github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2 github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= 
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b h1:0LFwY6Q3gMACTjAbMZBjXAqTOzOwFaj2Ld6cjeQ7Rig= github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI= @@ -1029,10 +1170,10 @@ github.com/prometheus/client_golang v1.15.1/go.mod h1:e9yaBhRPU2pPNsZwE+JdQl0KEt github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU= github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= +github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc= github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= 
github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= @@ -1045,14 +1186,18 @@ github.com/prometheus/prom2json v1.3.3 h1:IYfSMiZ7sSOfliBoo89PcufjWO4eAR0gznGcET github.com/prometheus/prom2json v1.3.3/go.mod h1:Pv4yIPktEkK7btWsrUTWDDDrnpUrAELaOCj+oFwlgmc= github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/safchain/ethtool v0.3.0 h1:gimQJpsI6sc1yIqP/y8GYgiXn/NjgvpM0RNoWLVVmP0= github.com/safchain/ethtool v0.3.0/go.mod h1:SA9BwrgyAqNo7M+uaL6IYbxpm5wk3L7Mm6ocLW+CJUs= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646 h1:RpforrEYXWkmGwJHIGnLZ3tTWStkjVVstwzNGqxX2Ds= github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/secure-io/sio-go v0.3.1 h1:dNvY9awjabXTYGsTF1PiCySl9Ltofk9GA3VdWlo7rRc= @@ -1064,8 +1209,12 @@ github.com/shirou/gopsutil/v3 v3.23.6 
h1:5y46WPI9QBKBbK7EEccUPNXpJpNrvPuTD0O2zHE github.com/shirou/gopsutil/v3 v3.23.6/go.mod h1:j7QX50DrXYggrpN30W0Mo+I4/8U2UUIQrnrhqUeWrAU= github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v1.1.0 h1:MkTeG1DMwsrdH7QtLXy5W+fUxWq+vmb6cLmyJ7aRtF0= @@ -1078,22 +1227,30 @@ github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.3.3 h1:p5gZEKLYoL7wh8VrJesMaYeNxdEd1v3cb4irOk9zB54= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM= github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= +github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= 
github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= github.com/spf13/cobra v1.6.0 h1:42a0n6jwCot1pUmomAp4T7DeMD+20LFv4Q54pxLf2LI= github.com/spf13/cobra v1.6.0/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc= github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg= github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635 h1:kdXcSzyDtseVEc4yCz2qF8ZrQvIDBJLl4S1c3GCXmoI= @@ -1135,6 +1292,8 @@ github.com/xhit/go-str2duration/v2 v2.1.0 
h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8 github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg= @@ -1146,14 +1305,17 @@ go.etcd.io/bbolt v1.3.6 h1:/ecaJf0sk1l4l6V4awd65v2C3ILy7MSj+s/x1ADCIMU= go.etcd.io/bbolt v1.3.6/go.mod h1:qXsaaIqmgQH0T+OPdb99Bf+PKfBBQVAdyD6TY9G8XM4= go.etcd.io/bbolt v1.3.8 h1:xs88BrvEv273UsB79e0hcVrlUWmS0a8upikMFhSyAtA= go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= go.etcd.io/etcd/api/v3 v3.5.7 h1:sbcmosSVesNrWOJ58ZQFitHMdncusIifYcrBfwrlJSY= go.etcd.io/etcd/api/v3 v3.5.7/go.mod h1:9qew1gCdDDLu+VwmeG+iFpL+QlpHTo7iubavdVDgCAA= go.etcd.io/etcd/api/v3 v3.5.10 h1:szRajuUUbLyppkhs9K6BRtjY37l66XQQmw7oZRANE4k= go.etcd.io/etcd/api/v3 v3.5.10/go.mod h1:TidfmT4Uycad3NM/o25fG3J07odo4GBB9hoxaodFCtI= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= go.etcd.io/etcd/client/pkg/v3 v3.5.7 h1:y3kf5Gbp4e4q7egZdn5T7W9TSHUvkClN6u+Rq9mEOmg= go.etcd.io/etcd/client/pkg/v3 v3.5.7/go.mod h1:o0Abi1MK86iad3YrWhgUsbGx1pmTS+hrORWc2CamuhY= go.etcd.io/etcd/client/pkg/v3 v3.5.10 h1:kfYIdQftBnbAq8pUWFXfpuuxFSKzlmM5cSn76JByiT0= go.etcd.io/etcd/client/pkg/v3 v3.5.10/go.mod h1:DYivfIviIuQ8+/lCq4vcxuseg2P2XbHygkKwFo9fc8U= 
+go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= go.etcd.io/etcd/client/v2 v2.305.7 h1:AELPkjNR3/igjbO7CjyF1fPuVPjrblliiKj+Y6xSGOU= go.etcd.io/etcd/client/v2 v2.305.7/go.mod h1:GQGT5Z3TBuAQGvgPfhR7VPySu/SudxmEkRq9BgzFU6s= go.etcd.io/etcd/client/v2 v2.305.10 h1:MrmRktzv/XF8CvtQt+P6wLUlURaNpSDJHFZhe//2QE4= @@ -1174,8 +1336,13 @@ go.etcd.io/etcd/server/v3 v3.5.7 h1:BTBD8IJUV7YFgsczZMHhMTS67XuA4KpRquL0MFOJGRk= go.etcd.io/etcd/server/v3 v3.5.7/go.mod h1:gxBgT84issUVBRpZ3XkW1T55NjOb4vZZRI4wVvNhf4A= go.etcd.io/etcd/server/v3 v3.5.10 h1:4NOGyOwD5sUZ22PiWYKmfxqoeh72z6EhYjNosKGLmZg= go.etcd.io/etcd/server/v3 v3.5.10/go.mod h1:gBplPHfs6YI0L+RpGkTQO7buDbHv5HJGG/Bst0/zIPo= -go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= -go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.35.0 h1:xFSRQBbXF6VvYRf2lqMJXxoB72XI1K/azav8TekHHSw= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.35.0/go.mod h1:h8TWwRAhQpOd0aM5nYsRD8+flnkj+526GEIVlarH7eY= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.0 h1:ZOLJc06r4CB42laIXg/7udr0pbZyuAihN10A/XuiQRY= @@ -1206,6 +1373,7 @@ go.opentelemetry.io/otel/sdk v1.10.0 h1:jZ6K7sVn04kk/3DNUdJ4mqRlGDiXAVuIG+MMENpT go.opentelemetry.io/otel/sdk v1.10.0/go.mod 
h1:vO06iKzD5baltJz1zarxMCNHFpUlUiOy4s65ECtn6kE= go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= +go.opentelemetry.io/otel/sdk v1.29.0/go.mod h1:pM8Dx5WKnvxLCb+8lG1PRNIDxu9g9b9g59Qr7hfAAok= go.opentelemetry.io/otel/trace v1.10.0 h1:npQMbR8o7mum8uF95yFbOEJffhs1sbCOfDh8zAJiH5E= go.opentelemetry.io/otel/trace v1.10.0/go.mod h1:Sij3YYczqAdz+EhmGhE6TpTxUO5/F/AzrK+kxfGqySM= go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= @@ -1225,24 +1393,55 @@ go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= +golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.9.0/go.mod 
h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= +golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/exp v0.0.0-20190121172915-509febef88a4 h1:c2HOrn5iMezYjSlGPncknSEr/8x5LELb/ilJbXi9DEA= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6 h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint 
v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3 h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod 
h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -1250,13 +1449,30 @@ golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk= golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= 
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= @@ -1268,8 +1484,21 @@ golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= +golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= 
+golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= @@ -1280,53 +1509,169 @@ golang.org/x/oauth2 v0.14.0/go.mod h1:lAtNWgaWfL4cm7j2OV8TxGi9Qb7ECORx8DktCY74Ow golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI= golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.5.0/go.mod 
h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240208230135-b75ee8823808 h1:+Kc94D8UVEVxJnLXp/+FMfqQARZtWHfVrcRtcG8aT3g= golang.org/x/telemetry v0.0.0-20240208230135-b75ee8823808/go.mod h1:KG1lNk5ZFNssSZLrpVb4sMXKMpGwGXOxSG3rnu2gZQQ= +golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= 
+golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools 
v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools 
v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= +golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4= golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc= golang.org/x/tools v0.9.3/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc= golang.org/x/tools v0.12.0/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= +golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= gomodules.xyz/jsonpatch/v2 v2.3.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= gomodules.xyz/jsonpatch/v2 v2.4.0 h1:Ci3iUJyx9UeRx7CeFN8ARgGbkESwJK+KB9lLcWxY/Zw= gomodules.xyz/jsonpatch/v2 v2.4.0/go.mod 
h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= google.golang.org/api v0.114.0 
h1:1xQPji6cO2E2vLiI+C/XiFAnsn1WV3mjaEwGLhi3grE= google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= google.golang.org/api v0.118.0/go.mod h1:76TtD3vkgmZ66zZzp72bUUklpmQmKlhh6sYtIjYK+5E= @@ -1334,15 +1679,51 @@ google.golang.org/api v0.126.0 h1:q4GJq+cAdMAC7XP7njvQ4tvohGLiSlytuL4BQxbIZ+o= google.golang.org/api v0.126.0/go.mod h1:mBwVAtz+87bEN6CbA1GtZPDOqY2R5ONPqJeIlvyo4Aw= google.golang.org/api v0.149.0 h1:b2CqT6kG+zqJIVKRQ3ELJVLN1PwHZ6DJ3dW8yl82rgY= google.golang.org/api v0.149.0/go.mod h1:Mwn1B7JTXrzXtnvmzQE2BD6bYZQ8DShKZDZbeN9I7qI= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod 
h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto 
v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= 
+google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20230323212658-478b75c54725/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= @@ -1360,6 +1741,8 @@ google.golang.org/genproto v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:J7XzRzVy google.golang.org/genproto v0.0.0-20231212172506-995d672761c0/go.mod h1:l/k7rMz0vFTBPy+tFSGvXEd3z+BcoG1k7EHbqm+YBsY= google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac h1:ZL/Teoy/ZGnzyrqK/Optxxp2pmVh+fmJ97slxSRyzUg= google.golang.org/genproto v0.0.0-20240116215550-a9fa1716bcac/go.mod h1:+Rvu7ElI+aLzyDQhpHMFMMltsD6m7nqpuWDd2CwJw3k= +google.golang.org/genproto v0.0.0-20241021214115-324edc3d5d38 h1:Q3nlH8iSQSRUwOskjbcSMcF2jiYMNiQYZ0c2KEJLKKU= +google.golang.org/genproto v0.0.0-20241021214115-324edc3d5d38/go.mod h1:xBI+tzfqGGN2JBeSebfKXFSdBpWVQ7sLW40PTupVRm4= google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9 h1:m8v1xLLLzMe1m5P+gCTF8nJB9epwZQUBERm20Oy1poQ= google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= google.golang.org/genproto/googleapis/api v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig= @@ -1371,10 +1754,12 @@ google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b/go. 
google.golang.org/genproto/googleapis/api v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:0xJLfVdJqpAPl8tDg1ujOCGzx6LFLttXT5NhllGOXY4= google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917 h1:rcS6EyEaoCO52hQDupoSfrxI3R6C2Tq741is7X8OvnM= google.golang.org/genproto/googleapis/api v0.0.0-20240102182953-50ed04b92917/go.mod h1:CmlNWB9lSezaYELKS5Ym1r44VrrbPUa7JTvw+6MbpJ0= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I= google.golang.org/genproto/googleapis/bytestream v0.0.0-20230530153820-e85fd2cbaebc h1:g3hIDl0jRNd9PPTs2uBzYuaD5mQuwOkZY0vSc0LR32o= google.golang.org/genproto/googleapis/bytestream v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:ylj+BE99M198VPbBh6A8d9n3w8fChvyLK3wwBOjXBFA= google.golang.org/genproto/googleapis/bytestream v0.0.0-20231030173426-d783a09b4405 h1:o4S3HvTUEXgRsNSUQsALDVog0O9F/U1JJlHmmUN8Uas= google.golang.org/genproto/googleapis/bytestream v0.0.0-20231030173426-d783a09b4405/go.mod h1:GRUCuLdzVqZte8+Dl/D4N25yLzcGqqWaYkeVOwulFqw= +google.golang.org/genproto/googleapis/bytestream v0.0.0-20241021214115-324edc3d5d38/go.mod h1:T8O3fECQbif8cez15vxAcjbwXxvL2xbnvbQ7ZfiMAMs= google.golang.org/genproto/googleapis/rpc v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc= google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= @@ -1387,14 +1772,24 @@ google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0/go. 
google.golang.org/genproto/googleapis/rpc v0.0.0-20240102182953-50ed04b92917/go.mod h1:xtjpI3tXFPP051KaWnhvxkiubL/6dJ18vLVf7q2pTOU= google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe h1:bQnxqljG/wqi4NTXu2+DJ3n7APcEA882QZ1JvhQAq9o= google.golang.org/genproto/googleapis/rpc v0.0.0-20240125205218-1f4bbc51befe/go.mod h1:PAREbraiVEVGVdTZsVWjSbbTtSyGbAgIIvni8a8CD5s= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod 
h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= @@ -1405,22 +1800,17 @@ google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9Y google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= google.golang.org/grpc v1.61.0 h1:TOvOcuXn30kRao+gfcvsebNEa5iZIiLkisYEkf7R7o0= google.golang.org/grpc v1.61.0/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod 
h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk= gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -1428,15 +1818,21 @@ gopkg.in/errgo.v2 v2.1.0 h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df h1:n7WqCuqOuCbNr617RXOY0AWRXxgwEyPp2z+p0+hgMuE= gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df/go.mod h1:LRQQ+SO6ZHR7tOkpBDuZnXENFzX8qRjMDMyPD6BRkCw= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= 
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/gorm v1.25.10/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= k8s.io/api v0.27.2/go.mod h1:ENmbocXfBT2ADujUXcBhHV55RIT31IIEvkntP6vZKS4= k8s.io/api v0.27.4/go.mod h1:O3smaaX15NfxjzILfiln1D8Z3+gEYpjEpiNA/1EVK1Y= k8s.io/api v0.28.2/go.mod h1:RVnJBsjU8tcMq7C3iaRSGMeaKt2TWEUXcpIt/90fjEg= @@ -1479,7 +1875,14 @@ k8s.io/utils v0.0.0-20210802155522-efc7438f0176/go.mod h1:jPW/WVKK9YHAvNhRxK0md/ k8s.io/utils v0.0.0-20230209194617-a36077c30491/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= k8s.io/utils v0.0.0-20231127182322-b307cd553661 h1:FepOBzJ0GXm8t0su67ln2wAZjbQ6RxQGZDnzuLcrUTI= k8s.io/utils 
v0.0.0-20231127182322-b307cd553661/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +lukechampine.com/uint128 v1.3.0/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk= +modernc.org/cc/v3 v3.41.0/go.mod h1:Ni4zjJYJ04CDOhG7dn640WGfwBzfE0ecX8TyMB0Fv0Y= +modernc.org/ccgo/v3 v3.17.0/go.mod h1:Sg3fwVpmLvCUTaqEUjiBDAvshIaKDB0RXaf+zgqFu8I= +modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.1.2 h1:trsWhjU5jZrx6UvFu4WzQDrN7Pga4a7Qg+zcfcj64PA= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.1.2/go.mod h1:+qG7ISXqCDVVcyO8hLn12AKVYYUjM7ftlqsqmrhMZE0= sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.28.0 h1:TgtAeesdhpm2SGwkQasmbeqDo8th5wOBA5h/AjTKA4I=