From 875adfcbaae9f88efbe538bfcdc52d50132e605b Mon Sep 17 00:00:00 2001 From: Junyan Qin Date: Thu, 21 Nov 2024 23:28:19 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=E6=B7=BB=E5=8A=A0=E5=AF=B9=20Gitee=20A?= =?UTF-8?q?I=20=E7=9A=84=E6=94=AF=E6=8C=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pkg/core/migrations/m015_gitee_ai_config.py | 26 +++++++++ pkg/core/stages/migrate.py | 2 + pkg/provider/modelmgr/entities.py | 4 +- pkg/provider/modelmgr/modelmgr.py | 14 ++--- .../modelmgr/{api.py => requester.py} | 0 .../modelmgr/{apis => requesters}/__init__.py | 0 .../{apis => requesters}/anthropicmsgs.py | 8 +-- .../modelmgr/{apis => requesters}/chatcmpl.py | 6 +-- .../{apis => requesters}/deepseekchatcmpl.py | 4 +- .../modelmgr/requesters/giteeaichatcmpl.py | 53 +++++++++++++++++++ .../{apis => requesters}/moonshotchatcmpl.py | 4 +- .../{apis => requesters}/ollamachat.py | 6 +-- templates/provider.json | 8 +++ 13 files changed, 112 insertions(+), 23 deletions(-) create mode 100644 pkg/core/migrations/m015_gitee_ai_config.py rename pkg/provider/modelmgr/{api.py => requester.py} (100%) rename pkg/provider/modelmgr/{apis => requesters}/__init__.py (100%) rename pkg/provider/modelmgr/{apis => requesters}/anthropicmsgs.py (95%) rename pkg/provider/modelmgr/{apis => requesters}/chatcmpl.py (96%) rename pkg/provider/modelmgr/{apis => requesters}/deepseekchatcmpl.py (94%) create mode 100644 pkg/provider/modelmgr/requesters/giteeaichatcmpl.py rename pkg/provider/modelmgr/{apis => requesters}/moonshotchatcmpl.py (94%) rename pkg/provider/modelmgr/{apis => requesters}/ollamachat.py (96%) diff --git a/pkg/core/migrations/m015_gitee_ai_config.py b/pkg/core/migrations/m015_gitee_ai_config.py new file mode 100644 index 00000000..b41071ad --- /dev/null +++ b/pkg/core/migrations/m015_gitee_ai_config.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from .. 
import migration + + +@migration.migration_class("gitee-ai-config", 15) +class GiteeAIConfigMigration(migration.Migration): + """迁移""" + + async def need_migrate(self) -> bool: + """判断当前环境是否需要运行此迁移""" + return 'gitee-ai-chat-completions' not in self.ap.provider_cfg.data['requester'] or 'gitee-ai' not in self.ap.provider_cfg.data['keys'] + + async def run(self): + """执行迁移""" + self.ap.provider_cfg.data['requester']['gitee-ai-chat-completions'] = { + "base-url": "https://ai.gitee.com/v1", + "args": {}, + "timeout": 120 + } + + self.ap.provider_cfg.data['keys']['gitee-ai'] = [ + "XXXXX" + ] + + await self.ap.provider_cfg.dump_config() diff --git a/pkg/core/stages/migrate.py b/pkg/core/stages/migrate.py index dd9023ed..8950ca35 100644 --- a/pkg/core/stages/migrate.py +++ b/pkg/core/stages/migrate.py @@ -7,6 +7,7 @@ from ..migrations import m001_sensitive_word_migration, m002_openai_config_migration, m003_anthropic_requester_cfg_completion, m004_moonshot_cfg_completion from ..migrations import m005_deepseek_cfg_completion, m006_vision_config, m007_qcg_center_url, m008_ad_fixwin_config_migrate, m009_msg_truncator_cfg from ..migrations import m010_ollama_requester_config, m011_command_prefix_config, m012_runner_config, m013_http_api_config, m014_force_delay_config +from ..migrations import m015_gitee_ai_config @stage.stage_class("MigrationStage") @@ -28,3 +29,4 @@ async def run(self, ap: app.Application): if await migration_instance.need_migrate(): await migration_instance.run() + print(f'已执行迁移 {migration_instance.name}') diff --git a/pkg/provider/modelmgr/entities.py b/pkg/provider/modelmgr/entities.py index 79cb5449..91d1d6e9 100644 --- a/pkg/provider/modelmgr/entities.py +++ b/pkg/provider/modelmgr/entities.py @@ -4,7 +4,7 @@ import pydantic -from . import api +from . import requester from . import token @@ -17,7 +17,7 @@ class LLMModelInfo(pydantic.BaseModel): token_mgr: token.TokenManager - requester: api.LLMAPIRequester + requester: requester.LLMAPIRequester tool_call_supported: typing.Optional[bool] = False diff --git a/pkg/provider/modelmgr/modelmgr.py b/pkg/provider/modelmgr/modelmgr.py index cf782302..fb6c703b 100644 --- a/pkg/provider/modelmgr/modelmgr.py +++ b/pkg/provider/modelmgr/modelmgr.py @@ -2,11 +2,11 @@ import aiohttp -from . import entities +from . import entities, requester from ...core import app -from . import token, api -from .apis import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat +from . 
import token +from .requesters import chatcmpl, anthropicmsgs, moonshotchatcmpl, deepseekchatcmpl, ollamachat, giteeaichatcmpl FETCH_MODEL_LIST_URL = "https://api.qchatgpt.rockchin.top/api/v2/fetch/model_list" @@ -18,7 +18,7 @@ class ModelManager: model_list: list[entities.LLMModelInfo] - requesters: dict[str, api.LLMAPIRequester] + requesters: dict[str, requester.LLMAPIRequester] token_mgrs: dict[str, token.TokenManager] @@ -42,7 +42,7 @@ async def initialize(self): for k, v in self.ap.provider_cfg.data['keys'].items(): self.token_mgrs[k] = token.TokenManager(k, v) - for api_cls in api.preregistered_requesters: + for api_cls in requester.preregistered_requesters: api_inst = api_cls(self.ap) await api_inst.initialize() self.requesters[api_inst.name] = api_inst @@ -94,7 +94,7 @@ async def initialize(self): model_name = model.get('model_name', default_model_info.model_name) token_mgr = self.token_mgrs[model['token_mgr']] if 'token_mgr' in model else default_model_info.token_mgr - requester = self.requesters[model['requester']] if 'requester' in model else default_model_info.requester + req = self.requesters[model['requester']] if 'requester' in model else default_model_info.requester tool_call_supported = model.get('tool_call_supported', default_model_info.tool_call_supported) vision_supported = model.get('vision_supported', default_model_info.vision_supported) @@ -102,7 +102,7 @@ async def initialize(self): name=model['name'], model_name=model_name, token_mgr=token_mgr, - requester=requester, + requester=req, tool_call_supported=tool_call_supported, vision_supported=vision_supported ) diff --git a/pkg/provider/modelmgr/api.py b/pkg/provider/modelmgr/requester.py similarity index 100% rename from pkg/provider/modelmgr/api.py rename to pkg/provider/modelmgr/requester.py diff --git a/pkg/provider/modelmgr/apis/__init__.py b/pkg/provider/modelmgr/requesters/__init__.py similarity index 100% rename from pkg/provider/modelmgr/apis/__init__.py rename to pkg/provider/modelmgr/requesters/__init__.py diff --git a/pkg/provider/modelmgr/apis/anthropicmsgs.py b/pkg/provider/modelmgr/requesters/anthropicmsgs.py similarity index 95% rename from pkg/provider/modelmgr/apis/anthropicmsgs.py rename to pkg/provider/modelmgr/requesters/anthropicmsgs.py index 8c5360ec..a60c8f00 100644 --- a/pkg/provider/modelmgr/apis/anthropicmsgs.py +++ b/pkg/provider/modelmgr/requesters/anthropicmsgs.py @@ -5,17 +5,17 @@ import anthropic -from .. import api, entities, errors +from .. import entities, errors, requester -from .. import api, entities, errors +from .. import entities, errors from ....core import entities as core_entities from ... import entities as llm_entities from ...tools import entities as tools_entities from ....utils import image -@api.requester_class("anthropic-messages") -class AnthropicMessages(api.LLMAPIRequester): +@requester.requester_class("anthropic-messages") +class AnthropicMessages(requester.LLMAPIRequester): """Anthropic Messages API 请求器""" client: anthropic.AsyncAnthropic diff --git a/pkg/provider/modelmgr/apis/chatcmpl.py b/pkg/provider/modelmgr/requesters/chatcmpl.py similarity index 96% rename from pkg/provider/modelmgr/apis/chatcmpl.py rename to pkg/provider/modelmgr/requesters/chatcmpl.py index 099eef19..da993ade 100644 --- a/pkg/provider/modelmgr/apis/chatcmpl.py +++ b/pkg/provider/modelmgr/requesters/chatcmpl.py @@ -12,15 +12,15 @@ import aiohttp import async_lru -from .. import api, entities, errors +from .. 
import entities, errors, requester from ....core import entities as core_entities, app from ... import entities as llm_entities from ...tools import entities as tools_entities from ....utils import image -@api.requester_class("openai-chat-completions") -class OpenAIChatCompletions(api.LLMAPIRequester): +@requester.requester_class("openai-chat-completions") +class OpenAIChatCompletions(requester.LLMAPIRequester): """OpenAI ChatCompletion API 请求器""" client: openai.AsyncClient diff --git a/pkg/provider/modelmgr/apis/deepseekchatcmpl.py b/pkg/provider/modelmgr/requesters/deepseekchatcmpl.py similarity index 94% rename from pkg/provider/modelmgr/apis/deepseekchatcmpl.py rename to pkg/provider/modelmgr/requesters/deepseekchatcmpl.py index 4edc2cb1..6651ca6f 100644 --- a/pkg/provider/modelmgr/apis/deepseekchatcmpl.py +++ b/pkg/provider/modelmgr/requesters/deepseekchatcmpl.py @@ -3,13 +3,13 @@ from ....core import app from . import chatcmpl -from .. import api, entities, errors +from .. import entities, errors, requester from ....core import entities as core_entities, app from ... import entities as llm_entities from ...tools import entities as tools_entities -@api.requester_class("deepseek-chat-completions") +@requester.requester_class("deepseek-chat-completions") class DeepseekChatCompletions(chatcmpl.OpenAIChatCompletions): """Deepseek ChatCompletion API 请求器""" diff --git a/pkg/provider/modelmgr/requesters/giteeaichatcmpl.py b/pkg/provider/modelmgr/requesters/giteeaichatcmpl.py new file mode 100644 index 00000000..1104c015 --- /dev/null +++ b/pkg/provider/modelmgr/requesters/giteeaichatcmpl.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import json + +import asyncio +import aiohttp +import typing + +from . import chatcmpl +from .. import entities, errors, requester +from ....core import app +from ... import entities as llm_entities +from ...tools import entities as tools_entities +from .. import entities as modelmgr_entities + + +@requester.requester_class("gitee-ai-chat-completions") +class GiteeAIChatCompletions(chatcmpl.OpenAIChatCompletions): + """Gitee AI ChatCompletions API 请求器""" + + def __init__(self, ap: app.Application): + self.ap = ap + self.requester_cfg = ap.provider_cfg.data['requester']['gitee-ai-chat-completions'].copy() + + async def _closure( + self, + req_messages: list[dict], + use_model: entities.LLMModelInfo, + use_funcs: list[tools_entities.LLMFunction] = None, + ) -> llm_entities.Message: + self.client.api_key = use_model.token_mgr.get_token() + + args = self.requester_cfg['args'].copy() + args["model"] = use_model.name if use_model.model_name is None else use_model.model_name + + if use_funcs: + tools = await self.ap.tool_mgr.generate_tools_for_openai(use_funcs) + + if tools: + args["tools"] = tools + + # gitee 不支持多模态,把content都转换成纯文字 + for m in req_messages: + if 'content' in m and isinstance(m["content"], list): + m["content"] = " ".join([c["text"] for c in m["content"]]) + + args["messages"] = req_messages + + resp = await self._req(args) + + message = await self._make_msg(resp) + + return message diff --git a/pkg/provider/modelmgr/apis/moonshotchatcmpl.py b/pkg/provider/modelmgr/requesters/moonshotchatcmpl.py similarity index 94% rename from pkg/provider/modelmgr/apis/moonshotchatcmpl.py rename to pkg/provider/modelmgr/requesters/moonshotchatcmpl.py index 2f299b8b..caecfd52 100644 --- a/pkg/provider/modelmgr/apis/moonshotchatcmpl.py +++ b/pkg/provider/modelmgr/requesters/moonshotchatcmpl.py @@ -3,13 +3,13 @@ from ....core import app from . 
import chatcmpl -from .. import api, entities, errors +from .. import entities, errors, requester from ....core import entities as core_entities, app from ... import entities as llm_entities from ...tools import entities as tools_entities -@api.requester_class("moonshot-chat-completions") +@requester.requester_class("moonshot-chat-completions") class MoonshotChatCompletions(chatcmpl.OpenAIChatCompletions): """Moonshot ChatCompletion API 请求器""" diff --git a/pkg/provider/modelmgr/apis/ollamachat.py b/pkg/provider/modelmgr/requesters/ollamachat.py similarity index 96% rename from pkg/provider/modelmgr/apis/ollamachat.py rename to pkg/provider/modelmgr/requesters/ollamachat.py index 65295dc5..f66a2a78 100644 --- a/pkg/provider/modelmgr/apis/ollamachat.py +++ b/pkg/provider/modelmgr/requesters/ollamachat.py @@ -8,7 +8,7 @@ import async_lru import ollama -from .. import api, entities, errors +from .. import entities, errors, requester from ... import entities as llm_entities from ...tools import entities as tools_entities from ....core import app @@ -17,8 +17,8 @@ REQUESTER_NAME: str = "ollama-chat" -@api.requester_class(REQUESTER_NAME) -class OllamaChatCompletions(api.LLMAPIRequester): +@requester.requester_class(REQUESTER_NAME) +class OllamaChatCompletions(requester.LLMAPIRequester): """Ollama平台 ChatCompletion API请求器""" client: ollama.AsyncClient request_cfg: dict diff --git a/templates/provider.json b/templates/provider.json index b7ec7fb9..1343cd7c 100644 --- a/templates/provider.json +++ b/templates/provider.json @@ -13,6 +13,9 @@ ], "deepseek": [ "sk-1234567890" + ], + "gitee-ai": [ + "XXXXX" ] }, "requester": { @@ -42,6 +45,11 @@ "base-url": "http://127.0.0.1:11434", "args": {}, "timeout": 600 + }, + "gitee-ai-chat-completions": { + "base-url": "https://ai.gitee.com/v1", + "args": {}, + "timeout": 120 } }, "model": "gpt-3.5-turbo",
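
Taken together, the patch adds three cooperating pieces: a config migration, a new entry in the requester registry, and the Gitee-specific requester itself. On the migration side, m015_gitee_ai_config.py follows the project's usual pattern: need_migrate() reports whether the gitee-ai requester section or key group is still missing from provider.json, and run() writes the defaults and dumps the config, so re-running the stage is harmless. The registration machinery lives in the untouched migration module, so it never appears in this diff; the sketch below is only an inference from the decorator arguments and from how stages/migrate.py iterates migrations (the registry name and attributes are assumptions, not the actual file):

preregistered_migrations: list[type] = []

def migration_class(name: str, number: int):
    # Register a Migration subclass under a unique name and ordinal number;
    # the migrate stage presumably sorts registered classes by this number
    # and runs each instance whose need_migrate() returns True, as the loop
    # in pkg/core/stages/migrate.py shows.
    def decorator(cls):
        cls.name = name
        cls.number = number
        preregistered_migrations.append(cls)
        return cls
    return decorator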
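
The api.py -> requester.py rename is content-identical (100% similarity), so the requester registry itself is also invisible in the diff; only its call sites change. Based on how ModelManager.initialize() walks requester.preregistered_requesters, instantiates each class with the Application, and indexes the instance by its name attribute, the decorator is presumably along these lines (a sketch under those assumptions):

preregistered_requesters: list[type] = []

def requester_class(name: str):
    # Attach the requester name (e.g. "gitee-ai-chat-completions") to the
    # class and add it to the registry that ModelManager.initialize() scans.
    def decorator(cls):
        cls.name = name
        preregistered_requesters.append(cls)
        return cls
    return decorator

With that in place, all giteeaichatcmpl.py has to do to become selectable is apply @requester.requester_class("gitee-ai-chat-completions") and subclass the existing OpenAI requester.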
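
GiteeAIChatCompletions inherits almost everything from OpenAIChatCompletions; it only reads its own requester config section and flattens message content before sending, because Gitee AI does not accept the OpenAI-style list form of "content". A standalone illustration of that flattening step (the sample message is made up):

req_messages = [
    {
        "role": "user",
        "content": [
            {"type": "text", "text": "Summarize"},
            {"type": "text", "text": "this paragraph."},
        ],
    }
]

# Collapse multimodal content parts into a single plain-text string,
# mirroring the loop in GiteeAIChatCompletions._closure.
for m in req_messages:
    if "content" in m and isinstance(m["content"], list):
        m["content"] = " ".join([c["text"] for c in m["content"]])

print(req_messages)
# [{'role': 'user', 'content': 'Summarize this paragraph.'}]

Note that any part without a "text" key (for example an image_url part produced when vision is enabled) would raise KeyError in this join, so models wired to this requester should keep vision_supported off or strip non-text parts first. As a side note, most of the module-level imports in giteeaichatcmpl.py (json, asyncio, aiohttp, typing, errors, and the modelmgr entities alias) are unused and could be trimmed.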
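
On the configuration side, the migration and templates/provider.json only add the requester section and a placeholder "XXXXX" key, so a real access token from ai.gitee.com still has to be filled into keys.gitee-ai. To actually route a model through the new requester, the corresponding entry in the model list that ModelManager.initialize() iterates would presumably look like the dict below; the field names come from the lookups in that method, while the model name is only an illustrative placeholder, not something this patch ships:

# Hypothetical model entry consumed by ModelManager.initialize().
gitee_ai_model = {
    "name": "Qwen2-7B-Instruct",               # placeholder; any model id served by Gitee AI
    "requester": "gitee-ai-chat-completions",  # name registered by the new requester class
    "token_mgr": "gitee-ai",                   # must match the new group under "keys"
    "tool_call_supported": False,
    "vision_supported": False,                 # see the KeyError caveat above
}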