Skip to content

Commit

Permalink
fix: 在设置model为非视觉模型时,非local-agent的runner无法获得图片消息 (#948)
Browse files Browse the repository at this point in the history
  • Loading branch information
RockChinQ committed Dec 16, 2024
1 parent 793d643 commit 3314a7a
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 7 deletions.
4 changes: 0 additions & 4 deletions libs/dify_service_api/v1/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,10 +105,6 @@ async def upload_file(
timeout: float = 30.0,
) -> str:
"""上传文件"""
# curl -X POST 'http://dify.rockchin.top/v1/files/upload' \
# --header 'Authorization: Bearer {api_key}' \
# --form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \
# --form 'user=abc-123'
async with httpx.AsyncClient(
base_url=self.base_url,
trust_env=True,
Expand Down
6 changes: 3 additions & 3 deletions pkg/pipeline/preproc/preproc.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ async def process(


# 检查vision是否启用,没启用就删除所有图片
if not self.ap.provider_cfg.data['enable-vision'] or not query.use_model.vision_supported:
if not self.ap.provider_cfg.data['enable-vision'] or (self.ap.provider_cfg.data['runner'] == 'local-agent' and not query.use_model.vision_supported):
for msg in query.messages:
if isinstance(msg.content, list):
for me in msg.content:
Expand All @@ -60,13 +60,13 @@ async def process(
llm_entities.ContentElement.from_text(me.text)
)
elif isinstance(me, platform_message.Image):
if self.ap.provider_cfg.data['enable-vision'] and query.use_model.vision_supported:
if self.ap.provider_cfg.data['enable-vision'] and (self.ap.provider_cfg.data['runner'] != 'local-agent' or query.use_model.vision_supported):
if me.url is not None:
content_list.append(
llm_entities.ContentElement.from_image_url(str(me.url))
)

query.user_message = llm_entities.Message( # TODO 适配多模态输入
query.user_message = llm_entities.Message(
role='user',
content=content_list
)
Expand Down

0 comments on commit 3314a7a

Please sign in to comment.