Skip to content

Commit

Permalink
refactor: disable embedding support on together for now (need to solve api key issue)
Browse files Browse the repository at this point in the history
  • Loading branch information
cpacker committed Nov 15, 2024
1 parent e69407f commit dfda6f0
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 40 deletions.
81 changes: 42 additions & 39 deletions letta/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,46 +414,49 @@ def list_llm_models(self) -> List[LLMConfig]:
return configs

def list_embedding_models(self) -> List[EmbeddingConfig]:
    """List embedding models available through TogetherAI.

    Embedding support is temporarily disabled: the embedding endpoint
    requires the API key to be passed through to the request layer, which
    is not wired up yet. Until that is resolved this always returns an
    empty list, so no Together embedding models are advertised.

    Returns:
        An empty list of ``EmbeddingConfig`` objects.
    """
    # TODO: re-enable once we figure out how to pass API keys through properly.
    # The previous implementation queried ``openai_get_model_list(self.base_url,
    # api_key=self.api_key)``, tolerated Together's missing "data" envelope,
    # derived the context window from "context_length" (OpenRouter-style) or
    # ``self.get_model_context_window_size``, filtered by the Together "type"
    # field, and built one EmbeddingConfig per embedding model
    # (endpoint_type="openai", chunk_size=300).
    return []


class GoogleAIProvider(Provider):
Expand Down
2 changes: 1 addition & 1 deletion tests/test_providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def test_mistral():


def test_together():
    """Smoke-test that TogetherProvider can enumerate its LLM models.

    NOTE(review): this is an integration test — it reads TOGETHER_API_KEY
    from the environment and hits the Together API; it will not pass
    offline or without a valid key.
    """
    # default_prompt_formatter is required by TogetherProvider's constructor.
    provider = TogetherProvider(api_key=os.getenv("TOGETHER_API_KEY"), default_prompt_formatter="chatml")
    models = provider.list_llm_models()
    # Print rather than assert: the available model list changes over time.
    print([m.model for m in models])

Expand Down

0 comments on commit dfda6f0

Please sign in to comment.