From ebc91680f85e5a65a47b539093460fba15041e34 Mon Sep 17 00:00:00 2001 From: Christian Tzolov Date: Tue, 10 Dec 2024 15:14:35 +0100 Subject: [PATCH] Improve OllamaWithOpenAiChatModelIT stability --- .../chat/proxy/OllamaWithOpenAiChatModelIT.java | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java index f053e390a6..b835f3f950 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java @@ -259,7 +259,7 @@ void beanStreamOutputConverterRecords() { } @ParameterizedTest(name = "{0} : {displayName} ") - @ValueSource(strings = { "llama3.2:1b" }) + @ValueSource(strings = { "llama3.1:latest", "llama3.2:latest" }) void functionCallTest(String modelName) { UserMessage userMessage = new UserMessage( @@ -268,6 +268,10 @@ void functionCallTest(String modelName) { List messages = new ArrayList<>(List.of(userMessage)); var promptOptions = OpenAiChatOptions.builder() + .withModel(modelName) + // Note for Ollama you must set the tool choice explicitly. 
Unlike OpenAI + // (which defaults to "auto") Ollama defaults to "none" + .withToolChoice("auto") .withFunctionCallbacks(List.of(FunctionCallback.builder() .function("getCurrentWeather", new MockWeatherService()) .description("Get the weather in location") @@ -282,8 +286,9 @@ void functionCallTest(String modelName) { assertThat(response.getResult().getOutput().getText()).contains("30", "10", "15"); } - @Test - void streamFunctionCallTest() { + @ParameterizedTest(name = "{0} : {displayName} ") + @ValueSource(strings = { "llama3.1:latest", "llama3.2:latest" }) + void streamFunctionCallTest(String modelName) { UserMessage userMessage = new UserMessage( "What's the weather like in San Francisco, Tokyo, and Paris? Return the temperature in Celsius."); @@ -291,6 +296,10 @@ void streamFunctionCallTest() { List messages = new ArrayList<>(List.of(userMessage)); var promptOptions = OpenAiChatOptions.builder() + .withModel(modelName) + // Note for Ollama you must set the tool choice explicitly. Unlike OpenAI + // (which defaults to "auto") Ollama defaults to "none" + .withToolChoice("auto") .withFunctionCallbacks(List.of(FunctionCallback.builder() .function("getCurrentWeather", new MockWeatherService()) .description("Get the weather in location")