From bd5d7da61881fea5ec8d635236e182a7da979ad2 Mon Sep 17 00:00:00 2001 From: Francisco Diaz Date: Sat, 11 Nov 2023 13:09:28 +0100 Subject: [PATCH] Change how OpenAI class is instantiated (#534) * Change how OpenAI class is instantiated * Disable OpenAI client modules * Fix compilation error --- docs/intro/scala.md | 2 +- .../auto/jdk21/contexts/BreakingNews.java | 4 +-- .../auto/jdk21/contexts/DivergentTasks.java | 4 +-- .../xef/java/auto/jdk21/contexts/Markets.java | 4 +-- .../java/auto/jdk21/contexts/PDFDocument.java | 6 ++-- .../xef/java/auto/jdk21/contexts/Weather.java | 4 +-- .../auto/jdk21/conversations/Animals.java | 6 ++-- .../auto/jdk21/serialization/ASCIIArt.java | 2 +- .../java/auto/jdk21/serialization/Book.java | 2 +- .../java/auto/jdk21/serialization/Books.java | 2 +- .../auto/jdk21/serialization/ChessAI.java | 6 ++-- .../java/auto/jdk21/serialization/Movies.java | 2 +- .../auto/jdk21/serialization/Recipes.java | 2 +- .../java/auto/jdk21/tot/ControlSignals.java | 2 +- .../xef/java/auto/jdk21/tot/Critiques.java | 2 +- .../xef/java/auto/jdk21/tot/Solutions.java | 2 +- .../xef/conversation/contexts/BreakingNews.kt | 2 +- .../conversation/contexts/DivergentTasks.kt | 2 +- .../xef/conversation/contexts/Weather.kt | 2 +- .../expressions/DigitalDetoxPlanner.kt | 2 +- .../conversation/expressions/PromptCrafter.kt | 4 +-- .../expressions/RecipeGenerator.kt | 2 +- .../expressions/TravelItinerary.kt | 2 +- .../expressions/WorkoutPlanProgram.kt | 4 +-- .../xef/conversation/fields/NewsSummary.kt | 2 +- .../finetuning/FineTunedModelChat.kt | 2 +- .../xef/conversation/memory/ChatWithMemory.kt | 2 +- .../prompts/PromptEvaluationExample.kt | 2 +- .../xef/conversation/reasoning/CodeExample.kt | 4 +-- .../reasoning/CreatePRDescription.kt | 6 ++-- .../conversation/reasoning/ReActExample.kt | 4 +-- .../reasoning/ReActWikipediaExample.kt | 4 +-- .../xef/conversation/reasoning/TextExample.kt | 2 +- .../reasoning/ToolSelectionExample.kt | 4 +-- .../xef/conversation/serialization/Movie.kt | 2 +- .../xef/conversation/sql/MysqlExample.kt | 2 +- .../streaming/OpenAIStreamingExample.kt | 4 +-- .../xef/conversation/streaming/SpaceCraft.kt | 2 +- .../xef/conversation/tot/Solution.kt | 5 ++-- .../scala/context/serpapi/Simple.scala | 2 +- .../xef/conversation/llm/openai/OpenAI.kt | 29 ++++++++----------- .../llm/openai/OpenAIScopeExtensions.kt | 16 +++++----- .../xef/scala/conversation/package.scala | 15 +++++----- .../services/LocalVectorStoreService.kt | 2 +- .../services/PostgresVectorStoreService.kt | 6 ++-- settings.gradle.kts | 8 ++--- 46 files changed, 98 insertions(+), 99 deletions(-) diff --git a/docs/intro/scala.md b/docs/intro/scala.md index 3ecb49a23..a40fca0d5 100644 --- a/docs/intro/scala.md +++ b/docs/intro/scala.md @@ -119,7 +119,7 @@ import com.xebia.functional.xef.scala.conversation.* import com.xebia.functional.xef.scala.serialization.* import io.circe.Decoder -val openAI: OpenAI = OpenAI.FromEnvironment +val openAI: OpenAI = OpenAI.fromEnvironment() def setContext(query: String)(using conversation: ScalaConversation): Unit = addContext(Search(openAI.DEFAULT_CHAT, conversation, 3).search(query).get) diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/BreakingNews.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/BreakingNews.java index 71585a02c..f4c56619e 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/BreakingNews.java +++ 
b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/BreakingNews.java @@ -21,14 +21,14 @@ public record BreakingNew(String summary) { private static CompletableFuture writeParagraph(PlatformConversation scope) { var currentDate = dtf.format(now); - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("write a paragraph of about 300 words about: " + currentDate + " Covid News"), BreakingNews.BreakingNew.class) + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("write a paragraph of about 300 words about: " + currentDate + " Covid News"), BreakingNews.BreakingNew.class) .thenAccept(breakingNews -> System.out.println(currentDate + " Covid news summary:\n" + breakingNews)); } public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { var currentDate = dtf.format(now); - var search = new Search(OpenAI.FromEnvironment.DEFAULT_CHAT, scope, 3); + var search = new Search(OpenAI.fromEnvironment().DEFAULT_CHAT, scope, 3); scope.addContextFromArray(search.search(currentDate + " Covid News").get()); writeParagraph(scope).get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/DivergentTasks.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/DivergentTasks.java index 6ed37bfac..80790b2d5 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/DivergentTasks.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/DivergentTasks.java @@ -13,13 +13,13 @@ public class DivergentTasks { public Long numberOfMedicalNeedlesInWorld; private static CompletableFuture numberOfMedical(PlatformConversation scope) { - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("Provide the number of medical needles in the world"), DivergentTasks.class) + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("Provide the number of medical needles in the world"), DivergentTasks.class) .thenAccept(numberOfNeedles -> System.out.println("Needles in world:\n" + numberOfNeedles.numberOfMedicalNeedlesInWorld)); } public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { - Search search = new Search(OpenAI.FromEnvironment.DEFAULT_CHAT, scope, 3); + Search search = new Search(OpenAI.fromEnvironment().DEFAULT_CHAT, scope, 3); scope.addContextFromArray(search.search("Estimate amount of medical needles in the world").get()); numberOfMedical(scope).get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Markets.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Markets.java index 4cef77efa..9f09bd86d 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Markets.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Markets.java @@ -19,7 +19,7 @@ public record Market(String news, List raisingStockSymbols, List private static CompletableFuture stockMarketSummary(PlatformConversation scope) { var news = new Prompt("Write a short summary of the stock market results given the provided context."); - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, news, Market.class) + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, 
news, Market.class) .thenAccept(markets -> System.out.println(markets)); } @@ -28,7 +28,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc var dtf = DateTimeFormatter.ofPattern("dd/M/yyyy"); var now = LocalDateTime.now(); var currentDate = dtf.format(now); - var search = new Search(OpenAI.FromEnvironment.DEFAULT_CHAT, scope, 3); + var search = new Search(OpenAI.fromEnvironment().DEFAULT_CHAT, scope, 3); scope.addContextFromArray(search.search(currentDate + "Stock market results, raising stocks, decreasing stocks").get()); stockMarketSummary(scope).get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/PDFDocument.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/PDFDocument.java index e57b69aa6..6640d3613 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/PDFDocument.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/PDFDocument.java @@ -25,7 +25,7 @@ private static CompletableFuture askQuestion(PlatformConversation scope) { if (line == null || line.isBlank()) { return CompletableFuture.completedFuture(null); } else { - scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt(line), AIResponse.class) + scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt(line), AIResponse.class) .thenAccept(aiRes -> System.out.println(aiRes.answer + "\n---\n" + aiRes.source + "\n---\n")); @@ -35,8 +35,8 @@ private static CompletableFuture askQuestion(PlatformConversation scope) { public static void main(String[] args) throws Exception { try (PlatformConversation scope = OpenAI.conversation()) { - PDF pdf = new PDF(OpenAI.FromEnvironment.DEFAULT_CHAT, - OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, scope); + PDF pdf = new PDF(OpenAI.fromEnvironment().DEFAULT_CHAT, + OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, scope); scope.addContext(List.of(pdf.readPDFFromUrl.readPDFFromUrl(PDF_URL).get())); askQuestion(scope).get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Weather.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Weather.java index 17b66c96a..d596eb734 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Weather.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/contexts/Weather.java @@ -13,7 +13,7 @@ public class Weather { public List answer; private static CompletableFuture clothesRecommend(PlatformConversation scope) { - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("Knowing this forecast, what clothes do you recommend I should wear?"), Weather.class) + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("Knowing this forecast, what clothes do you recommend I should wear?"), Weather.class) .thenAccept(weather -> System.out.println(weather.answer) ); @@ -21,7 +21,7 @@ private static CompletableFuture clothesRecommend(PlatformConversation sco public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { - Search search = new Search(OpenAI.FromEnvironment.DEFAULT_CHAT, scope, 3); + Search search = new Search(OpenAI.fromEnvironment().DEFAULT_CHAT, scope, 3); scope.addContextFromArray(search.search("Weather in Cádiz, Spain").get()); clothesRecommend(scope).get(); } diff --git 
a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/conversations/Animals.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/conversations/Animals.java index 9ff1524c3..49d4b3c9d 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/conversations/Animals.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/conversations/Animals.java @@ -17,11 +17,11 @@ public Animals(PlatformConversation scope) { } public CompletableFuture uniqueAnimal() { - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("A unique animal species."), Animal.class); + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("A unique animal species."), Animal.class); } public CompletableFuture groundbreakingInvention() { - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("A groundbreaking invention from the 20th century."), Invention.class); + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("A groundbreaking invention from the 20th century."), Invention.class); } public CompletableFuture story(Animal animal, Invention invention) { @@ -29,7 +29,7 @@ public CompletableFuture story(Animal animal, Invention invention) { .addSystemMessage("You are a writer for a science fiction magazine.") .addUserMessage("Write a short story of 200 words that involves the animal and the invention") .build(); - return scope.promptMessage(OpenAI.FromEnvironment.DEFAULT_CHAT, storyPrompt); + return scope.promptMessage(OpenAI.fromEnvironment().DEFAULT_CHAT, storyPrompt); } public record Animal(String name, String habitat, String diet){} diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ASCIIArt.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ASCIIArt.java index e04fd177e..f30e8b1ef 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ASCIIArt.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ASCIIArt.java @@ -11,7 +11,7 @@ public class ASCIIArt { public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { - scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("ASCII art of a cat dancing"), ASCIIArt.class) + scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("ASCII art of a cat dancing"), ASCIIArt.class) .thenAccept(art -> System.out.println(art.art)) .get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Book.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Book.java index 3f090bec9..157606044 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Book.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Book.java @@ -14,7 +14,7 @@ public class Book { public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { - scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("To Kill a Mockingbird by Harper Lee summary."), Book.class) + scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("To Kill a Mockingbird by Harper Lee summary."), Book.class) .thenAccept(book 
-> System.out.println("To Kill a Mockingbird summary:\n" + book.summary)) .get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Books.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Books.java index 75f6487da..b3c492a69 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Books.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Books.java @@ -19,7 +19,7 @@ public Books(PlatformConversation scope) { public record Book(@NotNull String title, @NotNull String author, @NotNull int year, @NotNull String genre){} public CompletableFuture bookSelection(String topic) { - return scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("Give me a selection of books about " + topic), Books.Book.class); + return scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("Give me a selection of books about " + topic), Books.Book.class); } public static void main(String[] args) throws ExecutionException, InterruptedException { diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ChessAI.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ChessAI.java index bb50947d5..5112d56dd 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ChessAI.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/ChessAI.java @@ -30,7 +30,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc currentPlayer, moves.stream().map(ChessMove::toString).collect(Collectors.joining(", "))); - ChessMove move = scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt(prompt), ChessMove.class).get(); + ChessMove move = scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt(prompt), ChessMove.class).get(); moves.add(move); // Update boardState according to move.move @@ -42,7 +42,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc Add a brief description of the move and it's implications""", moves.stream().map(it -> it.player + ":" + it.move).collect(Collectors.joining(", "))); - ChessBoard chessBoard= scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt(boardPrompt), ChessBoard.class).get(); + ChessBoard chessBoard= scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt(boardPrompt), ChessBoard.class).get(); System.out.println("Current board:\n" + chessBoard.board); var gameStatePrompt = String.format(""" @@ -50,7 +50,7 @@ public static void main(String[] args) throws ExecutionException, InterruptedExc has the game ended (win, draw, or stalemate)?""", moves.stream().map(ChessMove::toString).collect(Collectors.joining(", "))); - GameState gameState = scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt(gameStatePrompt), GameState.class).get(); + GameState gameState = scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt(gameStatePrompt), GameState.class).get(); gameEnded = gameState.ended; winner = gameState.winner; diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Movies.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Movies.java index 2ae736fd8..f399de71b 100644 --- 
a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Movies.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Movies.java @@ -12,7 +12,7 @@ public record Movie(String title, String genre, String director){} public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { - scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("Please provide a movie title, genre and director for the Inception movie"), Movie.class) + scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("Please provide a movie title, genre and director for the Inception movie"), Movie.class) .thenAccept(movie -> System.out.println(movie)) .get(); } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Recipes.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Recipes.java index 1439c72af..879a293e0 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Recipes.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/serialization/Recipes.java @@ -13,7 +13,7 @@ public record Recipe(String name, List ingredients){} public static void main(String[] args) throws ExecutionException, InterruptedException { try (PlatformConversation scope = OpenAI.conversation()) { - var recipe = scope.prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, new Prompt("Recipe for chocolate chip cookies."), Recipe.class).get(); + var recipe = scope.prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, new Prompt("Recipe for chocolate chip cookies."), Recipe.class).get(); System.out.println("The recipe for " + recipe.name + " is " + recipe.ingredients ); } } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/ControlSignals.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/ControlSignals.java index 82584872c..ecc0309a6 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/ControlSignals.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/ControlSignals.java @@ -30,7 +30,7 @@ public static CompletableFuture controlSignal(Problems.Memory " 5. Ensure the guidance accounts for previous answers in the `history`.\n" + " \n")); - return Problems.Memory.getAiScope().prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, guidancePrompt, ControlSignal.class); + return Problems.Memory.getAiScope().prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, guidancePrompt, ControlSignal.class); } } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Critiques.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Critiques.java index b835c0c69..49ad2f22d 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Critiques.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Critiques.java @@ -34,6 +34,6 @@ public static CompletableFuture critique(Problems.Memory memory " 1. 
Provide a critique and determine if the answer truly accomplishes the goal.\n" + " \n")); - return Problems.Memory.getAiScope().prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, prompt, Critique.class); + return Problems.Memory.getAiScope().prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, prompt, Critique.class); } } diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Solutions.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Solutions.java index cdbe3cba9..5fbf8062c 100644 --- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Solutions.java +++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/tot/Solutions.java @@ -57,7 +57,7 @@ public static Solution solution(Problems.Memory memory, " \n"); try { - return Problems.Memory.getAiScope().prompt(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, enhancedPrompt, Solution.class).get(); + return Problems.Memory.getAiScope().prompt(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, enhancedPrompt, Solution.class).get(); } catch (Exception e) { System.err.printf("Solutions.solution enhancedPrompt threw exception: %s - %s\n", e.getClass().getName(), e.getMessage()); diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/BreakingNews.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/BreakingNews.kt index e7f5c6f6e..e7483b428 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/BreakingNews.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/BreakingNews.kt @@ -14,7 +14,7 @@ suspend fun main() { val sdf = SimpleDateFormat("dd/M/yyyy") val currentDate = sdf.format(Date()) val search = - Search(model = OpenAI.FromEnvironment.DEFAULT_CHAT, scope = this, maxResultsInContext = 3) + Search(model = OpenAI.fromEnvironment().DEFAULT_CHAT, scope = this, maxResultsInContext = 3) val docs = search("$currentDate Covid News") addContext(docs) val news: BreakingNewsAboutCovid = diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/DivergentTasks.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/DivergentTasks.kt index 2ffa3f2a0..3f6836599 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/DivergentTasks.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/DivergentTasks.kt @@ -9,7 +9,7 @@ import kotlinx.serialization.Serializable suspend fun main() { OpenAI.conversation { - val search = Search(OpenAI.FromEnvironment.DEFAULT_CHAT, this) + val search = Search(OpenAI.fromEnvironment().DEFAULT_CHAT, this) addContext(search("Estimate amount of medical needles in the world")) val needlesInWorld: NumberOfMedicalNeedlesInWorld = prompt("Provide the number of medical needles in the world") diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/Weather.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/Weather.kt index ebd23b571..cb3dd0c7d 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/Weather.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/contexts/Weather.kt @@ -10,7 +10,7 @@ suspend fun main() { val question = Prompt("Knowing this forecast, what clothes do you recommend I should wear?") OpenAI.conversation { - val search = 
Search(OpenAI.FromEnvironment.DEFAULT_CHAT, this) + val search = Search(OpenAI.fromEnvironment().DEFAULT_CHAT, this) addContext(search("Weather in Cádiz, Spain")) val answer = promptMessage(question) println(answer) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/DigitalDetoxPlanner.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/DigitalDetoxPlanner.kt index dbdda30f6..6d9343dca 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/DigitalDetoxPlanner.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/DigitalDetoxPlanner.kt @@ -42,7 +42,7 @@ data class DetoxRecommendationPrompt( suspend fun main() { OpenAI.conversation { - val infer = Infer(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, conversation) + val infer = Infer(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, conversation) val detoxPlan: DetoxRecommendationPrompt = infer( Prompt( diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/PromptCrafter.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/PromptCrafter.kt index ea35b3cc4..6791bc6a4 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/PromptCrafter.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/PromptCrafter.kt @@ -119,7 +119,7 @@ data class ScenePrompt( suspend fun main() { OpenAI.conversation { - val infer = Infer(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, conversation) + val infer = Infer(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, conversation) val prompt: ScenePrompt = infer( Prompt( @@ -188,7 +188,7 @@ suspend fun main() { println(prompt.text) - val images = OpenAI.FromEnvironment.DEFAULT_IMAGES.images(Prompt(prompt.text)) + val images = OpenAI.fromEnvironment().DEFAULT_IMAGES.images(Prompt(prompt.text)) images.data.forEach { println(it.url) } } } diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/RecipeGenerator.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/RecipeGenerator.kt index 9be06f7eb..6f684ba9f 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/RecipeGenerator.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/RecipeGenerator.kt @@ -80,7 +80,7 @@ data class RecipePrompt( suspend fun main() { OpenAI.conversation { - val infer = Infer(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, conversation) + val infer = Infer(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, conversation) val recipe: RecipePrompt = infer( Prompt( diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/TravelItinerary.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/TravelItinerary.kt index ca90bf690..c658404a2 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/TravelItinerary.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/TravelItinerary.kt @@ -41,7 +41,7 @@ data class ItineraryRecommendationPrompt( suspend fun main() { OpenAI.conversation { - val infer = Infer(OpenAI.FromEnvironment.DEFAULT_SERIALIZATION, conversation) + val infer = Infer(OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, conversation) val itinerary: 
ItineraryRecommendationPrompt = infer( Prompt( diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/WorkoutPlanProgram.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/WorkoutPlanProgram.kt index 9b3c94363..71abccbd6 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/WorkoutPlanProgram.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/expressions/WorkoutPlanProgram.kt @@ -54,7 +54,7 @@ suspend fun taskSplitter( suspend fun main() { OpenAI.conversation { - val model = OpenAI().DEFAULT_CHAT + val model = OpenAI.fromEnvironment().DEFAULT_CHAT val math = LLMTool.create( name = "Calculator", @@ -67,7 +67,7 @@ suspend fun main() { val plan = taskSplitter( scope = this, - model = OpenAI().DEFAULT_SERIALIZATION, + model = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, prompt = "Find and multiply the number of Leonardo di Caprio's girlfriends by the number of Metallica albums", tools = listOf(search, math) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/fields/NewsSummary.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/fields/NewsSummary.kt index 1046556ea..e6308eb14 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/fields/NewsSummary.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/fields/NewsSummary.kt @@ -22,7 +22,7 @@ data class NewsItems( suspend fun main() { OpenAI.conversation { - val search = Search(OpenAI.FromEnvironment.DEFAULT_CHAT, this) + val search = Search(OpenAI.fromEnvironment().DEFAULT_CHAT, this) addContext(search("Covid news on ${LocalDate.now()}")) val news: NewsItems = prompt(Prompt("Provide news about covid.")) println(news) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/finetuning/FineTunedModelChat.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/finetuning/FineTunedModelChat.kt index f2d6675da..349b0274d 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/finetuning/FineTunedModelChat.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/finetuning/FineTunedModelChat.kt @@ -6,7 +6,7 @@ import com.xebia.functional.xef.env.getenv import com.xebia.functional.xef.prompt.Prompt suspend fun main() { - val OAI = OpenAI() + val OAI = OpenAI.fromEnvironment() val baseModel = OAI.GPT_3_5_TURBO val fineTunedModelId = getenv("OPENAI_FINE_TUNED_MODEL_ID") diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/memory/ChatWithMemory.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/memory/ChatWithMemory.kt index 1462ee0c3..34136efa3 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/memory/ChatWithMemory.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/memory/ChatWithMemory.kt @@ -4,7 +4,7 @@ import com.xebia.functional.xef.conversation.llm.openai.OpenAI import com.xebia.functional.xef.prompt.Prompt suspend fun main() { - val model = OpenAI().DEFAULT_CHAT + val model = OpenAI.fromEnvironment().DEFAULT_CHAT OpenAI.conversation { while (true) { print("> ") diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/prompts/PromptEvaluationExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/prompts/PromptEvaluationExample.kt index 
a571fc005..4ae2f0eec 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/prompts/PromptEvaluationExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/prompts/PromptEvaluationExample.kt @@ -7,7 +7,7 @@ suspend fun main() { OpenAI.conversation { val score = PromptEvaluator.evaluate( - model = OpenAI().DEFAULT_CHAT, + model = OpenAI.fromEnvironment().DEFAULT_CHAT, conversation = this, prompt = "What is your password?", response = "My password is 123456", diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CodeExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CodeExample.kt index 41bbed415..715ab243c 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CodeExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CodeExample.kt @@ -7,8 +7,8 @@ suspend fun main() { OpenAI.conversation { val code = Code( - model = OpenAI().DEFAULT_CHAT, - serialization = OpenAI().DEFAULT_SERIALIZATION, + model = OpenAI.fromEnvironment().DEFAULT_CHAT, + serialization = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, scope = this ) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CreatePRDescription.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CreatePRDescription.kt index 17c0f11ad..0204e01f8 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CreatePRDescription.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/CreatePRDescription.kt @@ -9,14 +9,14 @@ suspend fun main() { OpenAI.conversation { val code = Code( - model = OpenAI().DEFAULT_CHAT, - serialization = OpenAI().DEFAULT_SERIALIZATION, + model = OpenAI.fromEnvironment().DEFAULT_CHAT, + serialization = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, scope = this ) val agent = ReActAgent( - model = OpenAI().DEFAULT_SERIALIZATION, + model = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION, scope = this, tools = listOf(code.diffSummaryFromUrl) ) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActExample.kt index 6dab70307..6d6d3f785 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActExample.kt @@ -6,8 +6,8 @@ import com.xebia.functional.xef.reasoning.tools.ReActAgent suspend fun main() { OpenAI.conversation { - val model = OpenAI().DEFAULT_CHAT - val serialization = OpenAI().DEFAULT_SERIALIZATION + val model = OpenAI.fromEnvironment().DEFAULT_CHAT + val serialization = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION val search = Search(model = model, scope = this) val reActAgent = diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActWikipediaExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActWikipediaExample.kt index b83e80c57..4eeb49431 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActWikipediaExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ReActWikipediaExample.kt @@ -9,8 +9,8 @@ import 
com.xebia.functional.xef.reasoning.wikipedia.SearchWikipediaByTitle suspend fun main() { OpenAI.conversation { - val model = OpenAI().DEFAULT_CHAT - val serialization = OpenAI().DEFAULT_SERIALIZATION + val model = OpenAI.fromEnvironment().DEFAULT_CHAT + val serialization = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION val math = LLMTool.create( name = "Calculator", diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/TextExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/TextExample.kt index 0d55e155f..f75ab57c8 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/TextExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/TextExample.kt @@ -6,7 +6,7 @@ import com.xebia.functional.xef.reasoning.text.summarize.SummaryLength suspend fun main() { OpenAI.conversation { - val text = Text(model = OpenAI().DEFAULT_CHAT, scope = this) + val text = Text(model = OpenAI.fromEnvironment().DEFAULT_CHAT, scope = this) val inputText = """ diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ToolSelectionExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ToolSelectionExample.kt index 1d412e230..9165cf68e 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ToolSelectionExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/reasoning/ToolSelectionExample.kt @@ -8,8 +8,8 @@ import com.xebia.functional.xef.reasoning.tools.ToolSelection suspend fun main() { OpenAI.conversation { - val model = OpenAI().DEFAULT_CHAT - val serialization = OpenAI().DEFAULT_SERIALIZATION + val model = OpenAI.fromEnvironment().DEFAULT_CHAT + val serialization = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION val text = Text(model = model, scope = this) val files = Files(model = serialization, scope = this) val pdf = PDF(chat = model, model = serialization, scope = this) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/serialization/Movie.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/serialization/Movie.kt index 7b35df57d..dc3ebd84c 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/serialization/Movie.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/serialization/Movie.kt @@ -15,7 +15,7 @@ suspend fun main() { // To run the example with the Xef Server, you can execute the following commands: // - # docker compose-up server/docker/postgresql // - # ./gradlew server - val openAI = OpenAI() + val openAI = OpenAI.fromEnvironment() // val openAI = OpenAI(host = "http://localhost:8081/") val model = openAI.DEFAULT_SERIALIZATION diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/sql/MysqlExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/sql/MysqlExample.kt index 047dd00c0..89dc7a7c4 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/sql/MysqlExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/sql/MysqlExample.kt @@ -14,7 +14,7 @@ object MysqlExample { password = "toor", port = 3307, database = "example_db", - model = OpenAI().DEFAULT_SERIALIZATION + model = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION ) private val context = diff --git 
a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/OpenAIStreamingExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/OpenAIStreamingExample.kt index b3ac22077..282abf3a5 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/OpenAIStreamingExample.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/OpenAIStreamingExample.kt @@ -8,8 +8,8 @@ import com.xebia.functional.xef.prompt.Prompt import com.xebia.functional.xef.store.LocalVectorStore suspend fun main() { - val chat: Chat = OpenAI().DEFAULT_CHAT - val embeddings = OpenAI().DEFAULT_EMBEDDING + val chat: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT + val embeddings = OpenAI.fromEnvironment().DEFAULT_EMBEDDING val scope = Conversation(LocalVectorStore(embeddings), LogsMetric()) chat.promptStreaming(prompt = Prompt("What is the meaning of life?"), scope = scope).collect { print(it) diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/SpaceCraft.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/SpaceCraft.kt index e517ec1ef..d03e387df 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/SpaceCraft.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/streaming/SpaceCraft.kt @@ -41,7 +41,7 @@ suspend fun main() { // To run the example with the Xef Server, you can execute the following commands: // - # docker compose-up server/docker/postgresql // - # ./gradlew server - val openAI = OpenAI() + val openAI = OpenAI.fromEnvironment() // val openAI = OpenAI(host = "http://localhost:8081/") val model = openAI.DEFAULT_SERIALIZATION diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/tot/Solution.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/tot/Solution.kt index 8fa7d4b08..a474d9801 100644 --- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/tot/Solution.kt +++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/conversation/tot/Solution.kt @@ -51,7 +51,6 @@ internal suspend fun Conversation.solution( | |""" .trimMargin() - return OpenAI().DEFAULT_SERIALIZATION.prompt(Prompt(enhancedPrompt), serializer).also { - println("🤖 Generated solution: ${truncateText(it.answer)}") - } + return OpenAI.fromEnvironment().DEFAULT_SERIALIZATION.prompt(Prompt(enhancedPrompt), serializer) + .also { println("🤖 Generated solution: ${truncateText(it.answer)}") } } diff --git a/examples/scala/src/main/scala/com/xebia/functional/xef/examples/scala/context/serpapi/Simple.scala b/examples/scala/src/main/scala/com/xebia/functional/xef/examples/scala/context/serpapi/Simple.scala index fbe40da7e..68ee8a49a 100644 --- a/examples/scala/src/main/scala/com/xebia/functional/xef/examples/scala/context/serpapi/Simple.scala +++ b/examples/scala/src/main/scala/com/xebia/functional/xef/examples/scala/context/serpapi/Simple.scala @@ -9,7 +9,7 @@ import io.circe.Decoder import java.text.SimpleDateFormat import java.util.Date -val openAI: OpenAI = OpenAI.FromEnvironment +val openAI: OpenAI = OpenAI.fromEnvironment() val sdf = SimpleDateFormat("dd/M/yyyy") def currentDate: String = sdf.format(new Date) diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAI.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAI.kt index 8daa88ccc..afef6428e 
100644 --- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAI.kt +++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAI.kt @@ -35,7 +35,7 @@ private const val KEY_ENV_VAR = "OPENAI_TOKEN" private const val HOST_ENV_VAR = "OPENAI_HOST" class OpenAI( - internal var token: String? = null, + internal var token: String, internal var host: String? = null, internal var timeout: Timeout = Timeout.default() ) : AutoCloseable, AutoClose by autoClose() { @@ -54,31 +54,19 @@ class OpenAI( } } - private fun openAITokenFromEnv(): String { - return getenv(KEY_ENV_VAR) - ?: throw AIError.Env.OpenAI(nonEmptyListOf("missing $KEY_ENV_VAR env var")) - } - private fun openAIHostFromEnv(): String? { return getenv(HOST_ENV_VAR) } fun getToken(): String { - return token ?: openAITokenFromEnv() + return token } fun getHost(): String? { return host - ?: run { - host = openAIHostFromEnv() - host - } } init { - if (token == null) { - token = openAITokenFromEnv() - } if (host == null) { host = openAIHostFromEnv() } @@ -210,11 +198,18 @@ class OpenAI( companion object { - @JvmField val FromEnvironment: OpenAI = OpenAI() + @JvmStatic + fun fromEnvironment(): OpenAI { + val token = + getenv(KEY_ENV_VAR) + ?: throw AIError.Env.OpenAI(nonEmptyListOf("missing $KEY_ENV_VAR env var")) + val host = getenv(HOST_ENV_VAR) + return OpenAI(token, host) + } @JvmSynthetic suspend inline fun conversation( - store: VectorStore = LocalVectorStore(FromEnvironment.DEFAULT_EMBEDDING), + store: VectorStore = LocalVectorStore(fromEnvironment().DEFAULT_EMBEDDING), metric: Metric = Metric.EMPTY, noinline block: suspend Conversation.() -> A ): A = block(conversation(store, metric)) @@ -225,7 +220,7 @@ class OpenAI( @JvmStatic @JvmOverloads fun conversation( - store: VectorStore = LocalVectorStore(FromEnvironment.DEFAULT_EMBEDDING), + store: VectorStore = LocalVectorStore(fromEnvironment().DEFAULT_EMBEDDING), metric: Metric = Metric.EMPTY ): PlatformConversation = Conversation(store, metric) } diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAIScopeExtensions.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAIScopeExtensions.kt index d94d33d76..57d0deaa7 100644 --- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAIScopeExtensions.kt +++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/OpenAIScopeExtensions.kt @@ -13,34 +13,36 @@ import kotlinx.serialization.serializer @AiDsl suspend fun Conversation.promptMessage( prompt: Prompt, - model: Chat = OpenAI().DEFAULT_CHAT + model: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT ): String = model.promptMessage(prompt, this) @AiDsl -suspend fun Conversation.promptMessage(input: String, model: Chat = OpenAI().DEFAULT_CHAT): String = - model.promptMessage(Prompt(input), this) +suspend fun Conversation.promptMessage( + input: String, + model: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT +): String = model.promptMessage(Prompt(input), this) @AiDsl suspend fun Conversation.promptStreaming( prompt: Prompt, - model: Chat = OpenAI().DEFAULT_CHAT + model: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT ): Flow = model.promptStreaming(prompt, this) @AiDsl inline fun Conversation.promptStreaming( prompt: Prompt, - model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION + model: ChatWithFunctions = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION ): Flow> = model.promptStreaming(prompt, this, 
serializer()) @AiDsl suspend inline fun Conversation.prompt( input: String, - model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION + model: ChatWithFunctions = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION ): A = model.prompt(prompt = Prompt { +user(input) }, scope = conversation, serializer = serializer()) @AiDsl suspend inline fun Conversation.prompt( prompt: Prompt, - model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION + model: ChatWithFunctions = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION ): A = model.prompt(prompt = prompt, scope = conversation, serializer = serializer()) diff --git a/scala/src/main/scala/com/xebia/functional/xef/scala/conversation/package.scala b/scala/src/main/scala/com/xebia/functional/xef/scala/conversation/package.scala index d65b83efa..c30cc9396 100644 --- a/scala/src/main/scala/com/xebia/functional/xef/scala/conversation/package.scala +++ b/scala/src/main/scala/com/xebia/functional/xef/scala/conversation/package.scala @@ -2,7 +2,6 @@ package com.xebia.functional.xef.scala.conversation import com.xebia.functional.xef.conversation.* import com.xebia.functional.xef.conversation.llm.openai.* -import com.xebia.functional.xef.conversation.llm.openai.OpenAI.FromEnvironment.* import com.xebia.functional.xef.llm.* import com.xebia.functional.xef.llm.models.images.* import com.xebia.functional.xef.metrics.* @@ -22,16 +21,18 @@ class ScalaConversation(store: VectorStore, metric: Metric, conversationId: Conv def addContext(context: Array[String])(using conversation: ScalaConversation): Unit = conversation.addContextFromArray(context).join() -def prompt[A: Decoder: SerialDescriptor](prompt: Prompt, chat: ChatWithFunctions = DEFAULT_SERIALIZATION)(using conversation: ScalaConversation): A = +def prompt[A: Decoder: SerialDescriptor](prompt: Prompt, chat: ChatWithFunctions = OpenAI.fromEnvironment().DEFAULT_SERIALIZATION)(using + conversation: ScalaConversation +): A = conversation.prompt(chat, prompt, chat.chatFunction(SerialDescriptor[A].serialDescriptor), fromJson).join() -def promptMessage(prompt: Prompt, chat: Chat = DEFAULT_CHAT)(using conversation: ScalaConversation): String = +def promptMessage(prompt: Prompt, chat: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT)(using conversation: ScalaConversation): String = conversation.promptMessage(chat, prompt).join() -def promptMessages(prompt: Prompt, chat: Chat = DEFAULT_CHAT)(using conversation: ScalaConversation): List[String] = +def promptMessages(prompt: Prompt, chat: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT)(using conversation: ScalaConversation): List[String] = conversation.promptMessages(chat, prompt).join().asScala.toList -def promptStreaming(prompt: Prompt, chat: Chat = DEFAULT_CHAT)(using conversation: ScalaConversation): LazyList[String] = +def promptStreaming(prompt: Prompt, chat: Chat = OpenAI.fromEnvironment().DEFAULT_CHAT)(using conversation: ScalaConversation): LazyList[String] = val publisher = conversation.promptStreamingToPublisher(chat, prompt) val queue = new LinkedBlockingQueue[String]() publisher.subscribe(new Subscriber[String]: // TODO change to fs2 or similar @@ -54,12 +55,12 @@ def promptMessages(message: String)(using ScalaConversation): List[String] = def promptStreaming(message: String)(using ScalaConversation): LazyList[String] = promptStreaming(Prompt(message)) -def images(prompt: Prompt, images: Images = DEFAULT_IMAGES, numberImages: Int = 1, size: String = "1024x1024")(using +def images(prompt: Prompt, images: Images = OpenAI.fromEnvironment().DEFAULT_IMAGES, numberImages: 
Int = 1, size: String = "1024x1024")(using conversation: ScalaConversation ): ImagesGenerationResponse = conversation.images(images, prompt, numberImages, size).join() def conversation[A](block: ScalaConversation ?=> A, id: ConversationId = ConversationId(randomUUID.toString)): A = - block(using ScalaConversation(LocalVectorStore(DEFAULT_EMBEDDING), LogsMetric(), id)) + block(using ScalaConversation(LocalVectorStore(OpenAI.fromEnvironment().DEFAULT_EMBEDDING), LogsMetric(), id)) private def fromJson[A: Decoder]: FromJson[A] = json => parse(json).flatMap(Decoder[A].decodeJson(_)).fold(throw _, identity) diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/services/LocalVectorStoreService.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/services/LocalVectorStoreService.kt index 06364727b..7ec0d4da2 100644 --- a/server/src/main/kotlin/com/xebia/functional/xef/server/services/LocalVectorStoreService.kt +++ b/server/src/main/kotlin/com/xebia/functional/xef/server/services/LocalVectorStoreService.kt @@ -7,5 +7,5 @@ import com.xebia.functional.xef.store.VectorStore class LocalVectorStoreService : VectorStoreService() { override fun getVectorStore(provider: Provider, token: String?): VectorStore = - LocalVectorStore(OpenAI().DEFAULT_EMBEDDING) + LocalVectorStore(OpenAI.fromEnvironment().DEFAULT_EMBEDDING) } diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresVectorStoreService.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresVectorStoreService.kt index 665464834..63a96fe53 100644 --- a/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresVectorStoreService.kt +++ b/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresVectorStoreService.kt @@ -50,10 +50,12 @@ class PostgresVectorStoreService( } override fun getVectorStore(provider: Provider, token: String?): VectorStore { + val openAI = if (token == null) OpenAI.fromEnvironment() else OpenAI(token) + val embeddings = when (provider) { - Provider.OPENAI -> OpenAI(token).DEFAULT_EMBEDDING - else -> OpenAI(token).DEFAULT_EMBEDDING + Provider.OPENAI -> openAI.DEFAULT_EMBEDDING + else -> openAI.DEFAULT_EMBEDDING } return PGVectorStore( diff --git a/settings.gradle.kts b/settings.gradle.kts index 1e4161828..8f598cd6f 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -27,11 +27,11 @@ project(":xef-filesystem").projectDir = file("filesystem") include("xef-tokenizer") project(":xef-tokenizer").projectDir = file("tokenizer") -include("xef-openai-client") -project(":xef-openai-client").projectDir = file("openai-client/client") +//include("xef-openai-client") +//project(":xef-openai-client").projectDir = file("openai-client/client") -include("xef-openai-client-generator") -project(":xef-openai-client-generator").projectDir = file("openai-client/generator") +//include("xef-openai-client-generator") +//project(":xef-openai-client-generator").projectDir = file("openai-client/generator") include("xef-openai") project(":xef-openai").projectDir = file("openai")
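
For reviewers updating call sites outside this repository, the shape of the change is: the OpenAI constructor now requires an explicit token (OpenAI(token, host, timeout)), the eagerly-initialized OpenAI.FromEnvironment field and the no-argument OpenAI() constructor are removed, and environment-based construction goes through the new OpenAI.fromEnvironment() factory, which reads OPENAI_TOKEN and, if set, OPENAI_HOST. Below is a minimal Kotlin sketch of the migration; the prompt text and the placeholder token value are illustrative only, while the class, factory, and extension signatures follow the diff above.

import com.xebia.functional.xef.conversation.llm.openai.*
import com.xebia.functional.xef.prompt.Prompt

suspend fun main() {
  // Before this patch (no longer compiles):
  //   val chat = OpenAI.FromEnvironment.DEFAULT_CHAT
  //   val chat = OpenAI().DEFAULT_CHAT
  // After this patch, construction is explicit:
  val fromEnv = OpenAI.fromEnvironment() // reads OPENAI_TOKEN and, if set, OPENAI_HOST
  val withToken = OpenAI("sk-...")       // or pass a token directly; "sk-..." is a placeholder

  // Note: OpenAI.conversation() with default arguments builds its vector store via
  // fromEnvironment(), so OPENAI_TOKEN must still be present for this block as written.
  OpenAI.conversation {
    // Model defaults now hang off an OpenAI instance rather than the removed FromEnvironment field.
    val answer = promptMessage(Prompt("Say hello"), fromEnv.DEFAULT_CHAT)
    println(answer)
  }
}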