From a815a33117ed2de450af710319ba7c3ad3180ae3 Mon Sep 17 00:00:00 2001
From: Gerson Pozo
Date: Tue, 14 Nov 2023 11:41:10 +0100
Subject: [PATCH] Remove Java and Scala support (#25)

---
 content/docs/learn/examples.md                |   9 +-
 content/docs/learn/integrations/sql.md        |   3 +-
 .../{quickstart/kotlin.md => quickstart.md}   |  12 +-
 content/docs/learn/quickstart/_category_.json |   9 -
 content/docs/learn/quickstart/index.md        |  15 -
 content/docs/learn/quickstart/java.md         | 284 ------------------
 content/docs/learn/quickstart/scala.md        | 198 ------------
 docusaurus.config.js                          |  38 +--
 src/pages/index.tsx                           |  52 ----
 9 files changed, 10 insertions(+), 610 deletions(-)
 rename content/docs/learn/{quickstart/kotlin.md => quickstart.md} (96%)
 delete mode 100644 content/docs/learn/quickstart/_category_.json
 delete mode 100644 content/docs/learn/quickstart/index.md
 delete mode 100644 content/docs/learn/quickstart/java.md
 delete mode 100644 content/docs/learn/quickstart/scala.md

diff --git a/content/docs/learn/examples.md b/content/docs/learn/examples.md
index 4f840cd..9e4e000 100644
--- a/content/docs/learn/examples.md
+++ b/content/docs/learn/examples.md
@@ -7,11 +7,4 @@ description: Explore all that xef.ai offers
 
 # More examples
 
-You can also have a look at the examples to have a feeling of how using the library looks like.
-
-
-Examples in Kotlin
-
-Examples in Scala
-
-Examples in Java
+You can also have a look at the [examples](https://github.com/xebia-functional/xef/tree/main/examples/kotlin/src/main/kotlin/com/xebia/functional/xef) to get a feel for what using the library looks like.
diff --git a/content/docs/learn/integrations/sql.md b/content/docs/learn/integrations/sql.md
index a36f521..21c45cc 100644
--- a/content/docs/learn/integrations/sql.md
+++ b/content/docs/learn/integrations/sql.md
@@ -26,7 +26,8 @@ repositories {
 }
 
 dependencies {
-    implementation("com.xebia:xef-kotlin:<version>")
+    implementation("com.xebia:xef-core:<version>")
+    implementation("com.xebia:xef-openai:<version>")
     implementation("com.xebia:xef-sql:<version>")
 }
 ```
diff --git a/content/docs/learn/quickstart/kotlin.md b/content/docs/learn/quickstart.md
similarity index 96%
rename from content/docs/learn/quickstart/kotlin.md
rename to content/docs/learn/quickstart.md
index 576c340..e2ebc28 100644
--- a/content/docs/learn/quickstart/kotlin.md
+++ b/content/docs/learn/quickstart.md
@@ -1,16 +1,15 @@
 ---
-id: kotlin
-title: Kotlin
-sidebar_position: 1
+title: Quickstart
 description: Get xef.ai up and running in Kotlin
+sidebar_position: 2
 sidebar_custom_props:
-  icon: kotlin-icon.svg
+  icon: icon-quickstart.svg
 ---
 
 import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
 
-#
+#
 
 ## Getting the libraries
 
@@ -23,7 +22,8 @@ repositories {
 }
 
 dependencies {
-    implementation("com.xebia:xef-kotlin:<version>")
+    implementation("com.xebia:xef-core:<version>")
+    implementation("com.xebia:xef-openai:<version>")
 }
 ```
diff --git a/content/docs/learn/quickstart/_category_.json b/content/docs/learn/quickstart/_category_.json
deleted file mode 100644
index 2d0537c..0000000
--- a/content/docs/learn/quickstart/_category_.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  "label": "Quickstart",
-  "position": 2,
-  "customProps": {
-    "icon": "icon-quickstart.svg",
-    "description": "xef.ai provides support for both Kotlin and Scala",
-    "overview": true
-  }
-}
diff --git a/content/docs/learn/quickstart/index.md b/content/docs/learn/quickstart/index.md
deleted file mode 100644
index 9d6d16e..0000000
--- a/content/docs/learn/quickstart/index.md
+++ /dev/null
@@ -1,15 +0,0 @@
----
-title: Quickstart
-sidebar_custom_props:
-  icon: icon-quickstart.svg
-  overview: true
----
-
-import { useCurrentSidebarCategory } from '@docusaurus/theme-common';
-import DocCardList from '@theme/DocCardList';
-
-#
-
-{useCurrentSidebarCategory().customProps.description}
-
-<DocCardList />
diff --git a/content/docs/learn/quickstart/java.md b/content/docs/learn/quickstart/java.md
deleted file mode 100644
index b9100af..0000000
--- a/content/docs/learn/quickstart/java.md
+++ /dev/null
@@ -1,284 +0,0 @@
----
-id: java
-title: Java
-sidebar_position: 3
-description: Get xef.ai up and running in Java
-sidebar_custom_props:
-  icon: java-icon.svg
----
-
-import Tabs from '@theme/Tabs';
-import TabItem from '@theme/TabItem';
-
-#
-
-## Getting the libraries
-
-Libraries are published in Maven Central. You may need to add that repository explicitly
-in your build, if you haven't done it before. Then add the library in the usual way.
-
-```xml
-<dependency>
-    <groupId>com.xebia</groupId>
-    <artifactId>xef-java</artifactId>
-    <version>0.0.2</version>
-    <type>pom</type>
-</dependency>
-```
-
-```kotlin
-repositories {
-    mavenCentral()
-}
-
-dependencies {
-    implementation("com.xebia:xef-java:<version>")
-}
-```
-
-We publish all libraries at once under the same version, so
-[version catalogs](https://docs.gradle.org/current/userguide/platforms.html#sec:sharing-catalogs)
-could be useful.
-
-By default, the `Conversation` scope connects to [OpenAI](https://platform.openai.com/).
-To use their services you should provide the corresponding API key in the `OPENAI_TOKEN`
-environment variable, and have enough credits.
-
-Set the environment variable `OPENAI_TOKEN=xxx` in the properties.
-
-```shell
-env OPENAI_TOKEN=<your-token>
-```
-
-:::caution

-This library may transmit source code and potentially user input data to third-party services as part of its functionality.
-Developers integrating this library into their applications should be aware of this behavior and take the necessary precautions to ensure that sensitive data is not inadvertently transmitted.
-Read our [_Data Transmission Disclosure_](https://github.com/xebia-functional/xef#%EF%B8%8F-data-transmission-disclosure) for further information.
-
-:::
-
-## Your first prompt
-
-After adding the library to your project,
-you get access to the `Conversation` class, which is your port of entry to the modern AI world.
-Inside of it, you can _prompt_ for information, which means posing a question to an LLM
-(Large Language Model). The easiest way is to just get the information back as a string.
-
-```java
-package example;
-
-import com.xebia.functional.xef.conversation.llm.openai.OpenAI;
-import com.xebia.functional.xef.prompt.Prompt;
-
-import java.util.concurrent.ExecutionException;
-
-public class Books {
-    public static void main(String[] args) throws ExecutionException, InterruptedException {
-        try (var scope = OpenAI.conversation()) {
-            var book = scope.promptMessage(
-                OpenAI.FromEnvironment.DEFAULT_SERIALIZATION,
-                new Prompt("A book about cooking")
-            ).get();
-            System.out.println(book);
-        }
-    }
-}
-```
-
-In the example above we create a `Conversation` using the try-with-resources syntax,
-which ensures that the scope is closed at the end of the block.
-The `Conversation` gives us access to `promptMessage` and related functions, which allow us to interact with the LLM.
-
-All the functions of `Conversation` return a `Future` for maximum backward compatibility down to JDK 8,
-but you can inject `Executors.newVirtualThreadPerTaskExecutor()` to have the `Future`s run on virtual threads.
-We block in these examples assuming Loom.
-
-Remember that exceptions in a `Future` are wrapped in `ExecutionException`,
-so to inspect the actual exception you need to call `getCause()` on it.
-_Structured Concurrency_ is implemented under the hood by Kotlin's `CoroutineScope`:
-when the `Conversation` is closed, all pending futures are cancelled, and `Future#get` throws `CancellationException`.
-
-In the next examples we'll write functions that rely on `Conversation`'s DSL functionality.
-
-## Structure
-
-The output of calls like the one in `Books` above may be hard to parse back from the
-strings we obtain. Fortunately, you can also ask xef.ai to give you back the information
-using a _custom type_. The library takes care of instructing the LLM on building such
-a structure, and deserializes the result back for you.
-
-In the following example we define a new domain around a `MealPlan` class that describes the desired response we want to receive from the LLM.
-Relying on [Jakarta validation](https://beanvalidation.org) we can also specify which fields are mandatory using `NotNull`,
-or include additional constraints in the [Json Schema](https://json-schema.org).
-
-xef.ai reuses [Jackson](https://github.com/FasterXML/jackson-databind)
-and the [JsonSchema generator](https://github.com/victools/jsonschema-generator) to parse and generate the Json Schema for you.
-
-```java
-package example;
-
-import com.xebia.functional.xef.conversation.PlatformConversation;
-import com.xebia.functional.xef.conversation.llm.openai.OpenAI;
-import com.xebia.functional.xef.prompt.Prompt;
-import jakarta.validation.constraints.NotNull;
-
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-
-public class MealPlan {
-
-    public record MealPlanRecord(@NotNull String name, @NotNull List<Recipe> recipes) {}
-    public record Recipe(@NotNull String name, @NotNull List<String> ingredients) {}
-
-    private static MealPlanRecord mealPlan(PlatformConversation scope) {
-        return scope.prompt(
-            OpenAI.FromEnvironment.DEFAULT_SERIALIZATION,
-            new Prompt("Meal plan for the week for a person with gall bladder stones that includes 5 recipes."),
-            MealPlanRecord.class
-        );
-    }
-
-    public static void main(String[] args) throws ExecutionException, InterruptedException {
-        try (var scope = OpenAI.conversation()) {
-            System.out.println(mealPlan(scope));
-        }
-    }
-}
-```
-
-In a larger AI application it's common to end up with quite a few prompt templates.
-Online material like [this course](https://www.deeplearning.ai/short-courses/chatgpt-prompt-engineering-for-developers/)
-and [this tutorial](https://learnprompting.org/docs/intro) explains some of the most important patterns,
-some of which are readily available in xef.ai.
-
-## Context
-
-LLMs have knowledge about a broad variety of topics. But by construction they are not able
-to respond to questions about information not available in their training set. However, you
-often want to supplement the LLM with more data:
-- Transient information referring to the current moment, like the current weather, or
-  the trends in the stock market over the past 10 days.
-- Non-public information, for example for summarizing a piece of text you're creating
-  within your organization.
-
-These additional pieces of information are called the _context_ in xef.ai, and are attached
-to every question to the LLM. Although you can add arbitrary strings to the context at any
-point, the most common mode of usage is using an _agent_ to consult an external service,
-and make its response part of the context.
-One such agent is `search`, which uses a web search service to enrich that context.
-
-```java
-package example;
-
-import com.xebia.functional.xef.conversation.PlatformConversation;
-import com.xebia.functional.xef.conversation.llm.openai.OpenAI;
-import com.xebia.functional.xef.prompt.Prompt;
-import com.xebia.functional.xef.reasoning.serpapi.Search;
-
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-
-public class MealPlan {
-
-    public record MealPlanRecord(String name, List<Recipe> recipes) {}
-    public record Recipe(String name, List<String> ingredients) {}
-
-    private static MealPlanRecord mealPlan(PlatformConversation scope) {
-        return scope.prompt(
-            OpenAI.FromEnvironment.DEFAULT_SERIALIZATION,
-            new Prompt("Meal plan for the week for a person with gall bladder stones that includes 5 recipes."),
-            MealPlanRecord.class
-        );
-    }
-
-    public static void main(String[] args) throws ExecutionException, InterruptedException {
-        try (var scope = OpenAI.conversation()) {
-            var search = new Search(OpenAI.FromEnvironment.DEFAULT_CHAT, scope, 3);
-            scope.addContextFromArray(search.search("gall bladder stones meals").get());
-            System.out.println(mealPlan(scope));
-        }
-    }
-}
-```
-
-In some cases the LLM needs to be _primed_ with some information before it can answer.
-We can use the `@Description` annotation to describe, for each of our objects, the
-response we expect from the LLM:
-
-## @Description annotations
-
-```java
-package example;
-
-import com.xebia.functional.xef.conversation.PlatformConversation;
-import com.xebia.functional.xef.conversation.llm.openai.OpenAI;
-import com.xebia.functional.xef.prompt.Prompt;
-import com.xebia.functional.xef.reasoning.serpapi.Search;
-import com.xebia.functional.xef.conversation.jvm.Description;
-
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-
-public class MealPlan {
-
-    @Description("A meal plan")
-    public record MealPlanRecord(
-        @Description("The name of the meal plan") String name,
-        @Description("A list of 5 recipes for the meal plan") List<Recipe> recipes
-    ) {}
-
-    public record Recipe(
-        @Description("The name of the recipe") String name,
-        @Description("A list of ingredients for the recipe") List<String> ingredients
-    ) {}
-
-    private static MealPlanRecord mealPlan(PlatformConversation scope) {
-        return scope.prompt(
-            OpenAI.FromEnvironment.DEFAULT_SERIALIZATION,
-            new Prompt("Meal plan for the week for a person with gall bladder stones that includes 5 recipes."),
-            MealPlanRecord.class
-        );
-    }
-
-    public static void main(String[] args) throws ExecutionException, InterruptedException {
-        try (var scope = OpenAI.conversation()) {
-            var search = new Search(OpenAI.FromEnvironment.DEFAULT_CHAT, scope, 3);
-            scope.addContextFromArray(search.search("gall bladder stones meals").get());
-            System.out.println(mealPlan(scope));
-        }
-    }
-}
-```
-
-:::note Better vector stores
-
-The underlying mechanism of the context is a _vector store_, a data structure which
-saves a set of strings, and is able to find those similar to another given one.
-By default, xef.ai uses an _in-memory_ vector store, since it provides maximum
-compatibility across platforms. However, if you foresee your context growing above
-the hundreds of elements, you may consider switching to another alternative, like
-Lucene or PostgreSQL.
-
-:::
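For readers coming from the removed Java page, the same structured-output-plus-search flow lives on in the consolidated Kotlin quickstart. Below is a minimal sketch of what it looks like there, assuming the `OpenAI.conversation` entry point and a reified `prompt` helper in the `com.xebia.functional.xef.conversation.llm.openai` package, with `kotlinx.serialization` deriving the schema; import paths and helper names are illustrative, so check the published Kotlin quickstart for the exact API:

```kotlin
import com.xebia.functional.xef.conversation.llm.openai.OpenAI
import com.xebia.functional.xef.conversation.llm.openai.prompt
import com.xebia.functional.xef.prompt.Prompt
import kotlinx.serialization.Serializable

// The target structure: the library turns the @Serializable classes into a
// JSON schema for the LLM and deserializes the answer back into them.
@Serializable
data class Recipe(val name: String, val ingredients: List<String>)

@Serializable
data class MealPlan(val name: String, val recipes: List<Recipe>)

suspend fun main() {
  // conversation { } scopes the exchange, much like the Java try-with-resources block.
  OpenAI.conversation {
    val mealPlan: MealPlan = prompt(
      Prompt("Meal plan for the week for a person with gall bladder stones that includes 5 recipes.")
    )
    println(mealPlan)
  }
}
```

Compared with the Java version, there are no futures to unwrap: `prompt` suspends, and cancellation follows the surrounding coroutine scope.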
diff --git a/content/docs/learn/quickstart/scala.md b/content/docs/learn/quickstart/scala.md
deleted file mode 100644
index a18faba..0000000
--- a/content/docs/learn/quickstart/scala.md
+++ /dev/null
@@ -1,198 +0,0 @@
----
-id: scala
-title: Scala
-sidebar_position: 2
-description: Get xef.ai up and running in Scala
-sidebar_custom_props:
-  icon: scala-icon.svg
----
-
-import Tabs from '@theme/Tabs';
-import TabItem from '@theme/TabItem';
-
-#
-
-## Getting the libraries
-
-Just add the library to your SBT build.
-
-```scala
-libraryDependencies += "com.xebia" %% "xef-scala" % "<version>"
-```
-
-:::info Version compatibility
-
-`xef-scala` is currently only available for Scala 3.
-
-:::
-
-The Scala module depends on project [Loom](https://openjdk.org/projects/loom/),
-so you will need at least Java 19 to use the library. Furthermore, you need to pass
-the `--enable-preview` flag.
-
-```shell
-sbt -J--enable-preview
-```
-
-Or, in your IDE:
-
-- Set the Java version to at least 19.
-- Set VM options to `--enable-preview`.
-
-By default, the `conversation` block connects to [OpenAI](https://platform.openai.com/).
-To use their services you should provide the corresponding API key in the `OPENAI_TOKEN`
-environment variable, and have enough credits.
-
-```shell
-env OPENAI_TOKEN=<your-token>
-```
-
-Set the environment variable `OPENAI_TOKEN=xxx` in the properties.
-
-:::caution
-
-This library may transmit source code and potentially user input data to third-party services as part of its functionality.
-Developers integrating this library into their applications should be aware of this behavior and take the necessary precautions to ensure that sensitive data is not inadvertently transmitted.
-Read our [_Data Transmission Disclosure_](https://github.com/xebia-functional/xef#%EF%B8%8F-data-transmission-disclosure) for further information.
-
-:::
-
-## Your first prompt
-
-After adding the library to your project,
-you get access to the `conversation` function, which is your gateway to the modern AI world.
-Inside of it, you can _prompt_ for information, which means posing a question to an LLM
-(Large Language Model). The easiest way is to just get the information back as a string.
-
-```scala
-import com.xebia.functional.xef.scala.auto.*
-
-@main def runBook: Unit = conversation {
-  val topic: String = "functional programming"
-  val result = promptMessage(s"Give me a selection of books about $topic")
-  println(result)
-}
-```
-
-## Structure
-
-The output from the `runBook` program above may be hard to parse back from the
-strings we obtain. Fortunately, you can also ask xef.ai to give you back the information
-using a _custom type_. The library takes care of instructing the LLM on building such
-a structure, and deserializes the result back for you.
-
-```scala
-import com.xebia.functional.xef.scala.conversation.*
-import io.circe.Decoder
-import com.xebia.functional.xef.prompt.Prompt
-
-case class Book(name: String, author: String, summary: String) derives SerialDescriptor, Decoder
-
-def summarizeBook(title: String, author: String)(using conversation: ScalaConversation): Book =
-  prompt(Prompt(s"$title by $author summary."))
-
-@main def runBook: Unit =
-  conversation {
-    val toKillAMockingBird = summarizeBook("To Kill a Mockingbird", "Harper Lee")
-    println(s"${toKillAMockingBird.name} by ${toKillAMockingBird.author} summary:\n ${toKillAMockingBird.summary}")
-  }
-```
-
-xef.ai for Scala uses `xef-core`, which is based on Kotlin. Hence, the core
-reuses [Kotlin's common serialization](https://kotlinlang.org/docs/serialization.html), and
-Scala uses [circe](https://github.com/circe/circe) to derive the required serializable instance.
-The LLM is usually able to detect which kind of information should
-go on each field based on its name (like `title` and `author` above).
-For those cases where the LLM is not able to infer the type, you can use the `@Description` annotation:
-
-## @Description annotations
-
-```scala
-import com.xebia.functional.xef.scala.conversation.Description
-import com.xebia.functional.xef.scala.conversation.*
-import io.circe.Decoder
-import com.xebia.functional.xef.prompt.Prompt
-
-@Description("A book")
-case class Book(
-    @Description("the name of the book") name: String,
-    @Description("the author of the book") author: String,
-    @Description("A 50 word paragraph with a summary of this book") summary: String
-) derives SerialDescriptor, Decoder
-
-def summarizeBook(title: String, author: String)(using conversation: ScalaConversation): Book =
-  prompt(Prompt(s"$title by $author summary."))
-
-@main def runBook: Unit =
-  conversation {
-    val toKillAMockingBird = summarizeBook("To Kill a Mockingbird", "Harper Lee")
-    println(s"${toKillAMockingBird.name} by ${toKillAMockingBird.author} summary:\n ${toKillAMockingBird.summary}")
-  }
-```
-
-## Context
-
-LLMs have knowledge about a broad variety of topics. But by construction they are not able
-to respond to questions about information not available in their training set. However, you
-often want to supplement the LLM with more data:
-- Transient information referring to the current moment, like the current weather, or
-  the trends in the stock market over the past 10 days.
-- Non-public information, for example for summarizing a piece of text you're creating
-  within your organization.
-
-These additional pieces of information are called the _context_ in xef.ai, and are attached
-to every question to the LLM. Although you can add arbitrary strings to the context at any
-point, the most common mode of usage is using an _agent_ to consult an external service,
-and make its response part of the context. One such agent is `search`, which uses a web
-search service to enrich that context.
-
-```scala
-import com.xebia.functional.xef.reasoning.serpapi.Search
-import com.xebia.functional.xef.scala.conversation.*
-import com.xebia.functional.xef.conversation.llm.openai.OpenAI
-import io.circe.Decoder
-import com.xebia.functional.xef.prompt.Prompt
-
-private final case class MealPlanRecipe(name: String, ingredients: List[String]) derives SerialDescriptor, Decoder
-
-private final case class MealPlan(name: String, recipes: List[MealPlanRecipe]) derives SerialDescriptor, Decoder
-
-@main def runMealPlan: Unit =
-  conversation {
-    val search = Search(OpenAI.FromEnvironment.DEFAULT_CHAT, summon[ScalaConversation], 3)
-    addContext(search.search("gall bladder stones meals").get())
-    val mealPlan = prompt[MealPlan](Prompt("Meal plan for the week for a person with gall bladder stones that includes 5 recipes."))
-    println(mealPlan)
-  }
-```
-
-:::note Better vector stores
-
-The underlying mechanism of the context is a _vector store_, a data structure which
-saves a set of strings, and is able to find those similar to another given one.
-By default, xef.ai uses an _in-memory_ vector store, since it provides maximum
-compatibility across platforms. However, if you foresee your context growing above
-the hundreds of elements, you may consider switching to another alternative, like
-Lucene or PostgreSQL.
- -::: diff --git a/docusaurus.config.js b/docusaurus.config.js index d9877af..45f1368 100644 --- a/docusaurus.config.js +++ b/docusaurus.config.js @@ -110,28 +110,9 @@ const createConfig = async () => { position: 'right', }, { - type: 'dropdown', label: 'Quickstart', - position: 'right', to: '/learn/quickstart', - items: [ - { - label: 'Kotlin', - to: '/learn/quickstart/kotlin', - }, - { - label: 'Scala', - to: '/learn/quickstart/scala', - }, - { - label: 'Java', - to: '/learn/quickstart/java', - }, - { - label: 'Examples', - to: '/learn/examples', - }, - ], + position: 'right', }, { type: 'dropdown', @@ -171,23 +152,6 @@ const createConfig = async () => { }, ], }, - { - title: 'Quickstart', - items: [ - { - label: 'Kotlin', - to: '/learn/quickstart/kotlin', - }, - { - label: 'Scala', - to: '/learn/quickstart/scala', - }, - { - label: 'Examples', - to: '/learn/examples', - }, - ], - }, { title: 'Integrations', items: [ diff --git a/src/pages/index.tsx b/src/pages/index.tsx index 822ccd1..a0d5df0 100644 --- a/src/pages/index.tsx +++ b/src/pages/index.tsx @@ -39,28 +39,6 @@ export default function Home(): JSX.Element {

         Discover its potential
-
-
-          {`package examples
-
-import com.xebia.functional.xef.scala.conversation.*
-import io.circe.Decoder
-import com.xebia.functional.xef.prompt.Prompt
-
-private final case class TouristAttraction(name: String, location: String, history: String) derives SerialDescriptor, Decoder
-
-@main def runTouristAttraction: Unit = conversation {
-  val statueOfLiberty: TouristAttraction = prompt(Prompt("Statue of Liberty location and history."))
-  println(
-    s"""
-      |\${statueOfLiberty.name} is located in \${statueOfLiberty.location} and has the following history:
-      |\${statueOfLiberty.history}
-    """.stripMargin
-  )
-}
-`}
-
 
           {`package examples
 
@@ -81,36 +59,6 @@ suspend fun main() =
         .trimMargin()
     )
 }
 `}
-
-
-          {`package example;
-
-import com.xebia.functional.xef.conversation.*;
-import com.xebia.functional.xef.conversation.llm.openai.OpenAI;
-import com.xebia.functional.xef.prompt.Prompt;
-
-import java.util.concurrent.ExecutionException;
-
-public class TouristAttractions {
-
-    public record TouristAttraction(String name, String location, String history) {}
-
-    public static void main(String[] args) throws ExecutionException, InterruptedException {
-        try (var scope = OpenAI.conversation()) {
-            var statueOfLiberty = scope.prompt(
-                OpenAI.FromEnvironment.DEFAULT_SERIALIZATION,
-                new Prompt("Statue of Liberty location and history."),
-                TouristAttraction.class
-            ).get();
-            System.out.println(
-                statueOfLiberty.name + " is located in " + statueOfLiberty.location +
-                " and has the following history: " + statueOfLiberty.history
-            );
-        }
-    }
-}
-`}
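After this patch, the docs and homepage lean on a single Kotlin path built from the split `xef-core`/`xef-openai` coordinates introduced above. A sketch of the resulting "first prompt" setup, assuming the `promptMessage` helper from the Kotlin quickstart; the `<version>` placeholder stands in for whatever release you pin:

```kotlin
// build.gradle.kts — the dependencies this patch switches the docs to:
//   implementation("com.xebia:xef-core:<version>")
//   implementation("com.xebia:xef-openai:<version>")
//
// OPENAI_TOKEN must be set in the environment before running.
import com.xebia.functional.xef.conversation.llm.openai.OpenAI
import com.xebia.functional.xef.conversation.llm.openai.promptMessage
import com.xebia.functional.xef.prompt.Prompt

suspend fun main() {
  OpenAI.conversation {
    // promptMessage returns the model's reply as a plain String.
    val books = promptMessage(Prompt("Give me a selection of books about functional programming"))
    println(books)
  }
}
```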