From 8cad10fc61f8d1192c9cb6787f5e9d78836b45f7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20Carlos=20Monta=C3=B1ez?=
Date: Tue, 24 Oct 2023 17:13:45 +0200
Subject: [PATCH] Update parameters type in CFunction to JsonObject (#503)

* parameters in CFunction is a Json object

* spotless

* removed encodeJsonSchema
---
 .../com/xebia/functional/xef/llm/ChatWithFunctions.kt |  6 +++---
 .../com/xebia/functional/xef/llm/StreamedFunction.kt  |  2 +-
 .../functional/xef/llm/models/functions/CFunction.kt  |  3 ++-
 .../functional/xef/llm/models/functions/JsonSchema.kt |  5 +----
 .../xef/conversation/PlatformConversation.jvm.kt      | 10 +++++++---
 .../conversation/llm/openai/models/OpenAIFunChat.kt   |  3 +--
 6 files changed, 15 insertions(+), 14 deletions(-)

diff --git a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/ChatWithFunctions.kt b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/ChatWithFunctions.kt
index 5fc814a88..6379feb86 100644
--- a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/ChatWithFunctions.kt
+++ b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/ChatWithFunctions.kt
@@ -10,7 +10,7 @@ import com.xebia.functional.xef.llm.models.chat.ChatCompletionChunk
 import com.xebia.functional.xef.llm.models.chat.ChatCompletionResponseWithFunctions
 import com.xebia.functional.xef.llm.models.functions.CFunction
 import com.xebia.functional.xef.llm.models.functions.FunChatCompletionRequest
-import com.xebia.functional.xef.llm.models.functions.encodeJsonSchema
+import com.xebia.functional.xef.llm.models.functions.buildJsonSchema
 import com.xebia.functional.xef.prompt.Prompt
 import io.github.oshai.kotlinlogging.KotlinLogging
 import kotlinx.coroutines.flow.*
@@ -32,10 +32,10 @@ interface ChatWithFunctions : LLM {
   @OptIn(ExperimentalSerializationApi::class)
   fun chatFunction(descriptor: SerialDescriptor): CFunction {
     val fnName = descriptor.serialName.substringAfterLast(".")
-    return chatFunction(fnName, encodeJsonSchema(descriptor))
+    return chatFunction(fnName, buildJsonSchema(descriptor))
   }
 
-  fun chatFunction(fnName: String, schema: String): CFunction =
+  fun chatFunction(fnName: String, schema: JsonObject): CFunction =
     CFunction(fnName, "Generated function for $fnName", schema)
 
   @AiDsl
diff --git a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/StreamedFunction.kt b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/StreamedFunction.kt
index 5703536c7..39bc6ebda 100644
--- a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/StreamedFunction.kt
+++ b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/StreamedFunction.kt
@@ -57,7 +57,7 @@ sealed class StreamedFunction {
       // the path to this potential nested property
      var path: List<String> = emptyList()
       // we extract the expected JSON schema before the LLM replies
-      val schema = Json.parseToJsonElement(function.parameters)
+      val schema = function.parameters
       // we create an example from the schema from which we can expect and infer the paths
       // as the LLM is sending us chunks with malformed JSON
       val example = createExampleFromSchema(schema)
diff --git a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/CFunction.kt b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/CFunction.kt
index 4a354883b..e4f1507d0 100644
--- a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/CFunction.kt
+++ b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/CFunction.kt
@@ -1,6 +1,7 @@
 package com.xebia.functional.xef.llm.models.functions
 
 import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.JsonObject
 
 @Serializable
-data class CFunction(val name: String, val description: String, val parameters: String)
+data class CFunction(val name: String, val description: String, val parameters: JsonObject)
diff --git a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/JsonSchema.kt b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/JsonSchema.kt
index a1d463d41..ae77d08b6 100644
--- a/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/JsonSchema.kt
+++ b/core/src/commonMain/kotlin/com/xebia/functional/xef/llm/models/functions/JsonSchema.kt
@@ -107,11 +107,8 @@ annotation class JsonSchema {
   annotation class NoDefinition
 }
 
-fun encodeJsonSchema(descriptor: SerialDescriptor): String =
-  Json.encodeToString(JsonObject.serializer(), buildJsonSchema(descriptor))
-
 /** Creates a Json Schema using the provided [descriptor] */
-private fun buildJsonSchema(descriptor: SerialDescriptor): JsonObject {
+fun buildJsonSchema(descriptor: SerialDescriptor): JsonObject {
   val autoDefinitions = false
   val prepend = mapOf("\$schema" to JsonPrimitive("http://json-schema.org/draft-07/schema"))
   val definitions = JsonSchemaDefinitions(autoDefinitions)
diff --git a/core/src/jvmMain/kotlin/com/xebia/functional/xef/conversation/PlatformConversation.jvm.kt b/core/src/jvmMain/kotlin/com/xebia/functional/xef/conversation/PlatformConversation.jvm.kt
index 2f34b8de2..4c91a66e6 100644
--- a/core/src/jvmMain/kotlin/com/xebia/functional/xef/conversation/PlatformConversation.jvm.kt
+++ b/core/src/jvmMain/kotlin/com/xebia/functional/xef/conversation/PlatformConversation.jvm.kt
@@ -17,6 +17,8 @@ import kotlinx.coroutines.async
 import kotlinx.coroutines.cancel
 import kotlinx.coroutines.future.asCompletableFuture
 import kotlinx.coroutines.reactive.asPublisher
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.jsonObject
 import org.reactivestreams.Publisher
 actual abstract class PlatformConversation
 actual constructor(
@@ -58,12 +60,14 @@ actual constructor(
       }
       .asCompletableFuture()
 
-  fun chatFunction(target: Class<*>): CFunction =
-    CFunction(
+  fun chatFunction(target: Class<*>): CFunction {
+    val targetString = JacksonSerialization.schemaGenerator.generateSchema(target).toString()
+    return CFunction(
       name = target.simpleName,
       description = "Generated function for ${target.simpleName}",
-      parameters = JacksonSerialization.schemaGenerator.generateSchema(target).toString()
+      parameters = Json.parseToJsonElement(targetString).jsonObject
     )
+  }
 
  fun promptMessage(chat: Chat, prompt: Prompt): CompletableFuture<String> =
     coroutineScope
diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/models/OpenAIFunChat.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/models/OpenAIFunChat.kt
index d7e0bd437..d500ed90e 100644
--- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/models/OpenAIFunChat.kt
+++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/conversation/llm/openai/models/OpenAIFunChat.kt
@@ -14,7 +14,6 @@ import com.xebia.functional.xef.llm.models.functions.FunChatCompletionRequest
 import com.xebia.functional.xef.llm.models.functions.FunctionCall
 import kotlinx.coroutines.flow.Flow
 import kotlinx.coroutines.flow.map
-import kotlinx.serialization.json.Json
 
 class OpenAIFunChat(
   private val provider: OpenAI, // TODO: use context receiver
@@ -102,7 +101,7 @@ private fun CFunction.toOpenAI() =
   ChatCompletionFunction(
     name = name,
     description = description,
-    parameters = Parameters(Json.parseToJsonElement(parameters)),
+    parameters = Parameters(parameters)
   )
 
 private fun Message.toOpenAI() =
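
Usage sketch (illustrative, not part of the patch): after this change CFunction.parameters carries the schema as a JsonObject instead of a pre-encoded String, so callers build the schema once and only serialize it at the boundary that actually needs text. The snippet below assumes only the declarations visible in this diff (CFunction, buildJsonSchema) plus kotlinx-serialization; Question is a hypothetical example type.

// Minimal sketch of the updated API. Question is a hypothetical example type,
// not part of the patch; buildJsonSchema and CFunction come from the diff above.
import com.xebia.functional.xef.llm.models.functions.CFunction
import com.xebia.functional.xef.llm.models.functions.buildJsonSchema
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.serializer

@Serializable data class Question(val title: String, val answer: String)

fun main() {
  // buildJsonSchema now returns the schema as a JsonObject rather than an encoded String.
  val descriptor = serializer<Question>().descriptor
  val schema: JsonObject = buildJsonSchema(descriptor)

  // CFunction.parameters takes the JsonObject directly; no Json.parseToJsonElement round trip.
  val function =
    CFunction(
      name = descriptor.serialName.substringAfterLast("."),
      description = "Generated function for Question",
      parameters = schema
    )

  // Serialize to text only where a String is actually required.
  println(Json.encodeToString(JsonObject.serializer(), function.parameters))
}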