OpenAI api client gen (#531)
* Incorporates the generator as a module

* Fixes `ChatCompletionRequestMessage` model
fedefernandez authored Nov 10, 2023
1 parent 5d4eef1 commit 11c9eba
Showing 208 changed files with 238 additions and 224 deletions.
2 changes: 1 addition & 1 deletion openai-client/.gitignore
@@ -1 +1 @@
-generator/openai-api.yml
+generator/config/openai-api.yaml
@@ -33,6 +33,11 @@ docs/**
src/*Test/**

# Unused models
+src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestSystemMessage.kt
+src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestUserMessage.kt
+src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestAssistantMessage.kt
+src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestToolMessage.kt
+src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestFunctionMessage.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/CreateChatCompletionFunctionResponse.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/CreateChatCompletionFunctionResponseChoicesInner.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestMessageContentPart.kt
@@ -39,13 +39,7 @@ src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionMessageTo
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionMessageToolCallFunction.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionNamedToolChoice.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionNamedToolChoiceFunction.kt
-src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestAssistantMessage.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestAssistantMessageFunctionCall.kt
-src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestFunctionMessage.kt
-src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestMessage.kt
-src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestSystemMessage.kt
-src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestToolMessage.kt
-src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRequestUserMessage.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionResponseMessage.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionRole.kt
src/commonMain/kotlin/com/xebia/functional/openai/models/ChatCompletionStreamResponseDelta.kt
File renamed without changes.
@@ -48,11 +48,10 @@ kotlin {
integrationTest.associateWith(test)
}
}
-  // JavaScript not supported due to reserved words
-  // js(IR) {
-  //   browser()
-  //   nodejs()
-  // }
+  js(IR) {
+    browser()
+    nodejs()
+  }
linuxX64()
macosX64()
macosArm64()
@@ -82,7 +81,7 @@ kotlin {
}
}
val jvmTest by getting { dependencies { implementation(libs.kotest.junit5) } }
-  // val jsMain by getting { dependencies { api(libs.ktor.client.js) } }
+  val jsMain by getting { dependencies { api(libs.ktor.client.js) } }
val linuxX64Main by getting { dependencies { api(libs.ktor.client.cio) } }
val macosX64Main by getting { dependencies { api(libs.ktor.client.cio) } }
val macosArm64Main by getting { dependencies { api(libs.ktor.client.cio) } }
@@ -66,7 +66,10 @@ data class CreateChatCompletionRequest(
/* A list of messages comprising the conversation so far. [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models). */
@SerialName(value = "messages")
@Required
-  val messages: kotlin.collections.List<ChatCompletionRequestMessage>,
+  val messages:
+    kotlin.collections.List<
+      com.xebia.functional.openai.apis.com.xebia.functional.openai.models.ext.chat.ChatCompletionRequestMessage
+    >,
@SerialName(value = "model")
@Required
val model: com.xebia.functional.openai.models.ext.chat.create.CreateChatCompletionRequestModel,
@@ -39,12 +39,12 @@ data class CreateChatCompletionResponseChoicesInner(
* filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model
* called a function.
*
- * Values: stop,length,toolCalls,contentFilter,functionCall
+ * Values: stop,lengthType,toolCalls,contentFilter,functionCall
*/
@Serializable
enum class FinishReason(val value: kotlin.String) {
@SerialName(value = "stop") stop("stop"),
-  @SerialName(value = "length") length("length"),
+  @SerialName(value = "length") lengthType("length"),
@SerialName(value = "tool_calls") toolCalls("tool_calls"),
@SerialName(value = "content_filter") contentFilter("content_filter"),
@SerialName(value = "function_call") functionCall("function_call")
@@ -39,12 +39,12 @@ data class CreateChatCompletionStreamResponseChoicesInner(
* filters, `tool_calls` if the model called a tool, or `function_call` (deprecated) if the model
* called a function.
*
- * Values: stop,length,toolCalls,contentFilter,functionCall
+ * Values: stop,lengthType,toolCalls,contentFilter,functionCall
*/
@Serializable
enum class FinishReason(val value: kotlin.String) {
@SerialName(value = "stop") stop("stop"),
-  @SerialName(value = "length") length("length"),
+  @SerialName(value = "length") lengthType("length"),
@SerialName(value = "tool_calls") toolCalls("tool_calls"),
@SerialName(value = "content_filter") contentFilter("content_filter"),
@SerialName(value = "function_call") functionCall("function_call")
@@ -39,12 +39,12 @@ data class CreateCompletionResponseChoicesInner(
* the request was reached, or `content_filter` if content was omitted due to a flag from our
* content filters.
*
- * Values: stop,length,contentFilter
+ * Values: stop,lengthType,contentFilter
*/
@Serializable
enum class FinishReason(val value: kotlin.String) {
@SerialName(value = "stop") stop("stop"),
-  @SerialName(value = "length") length("length"),
+  @SerialName(value = "length") lengthType("length"),
@SerialName(value = "content_filter") contentFilter("content_filter")
}
}
@@ -39,11 +39,11 @@ data class CreateEditResponseChoicesInner(
* the request was reached, or `content_filter` if content was omitted due to a flag from our
* content filters.
*
- * Values: stop,length
+ * Values: stop,lengthType
*/
@Serializable
enum class FinishReason(val value: kotlin.String) {
@SerialName(value = "stop") stop("stop"),
-  @SerialName(value = "length") length("length")
+  @SerialName(value = "length") lengthType("length")
}
}
@@ -0,0 +1,154 @@
package com.xebia.functional.openai.apis.com.xebia.functional.openai.models.ext.chat

import com.xebia.functional.openai.models.ChatCompletionMessageToolCall
import com.xebia.functional.openai.models.ChatCompletionRequestAssistantMessageFunctionCall
import kotlinx.serialization.Required
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable

@Serializable
sealed interface ChatCompletionRequestMessage {

/**
* @param content The contents of the system message.
* @param role The role of the messages author, in this case `system`.
*/
@Serializable
data class ChatCompletionRequestSystemMessage(

/* The contents of the system message. */
@SerialName(value = "content") @Required val content: String?,

/* The role of the messages author, in this case `system`. */
@SerialName(value = "role") @Required val role: ChatCompletionRequestSystemMessage.Role
) : ChatCompletionRequestMessage {

/**
* The role of the messages author, in this case `system`.
*
* Values: system
*/
@Serializable
enum class Role(val value: String) {
@SerialName(value = "system") system("system")
}
}

/**
* @param content
* @param role The role of the messages author, in this case `user`.
*/
@Serializable
data class ChatCompletionRequestUserMessage(
@SerialName(value = "content")
@Required
val content:
com.xebia.functional.openai.models.ext.chat.ChatCompletionRequestUserMessageContent?,

/* The role of the messages author, in this case `user`. */
@SerialName(value = "role") @Required val role: ChatCompletionRequestUserMessage.Role
) : ChatCompletionRequestMessage {

/**
* The role of the messages author, in this case `user`.
*
* Values: user
*/
@Serializable
enum class Role(val value: String) {
@SerialName(value = "user") user("user")
}
}

/**
* @param content The contents of the assistant message.
* @param role The role of the messages author, in this case `assistant`.
* @param toolCalls The tool calls generated by the model, such as function calls.
* @param functionCall
*/
@Serializable
data class ChatCompletionRequestAssistantMessage(

/* The contents of the assistant message. */
@SerialName(value = "content") @Required val content: String?,

/* The role of the messages author, in this case `assistant`. */
@SerialName(value = "role") @Required val role: ChatCompletionRequestAssistantMessage.Role,

/* The tool calls generated by the model, such as function calls. */
@SerialName(value = "tool_calls") val toolCalls: List<ChatCompletionMessageToolCall>? = null,
@Deprecated(message = "This property is deprecated.")
@SerialName(value = "function_call")
val functionCall: ChatCompletionRequestAssistantMessageFunctionCall? = null
) : ChatCompletionRequestMessage {

/**
* The role of the messages author, in this case `assistant`.
*
* Values: assistant
*/
@Serializable
enum class Role(val value: String) {
@SerialName(value = "assistant") assistant("assistant")
}
}

/**
* @param role The role of the messages author, in this case `tool`.
* @param content The contents of the tool message.
* @param toolCallId Tool call that this message is responding to.
*/
@Serializable
data class ChatCompletionRequestToolMessage(

/* The role of the messages author, in this case `tool`. */
@SerialName(value = "role") @Required val role: ChatCompletionRequestToolMessage.Role,

/* The contents of the tool message. */
@SerialName(value = "content") @Required val content: String?,

/* Tool call that this message is responding to. */
@SerialName(value = "tool_call_id") @Required val toolCallId: String
) : ChatCompletionRequestMessage {

/**
* The role of the messages author, in this case `tool`.
*
* Values: tool
*/
@Serializable
enum class Role(val value: String) {
@SerialName(value = "tool") tool("tool")
}
}

/**
* @param role The role of the messages author, in this case `function`.
* @param content The return value from the function call, to return to the model.
* @param name The name of the function to call.
*/
@Serializable
@Deprecated(message = "This schema is deprecated.")
data class ChatCompletionRequestFunctionMessage(

/* The role of the messages author, in this case `function`. */
@SerialName(value = "role") @Required val role: ChatCompletionRequestFunctionMessage.Role,

/* The return value from the function call, to return to the model. */
@SerialName(value = "content") @Required val content: kotlin.String?,

/* The name of the function to call. */
@SerialName(value = "name") @Required val name: kotlin.String
) : ChatCompletionRequestMessage {

/**
* The role of the messages author, in this case `function`.
*
* Values: function
*/
@Serializable
enum class Role(val value: kotlin.String) {
@SerialName(value = "function") function("function")
}
}
}
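
For orientation, a minimal sketch (not part of the commit) of how the new sealed interface is meant to be consumed: the subtypes are constructed directly and collected behind the common parent, which is the element type `CreateChatCompletionRequest.messages` now declares. Only types shown in the diff above are used; the doubled `...apis.com.xebia...` package is copied verbatim from the generated sources, and the sample string is a placeholder.

```kotlin
import com.xebia.functional.openai.apis.com.xebia.functional.openai.models.ext.chat.ChatCompletionRequestMessage
import com.xebia.functional.openai.apis.com.xebia.functional.openai.models.ext.chat.ChatCompletionRequestMessage.ChatCompletionRequestSystemMessage

// A single-turn conversation typed against the sealed parent, as expected
// by CreateChatCompletionRequest.messages.
val messages: List<ChatCompletionRequestMessage> =
  listOf(
    ChatCompletionRequestSystemMessage(
      content = "You are a concise assistant.",
      role = ChatCompletionRequestSystemMessage.Role.system
    )
  )
```

A user turn would be built the same way through `ChatCompletionRequestUserMessage`, whose `content` wraps the external `ChatCompletionRequestUserMessageContent` type that is not part of this diff.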
4 changes: 2 additions & 2 deletions openai-client/generator/README.md
@@ -9,13 +9,13 @@ Autogenerated client for OpenAI using the OpenAPI specification and [OpenAPI Gen
2. Download openai-api specification

```shell
-curl -o generator/openai-api.yaml https://raw.githubusercontent.com/openai/openai-openapi/main/openapi.yaml
+curl -o config/openai-api.yaml https://raw.githubusercontent.com/openai/openai-openapi/master/openapi.yaml
```

3. Run the openapi generator CLI from module's root with the following parameters:

```shell
-openapi-generator generate -i generator/openai-api.yaml -g kotlin -o . --skip-validate-spec -c generator/openai-config.json
+./gradlew openaiClientGenerate
```

4. Run the spotLess task from project's root
32 changes: 32 additions & 0 deletions openai-client/generator/build.gradle.kts
@@ -0,0 +1,32 @@
@file:Suppress("DSL_SCOPE_VIOLATION")

plugins {
java
alias(libs.plugins.spotless)
}

dependencies {
implementation("org.openapitools:openapi-generator-cli:7.0.1")
}

tasks.test {
useJUnitPlatform()
}

task("openaiClientGenerate", JavaExec::class) {
group = "GenerateTasks"
mainClass = "org.openapitools.codegen.OpenAPIGenerator"
args = listOf(
"generate",
"-i",
"config/openai-api.yaml",
"-g",
"ai.xef.openai.generator.KMMGeneratorConfig",
"-o",
"../client",
"--skip-validate-spec",
"-c",
"config/openai-config.json",
)
classpath = sourceSets["main"].runtimeClasspath
}
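
As a side note, the `task("openaiClientGenerate", JavaExec::class)` call above configures the task eagerly. A hedged sketch of the equivalent lazy registration with Gradle's configuration-avoidance API (not part of the commit; the arguments are copied from the task above):

```kotlin
// Lazy alternative: configuration only runs when the task is actually requested.
tasks.register<JavaExec>("openaiClientGenerate") {
    group = "GenerateTasks"
    mainClass.set("org.openapitools.codegen.OpenAPIGenerator")
    args(
        "generate",
        "-i", "config/openai-api.yaml",
        "-g", "ai.xef.openai.generator.KMMGeneratorConfig",
        "-o", "../client",
        "--skip-validate-spec",
        "-c", "config/openai-config.json",
    )
    classpath = sourceSets["main"].runtimeClasspath
}
```

Either form is invoked the same way, via the `./gradlew openaiClientGenerate` step documented in the README above.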
@@ -7,6 +7,7 @@
},
"schemaMappings": {
"FunctionParameters": "kotlinx.serialization.json.JsonObject",
"ChatCompletionRequestMessage": "com.xebia.functional.openai.apis.com.xebia.functional.openai.models.ext.chat.ChatCompletionRequestMessage",
"ChatCompletionRequestUserMessage_content": "com.xebia.functional.openai.models.ext.chat.ChatCompletionRequestUserMessageContent",
"CreateChatCompletionRequest_model": "com.xebia.functional.openai.models.ext.chat.create.CreateChatCompletionRequestModel",
"CreateChatCompletionRequest_stop": "com.xebia.functional.openai.models.ext.chat.create.CreateChatCompletionRequestStop",
@@ -31,7 +32,7 @@
"FineTuningJobRequest_hyperparameters_n_epochs": "com.xebia.functional.openai.models.ext.finetune.job.FineTuningJobRequestHyperparametersNEpochs",
"FineTuningJob_hyperparameters_n_epochs": "com.xebia.functional.openai.models.ext.finetune.job.FineTuningJobHyperparametersNEpochs"
},
"templateDir": "generator",
"templateDir": "config",
"files": {
"api.mustache": {
"templateType": "API",
@@ -0,0 +1,21 @@
package ai.xef.openai.generator;

import org.openapitools.codegen.languages.KotlinClientCodegen;

public class KMMGeneratorConfig extends KotlinClientCodegen {

public KMMGeneratorConfig() {
super();
}

@Override
public String toEnumVarName(String value, String datatype) {
String varName;
if ("length".equals(value)) {
varName = value + "Type";
} else {
varName = value;
}
return super.toEnumVarName(varName, datatype);
}
}
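
The override above is what produces the `lengthType` entries in the regenerated `FinishReason` enums earlier in this diff: an enum value literally named `length` is suffixed with `Type` before the default Kotlin naming rules run, presumably to avoid the name clash hinted at by the removed "JavaScript not supported due to reserved words" comment in the build script. A small Kotlin restatement of the rule, purely for illustration (the function name and checks are made up, not part of the generator):

```kotlin
// Hypothetical restatement of KMMGeneratorConfig.toEnumVarName's special case.
fun enumVarName(value: String): String =
    if (value == "length") value + "Type" else value

fun main() {
    check(enumVarName("length") == "lengthType") // matches FinishReason.lengthType above
    check(enumVarName("stop") == "stop")         // every other value passes through unchanged
    println("enum var name mapping checks passed")
}
```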
