diff --git a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.InferenceDeployV1InferenceDeployDeployIdPost.g.cs b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.InferenceDeployV1InferenceDeployDeployIdPost.g.cs
index 9303ed3..8469505 100644
--- a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.InferenceDeployV1InferenceDeployDeployIdPost.g.cs
+++ b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.InferenceDeployV1InferenceDeployDeployIdPost.g.cs
@@ -8,12 +8,14 @@ public partial class DeepInfraApi
partial void PrepareInferenceDeployV1InferenceDeployDeployIdPostArguments(
global::System.Net.Http.HttpClient httpClient,
ref string deployId,
+ ref bool useCache,
ref string? xDeepinfraSource,
ref string? userAgent);
partial void PrepareInferenceDeployV1InferenceDeployDeployIdPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
string deployId,
+ bool useCache,
string? xDeepinfraSource,
string? userAgent);
partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponse(
@@ -29,12 +31,16 @@ partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponseContent(
/// Inference Deploy
/// </summary>
/// <param name="deployId"></param>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task InferenceDeployV1InferenceDeployDeployIdPostAsync(
string deployId,
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::System.Threading.CancellationToken cancellationToken = default)
@@ -44,12 +50,13 @@ partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponseContent(
PrepareInferenceDeployV1InferenceDeployDeployIdPostArguments(
httpClient: _httpClient,
deployId: ref deployId,
+ useCache: ref useCache,
xDeepinfraSource: ref xDeepinfraSource,
userAgent: ref userAgent);
using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
method: global::System.Net.Http.HttpMethod.Post,
- requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/inference/deploy/{deployId}", global::System.UriKind.RelativeOrAbsolute));
+ requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/inference/deploy/{deployId}?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
PrepareRequest(
client: _httpClient,
@@ -58,6 +65,7 @@ partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponseContent(
httpClient: _httpClient,
httpRequestMessage: httpRequest,
deployId: deployId,
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent);
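Taken together, the hunks above make `useCache` a required argument on this overload, thread it through both partial hooks, and append it to the request URI as `use_cache`. A minimal call-site sketch under stated assumptions: the `DeepInfraApi` constructor shape and the deploy id below are hypothetical; only the method name and its parameters come from the diff.

```csharp
using System;
using System.Net.Http;

var httpClient = new HttpClient { BaseAddress = new Uri("https://api.deepinfra.com") };
var api = new DeepInfra.DeepInfraApi(httpClient); // hypothetical constructor shape

// The new argument controls the use_cache query value on the POST.
await api.InferenceDeployV1InferenceDeployDeployIdPostAsync(
    deployId: "my-deploy-id", // hypothetical id
    useCache: false,          // appended to the URI as ?use_cache=False
    xDeepinfraSource: null,
    userAgent: null);
```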
diff --git a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiAudioSpeechV1OpenaiAudioSpeechPost.g.cs b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiAudioSpeechV1OpenaiAudioSpeechPost.g.cs
index 2602aef..232bc4d 100644
--- a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiAudioSpeechV1OpenaiAudioSpeechPost.g.cs
+++ b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiAudioSpeechV1OpenaiAudioSpeechPost.g.cs
@@ -7,11 +7,13 @@ public partial class DeepInfraApi
{
partial void PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostArguments(
global::System.Net.Http.HttpClient httpClient,
+ ref bool useCache,
ref string? xDeepinfraSource,
global::DeepInfra.OpenAITextToSpeechIn request);
partial void PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ bool useCache,
string? xDeepinfraSource,
global::DeepInfra.OpenAITextToSpeechIn request);
partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponse(
@@ -26,11 +28,15 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
/// <summary>
/// Openai Audio Speech
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="request"></param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
+ bool useCache,
string? xDeepinfraSource,
global::DeepInfra.OpenAITextToSpeechIn request,
global::System.Threading.CancellationToken cancellationToken = default)
@@ -41,12 +47,13 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
client: _httpClient);
PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostArguments(
httpClient: _httpClient,
+ useCache: ref useCache,
xDeepinfraSource: ref xDeepinfraSource,
request: request);
using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
method: global::System.Net.Http.HttpMethod.Post,
- requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/audio/speech", global::System.UriKind.RelativeOrAbsolute));
+ requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/audio/speech?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAITextToSpeechIn);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -60,6 +67,7 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostRequest(
httpClient: _httpClient,
httpRequestMessage: httpRequest,
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
request: request);
@@ -103,6 +111,9 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
/// <summary>
/// Openai Audio Speech
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="model">
/// model name
@@ -130,6 +141,7 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
public async global::System.Threading.Tasks.Task OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
string model,
string input,
+ bool useCache = true,
string? xDeepinfraSource = default,
global::System.AllOf? voice = default,
global::System.AllOf? responseFormat = default,
@@ -146,6 +158,7 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
};
return await OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
request: request,
cancellationToken: cancellationToken).ConfigureAwait(false);
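Of the two speech overloads, only the lower-level one takes `useCache` as a required argument; the convenience overload defaults it to `true`, so existing call sites that pass just `model` and `input` keep compiling. A sketch reusing the `api` instance from the earlier example; the model name and input text are placeholders, not values from this diff.

```csharp
// Unchanged call: use_cache=True is appended implicitly.
await api.OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
    model: "some-tts-model", // placeholder
    input: "Hello, world!");

// Opting out of the cache is one extra named argument.
await api.OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
    model: "some-tts-model",
    input: "Hello, world!",
    useCache: false);
```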
diff --git a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiChatCompletionsV1OpenaiChatCompletionsPost.g.cs b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiChatCompletionsV1OpenaiChatCompletionsPost.g.cs
index 1c62bb0..27b2e94 100644
--- a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiChatCompletionsV1OpenaiChatCompletionsPost.g.cs
+++ b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiChatCompletionsV1OpenaiChatCompletionsPost.g.cs
@@ -7,12 +7,14 @@ public partial class DeepInfraApi
{
partial void PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostArguments(
global::System.Net.Http.HttpClient httpClient,
+ ref bool useCache,
ref string? xDeepinfraSource,
ref string? userAgent,
global::DeepInfra.OpenAIChatCompletionsIn request);
partial void PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::DeepInfra.OpenAIChatCompletionsIn request);
@@ -28,12 +30,16 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseCont
/// <summary>
/// Openai Chat Completions
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="request"></param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task OpenaiChatCompletionsV1OpenaiChatCompletionsPostAsync(
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::DeepInfra.OpenAIChatCompletionsIn request,
@@ -45,13 +51,14 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseCont
client: _httpClient);
PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostArguments(
httpClient: _httpClient,
+ useCache: ref useCache,
xDeepinfraSource: ref xDeepinfraSource,
userAgent: ref userAgent,
request: request);
using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
method: global::System.Net.Http.HttpMethod.Post,
- requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/chat/completions", global::System.UriKind.RelativeOrAbsolute));
+ requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/chat/completions?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAIChatCompletionsIn);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -65,6 +72,7 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseCont
PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostRequest(
httpClient: _httpClient,
httpRequestMessage: httpRequest,
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent,
request: request);
@@ -109,6 +117,9 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseCont
/// <summary>
/// Openai Chat Completions
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="model">
@@ -171,6 +182,7 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseCont
public async global::System.Threading.Tasks.Task OpenaiChatCompletionsV1OpenaiChatCompletionsPostAsync(
string model,
global::System.Collections.Generic.IList> messages,
+ bool useCache = true,
string? xDeepinfraSource = default,
string? userAgent = default,
bool stream = false,
@@ -208,6 +220,7 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseCont
};
return await OpenaiChatCompletionsV1OpenaiChatCompletionsPostAsync(
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent,
request: request,
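Because the Prepare hook receives `ref bool useCache`, a consuming partial class can rewrite the flag centrally before the URI is composed. A sketch of such an override; it assumes `OpenAIChatCompletionsIn` exposes a `Stream` property (suggested by the `stream` parameter in the convenience overload, but not shown in this diff).

```csharp
namespace DeepInfra;

public partial class DeepInfraApi
{
    // Runs before the request URI is built, so changes to useCache
    // here change the use_cache query value that is actually sent.
    partial void PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostArguments(
        global::System.Net.Http.HttpClient httpClient,
        ref bool useCache,
        ref string? xDeepinfraSource,
        ref string? userAgent,
        global::DeepInfra.OpenAIChatCompletionsIn request)
    {
        // Hypothetical policy: never serve streamed completions from cache.
        if (request.Stream == true)
        {
            useCache = false;
        }
    }
}
```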
diff --git a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiCompletionsV1OpenaiCompletionsPost.g.cs b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiCompletionsV1OpenaiCompletionsPost.g.cs
index ddb15f2..f8e2bab 100644
--- a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiCompletionsV1OpenaiCompletionsPost.g.cs
+++ b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiCompletionsV1OpenaiCompletionsPost.g.cs
@@ -7,12 +7,14 @@ public partial class DeepInfraApi
{
partial void PrepareOpenaiCompletionsV1OpenaiCompletionsPostArguments(
global::System.Net.Http.HttpClient httpClient,
+ ref bool useCache,
ref string? xDeepinfraSource,
ref string? userAgent,
global::DeepInfra.OpenAICompletionsIn request);
partial void PrepareOpenaiCompletionsV1OpenaiCompletionsPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::DeepInfra.OpenAICompletionsIn request);
@@ -28,12 +30,16 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
/// <summary>
/// Openai Completions
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="request"></param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task OpenaiCompletionsV1OpenaiCompletionsPostAsync(
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::DeepInfra.OpenAICompletionsIn request,
@@ -45,13 +51,14 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
client: _httpClient);
PrepareOpenaiCompletionsV1OpenaiCompletionsPostArguments(
httpClient: _httpClient,
+ useCache: ref useCache,
xDeepinfraSource: ref xDeepinfraSource,
userAgent: ref userAgent,
request: request);
using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
method: global::System.Net.Http.HttpMethod.Post,
- requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/completions", global::System.UriKind.RelativeOrAbsolute));
+ requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/completions?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAICompletionsIn);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -65,6 +72,7 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
PrepareOpenaiCompletionsV1OpenaiCompletionsPostRequest(
httpClient: _httpClient,
httpRequestMessage: httpRequest,
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent,
request: request);
@@ -109,6 +117,9 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
/// <summary>
/// Openai Completions
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="model">
@@ -171,6 +182,7 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
public async global::System.Threading.Tasks.Task OpenaiCompletionsV1OpenaiCompletionsPostAsync(
string model,
string prompt,
+ bool useCache = true,
string? xDeepinfraSource = default,
string? userAgent = default,
int maxTokens = 512,
@@ -208,6 +220,7 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
};
return await OpenaiCompletionsV1OpenaiCompletionsPostAsync(
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent,
request: request,
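One detail of the new URI lines: interpolating a C# `bool` calls `bool.ToString()`, so the query is sent as `use_cache=True` or `use_cache=False` rather than lowercase. Whether that matters depends on the server's boolean parsing, which this diff does not show; a strictly lowercase consumer would need an explicit conversion, as the standalone snippet below illustrates.

```csharp
using System;

bool useCache = true;

// What the generated interpolation produces:
Console.WriteLine($"/v1/openai/completions?use_cache={useCache}");
// -> /v1/openai/completions?use_cache=True

// If lowercase literals were required, the value would need converting:
Console.WriteLine($"/v1/openai/completions?use_cache={useCache.ToString().ToLowerInvariant()}");
// -> /v1/openai/completions?use_cache=true
```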
diff --git a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiEmbeddingsV1OpenaiEmbeddingsPost.g.cs b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiEmbeddingsV1OpenaiEmbeddingsPost.g.cs
index 80df7b9..c6249f3 100644
--- a/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiEmbeddingsV1OpenaiEmbeddingsPost.g.cs
+++ b/src/libs/DeepInfra/Generated/DeepInfra.DeepInfraApi.OpenaiEmbeddingsV1OpenaiEmbeddingsPost.g.cs
@@ -7,12 +7,14 @@ public partial class DeepInfraApi
{
partial void PrepareOpenaiEmbeddingsV1OpenaiEmbeddingsPostArguments(
global::System.Net.Http.HttpClient httpClient,
+ ref bool useCache,
ref string? xDeepinfraSource,
ref string? userAgent,
global::DeepInfra.OpenAIEmbeddingsIn request);
partial void PrepareOpenaiEmbeddingsV1OpenaiEmbeddingsPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::DeepInfra.OpenAIEmbeddingsIn request);
@@ -28,12 +30,16 @@ partial void ProcessOpenaiEmbeddingsV1OpenaiEmbeddingsPostResponseContent(
/// <summary>
/// Openai Embeddings
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="request"></param>
/// <param name="cancellationToken">The token to cancel the operation with</param>
/// <exception cref="global::System.InvalidOperationException"></exception>
public async global::System.Threading.Tasks.Task OpenaiEmbeddingsV1OpenaiEmbeddingsPostAsync(
+ bool useCache,
string? xDeepinfraSource,
string? userAgent,
global::DeepInfra.OpenAIEmbeddingsIn request,
@@ -45,13 +51,14 @@ partial void ProcessOpenaiEmbeddingsV1OpenaiEmbeddingsPostResponseContent(
client: _httpClient);
PrepareOpenaiEmbeddingsV1OpenaiEmbeddingsPostArguments(
httpClient: _httpClient,
+ useCache: ref useCache,
xDeepinfraSource: ref xDeepinfraSource,
userAgent: ref userAgent,
request: request);
using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
method: global::System.Net.Http.HttpMethod.Post,
- requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/embeddings", global::System.UriKind.RelativeOrAbsolute));
+ requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/embeddings?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAIEmbeddingsIn);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -65,6 +72,7 @@ partial void ProcessOpenaiEmbeddingsV1OpenaiEmbeddingsPostResponseContent(
PrepareOpenaiEmbeddingsV1OpenaiEmbeddingsPostRequest(
httpClient: _httpClient,
httpRequestMessage: httpRequest,
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent,
request: request);
@@ -109,6 +117,9 @@ partial void ProcessOpenaiEmbeddingsV1OpenaiEmbeddingsPostResponseContent(
/// <summary>
/// Openai Embeddings
/// </summary>
+ /// <param name="useCache">
+ /// Default Value: true
+ /// </param>
/// <param name="xDeepinfraSource"></param>
/// <param name="userAgent"></param>
/// <param name="model">
@@ -128,6 +139,7 @@ partial void ProcessOpenaiEmbeddingsV1OpenaiEmbeddingsPostResponseContent(
public async global::System.Threading.Tasks.Task OpenaiEmbeddingsV1OpenaiEmbeddingsPostAsync(
string model,
global::System.AnyOf, string?> input,
+ bool useCache = true,
string? xDeepinfraSource = default,
string? userAgent = default,
global::DeepInfra.OpenAIEmbeddingsInEncodingFormat? encodingFormat = global::DeepInfra.OpenAIEmbeddingsInEncodingFormat.Float,
@@ -141,6 +153,7 @@ partial void ProcessOpenaiEmbeddingsV1OpenaiEmbeddingsPostResponseContent(
};
return await OpenaiEmbeddingsV1OpenaiEmbeddingsPostAsync(
+ useCache: useCache,
xDeepinfraSource: xDeepinfraSource,
userAgent: userAgent,
request: request,
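In every convenience overload the new parameter lands ahead of the existing optional ones. That is source-compatible for callers that use named arguments, but a positional call that used to pass `xDeepinfraSource` third no longer compiles, since the third parameter is now `bool useCache`. A sketch against the embeddings overload; the model name is a placeholder, and the bare-string `input` assumes an implicit conversion on the generated `AnyOf` type.

```csharp
// Named optional arguments are unaffected by the inserted parameter:
await api.OpenaiEmbeddingsV1OpenaiEmbeddingsPostAsync(
    model: "some-embeddings-model", // placeholder
    input: "a sentence to embed",   // assumes an implicit string conversion
    xDeepinfraSource: "my-app");

// A pre-change positional call like the following now fails with CS1503,
// because "my-app" is passed where bool useCache is expected:
// await api.OpenaiEmbeddingsV1OpenaiEmbeddingsPostAsync(model, input, "my-app");
```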
diff --git a/src/libs/DeepInfra/openapi.yaml b/src/libs/DeepInfra/openapi.yaml
index 6c51372..3c11976 100644
--- a/src/libs/DeepInfra/openapi.yaml
+++ b/src/libs/DeepInfra/openapi.yaml
@@ -949,6 +949,12 @@ paths:
schema:
title: Deploy Id
type: string
+ - name: use_cache
+ in: query
+ schema:
+ title: Use Cache
+ type: boolean
+ default: true
- name: x-deepinfra-source
in: header
schema:
@@ -1044,6 +1050,12 @@ paths:
summary: Openai Completions
operationId: openai_completions_v1_openai_completions_post
parameters:
+ - name: use_cache
+ in: query
+ schema:
+ title: Use Cache
+ type: boolean
+ default: true
- name: x-deepinfra-source
in: header
schema:
@@ -1079,6 +1091,12 @@ paths:
summary: Openai Chat Completions
operationId: openai_chat_completions_v1_openai_chat_completions_post
parameters:
+ - name: use_cache
+ in: query
+ schema:
+ title: Use Cache
+ type: boolean
+ default: true
- name: x-deepinfra-source
in: header
schema:
@@ -1114,6 +1132,12 @@ paths:
summary: Openai Embeddings
operationId: openai_embeddings_v1_openai_embeddings_post
parameters:
+ - name: use_cache
+ in: query
+ schema:
+ title: Use Cache
+ type: boolean
+ default: true
- name: x-deepinfra-source
in: header
schema:
@@ -1193,6 +1217,12 @@ paths:
summary: Openai Audio Speech
operationId: openai_audio_speech_v1_openai_audio_speech_post
parameters:
+ - name: use_cache
+ in: query
+ schema:
+ title: Use Cache
+ type: boolean
+ default: true
- name: x-deepinfra-source
in: header
schema: