Skip to content

Commit

Permalink
Merge pull request #97 from tryAGI/bot/update-openapi_202412130919
Browse files Browse the repository at this point in the history
feat: Add new endpoints and schemas to OpenAPI specification for DeepApi
  • Loading branch information
github-actions[bot] authored Dec 13, 2024
2 parents aed46cb + da164e1 commit 971522c
Show file tree
Hide file tree
Showing 35 changed files with 2,867 additions and 26 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,240 @@

#nullable enable

namespace DeepInfra
{
    public partial class DeepInfraApi
    {
        // Customization hook: a user-supplied partial implementation may inspect the
        // request model and/or rewrite the xi-api-key value (passed by ref) before
        // the HTTP request message is constructed.
        partial void PrepareCreateOpenaiBatchArguments(
            global::System.Net.Http.HttpClient httpClient,
            ref string? xiApiKey,
            global::DeepInfra.OpenAIBatchesIn request);

        // Customization hook: invoked after the HttpRequestMessage (URI, headers,
        // JSON body) is fully built, immediately before it is sent.
        partial void PrepareCreateOpenaiBatchRequest(
            global::System.Net.Http.HttpClient httpClient,
            global::System.Net.Http.HttpRequestMessage httpRequestMessage,
            string? xiApiKey,
            global::DeepInfra.OpenAIBatchesIn request);

        // Customization hook: invoked with the raw response before any status-code
        // handling or body deserialization takes place.
        partial void ProcessCreateOpenaiBatchResponse(
            global::System.Net.Http.HttpClient httpClient,
            global::System.Net.Http.HttpResponseMessage httpResponseMessage);

        // Customization hook: only reached on the ReadResponseAsString success path;
        // may rewrite the response body string (passed by ref) before it is
        // deserialized into an OpenAIBatch.
        partial void ProcessCreateOpenaiBatchResponseContent(
            global::System.Net.Http.HttpClient httpClient,
            global::System.Net.Http.HttpResponseMessage httpResponseMessage,
            ref string content);

        /// <summary>
        /// Create Openai Batch.<br/>
        /// Sends a POST to <c>/v1/openai/batches</c> with <paramref name="request"/>
        /// serialized as a UTF-8 JSON body, applying any configured Http/OAuth2/ApiKey
        /// authorizations and the optional <c>xi-api-key</c> header.
        /// </summary>
        /// <param name="xiApiKey">Optional value for the <c>xi-api-key</c> request header; omitted when null.</param>
        /// <param name="request">The batch-creation payload. Must not be null.</param>
        /// <param name="cancellationToken">The token to cancel the operation with</param>
        /// <returns>The created batch, deserialized from the response body.</returns>
        /// <exception cref="global::DeepInfra.ApiException">
        /// Thrown with an <see cref="global::DeepInfra.HTTPValidationError"/> payload on HTTP 422,
        /// or without a typed payload on any other non-success status code.
        /// </exception>
        public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenAIBatch> CreateOpenaiBatchAsync(
            global::DeepInfra.OpenAIBatchesIn request,
            string? xiApiKey = default,
            global::System.Threading.CancellationToken cancellationToken = default)
        {
            request = request ?? throw new global::System.ArgumentNullException(nameof(request));

            // Global hook first, then the operation-specific hook (which may rewrite xiApiKey).
            PrepareArguments(
                client: HttpClient);
            PrepareCreateOpenaiBatchArguments(
                httpClient: HttpClient,
                xiApiKey: ref xiApiKey,
                request: request);

            // Resolve the request URI against the client's base address.
            var __pathBuilder = new PathBuilder(
                path: "/v1/openai/batches",
                baseUri: HttpClient.BaseAddress);
            var __path = __pathBuilder.ToString();
            using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
                method: global::System.Net.Http.HttpMethod.Post,
                requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
#if NET6_0_OR_GREATER
            // Start at HTTP/1.1 but allow the handler to negotiate a newer version.
            __httpRequest.Version = global::System.Net.HttpVersion.Version11;
            __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
#endif

            // Apply each configured authorization: Http/OAuth2 entries become the
            // Authorization header; header-located ApiKey entries become custom headers.
            foreach (var __authorization in Authorizations)
            {
                if (__authorization.Type == "Http" ||
                    __authorization.Type == "OAuth2")
                {
                    __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
                        scheme: __authorization.Name,
                        parameter: __authorization.Value);
                }
                else if (__authorization.Type == "ApiKey" &&
                         __authorization.Location == "Header")
                {
                    __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
                }
            }

            // Optional per-call header. NOTE(review): xiApiKey is already a string, so
            // ToString() is redundant here — harmless generated-code artifact.
            if (xiApiKey != default)
            {
                __httpRequest.Headers.TryAddWithoutValidation("xi-api-key", xiApiKey.ToString());
            }

            // Serialize the payload as UTF-8 JSON.
            var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
            var __httpRequestContent = new global::System.Net.Http.StringContent(
                content: __httpRequestContentBody,
                encoding: global::System.Text.Encoding.UTF8,
                mediaType: "application/json");
            __httpRequest.Content = __httpRequestContent;

            // Global then operation-specific pre-send hooks.
            PrepareRequest(
                client: HttpClient,
                request: __httpRequest);
            PrepareCreateOpenaiBatchRequest(
                httpClient: HttpClient,
                httpRequestMessage: __httpRequest,
                xiApiKey: xiApiKey,
                request: request);

            // ResponseContentRead buffers the full body before SendAsync completes.
            using var __response = await HttpClient.SendAsync(
                request: __httpRequest,
                completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
                cancellationToken: cancellationToken).ConfigureAwait(false);

            // Global then operation-specific post-receive hooks.
            ProcessResponse(
                client: HttpClient,
                response: __response);
            ProcessCreateOpenaiBatchResponse(
                httpClient: HttpClient,
                httpResponseMessage: __response);
            // Validation Error: HTTP 422 carries a typed HTTPValidationError body,
            // surfaced as a typed ApiException. Body is read as string or stream
            // depending on the client's ReadResponseAsString setting.
            if ((int)__response.StatusCode == 422)
            {
                string? __content_422 = null;
                global::DeepInfra.HTTPValidationError? __value_422 = null;
                if (ReadResponseAsString)
                {
                    __content_422 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
                    __value_422 = global::DeepInfra.HTTPValidationError.FromJson(__content_422, JsonSerializerContext);
                }
                else
                {
                    var __contentStream_422 = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
                    __value_422 = await global::DeepInfra.HTTPValidationError.FromJsonStreamAsync(__contentStream_422, JsonSerializerContext).ConfigureAwait(false);
                }

                throw new global::DeepInfra.ApiException<global::DeepInfra.HTTPValidationError>(
                    message: __response.ReasonPhrase ?? string.Empty,
                    statusCode: __response.StatusCode)
                {
                    // ResponseBody stays null when the stream path was taken above.
                    ResponseBody = __content_422,
                    ResponseObject = __value_422,
                    ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
                        __response.Headers,
                        h => h.Key,
                        h => h.Value),
                };
            }

            if (ReadResponseAsString)
            {
                // String path: read the whole body, let hooks rewrite it, then
                // check the status code and deserialize.
                var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);

                ProcessResponseContent(
                    client: HttpClient,
                    response: __response,
                    content: ref __content);
                ProcessCreateOpenaiBatchResponseContent(
                    httpClient: HttpClient,
                    httpResponseMessage: __response,
                    content: ref __content);

                try
                {
                    __response.EnsureSuccessStatusCode();
                }
                catch (global::System.Net.Http.HttpRequestException __ex)
                {
                    // Wrap HTTP failures in ApiException, preserving the body and headers
                    // for the caller to inspect.
                    throw new global::DeepInfra.ApiException(
                        message: __content ?? __response.ReasonPhrase ?? string.Empty,
                        innerException: __ex,
                        statusCode: __response.StatusCode)
                    {
                        ResponseBody = __content,
                        ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
                            __response.Headers,
                            h => h.Key,
                            h => h.Value),
                    };
                }

                return
                    global::DeepInfra.OpenAIBatch.FromJson(__content, JsonSerializerContext) ??
                    throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
            }
            else
            {
                // Stream path: validate the status code first (no body available for the
                // exception message), then deserialize directly from the content stream.
                try
                {
                    __response.EnsureSuccessStatusCode();
                }
                catch (global::System.Net.Http.HttpRequestException __ex)
                {
                    throw new global::DeepInfra.ApiException(
                        message: __response.ReasonPhrase ?? string.Empty,
                        innerException: __ex,
                        statusCode: __response.StatusCode)
                    {
                        ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
                            __response.Headers,
                            h => h.Key,
                            h => h.Value),
                    };
                }

                using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);

                return
                    await global::DeepInfra.OpenAIBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
                    throw new global::System.InvalidOperationException("Response deserialization failed.");
            }
        }

        /// <summary>
        /// Create Openai Batch.<br/>
        /// Convenience overload: builds an <see cref="global::DeepInfra.OpenAIBatchesIn"/>
        /// from individual values and forwards to
        /// <see cref="CreateOpenaiBatchAsync(global::DeepInfra.OpenAIBatchesIn, string?, global::System.Threading.CancellationToken)"/>.
        /// </summary>
        /// <param name="xiApiKey">Optional value for the <c>xi-api-key</c> request header; omitted when null.</param>
        /// <param name="inputFileId">
        /// The ID of an uploaded file that contains requests for the new batch.
        /// </param>
        /// <param name="endpoint">
        /// The endpoint to be used for all requests in the batch. Currently /v1/chat/completions, /v1/completions are supported.
        /// </param>
        /// <param name="completionWindow">
        /// The time frame within which the batch should be processed. Currently only 24h is supported.
        /// </param>
        /// <param name="metadata">
        /// Optional metadata to be stored with the batch.
        /// </param>
        /// <param name="cancellationToken">The token to cancel the operation with</param>
        /// <returns>The created batch, deserialized from the response body.</returns>
        /// <exception cref="global::System.InvalidOperationException"></exception>
        /// <exception cref="global::DeepInfra.ApiException">
        /// NOTE(review): the forwarded call also throws ApiException on non-success
        /// status codes; documented here for completeness.
        /// </exception>
        public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenAIBatch> CreateOpenaiBatchAsync(
            string inputFileId,
            global::DeepInfra.OpenAIBatchesInEndpoint endpoint,
            string? xiApiKey = default,
            global::DeepInfra.OpenAIBatchesInCompletionWindow completionWindow = default,
            object? metadata = default,
            global::System.Threading.CancellationToken cancellationToken = default)
        {
            var __request = new global::DeepInfra.OpenAIBatchesIn
            {
                InputFileId = inputFileId,
                Endpoint = endpoint,
                CompletionWindow = completionWindow,
                Metadata = metadata,
            };

            return await CreateOpenaiBatchAsync(
                xiApiKey: xiApiKey,
                request: __request,
                cancellationToken: cancellationToken).ConfigureAwait(false);
        }
    }
}
Loading

0 comments on commit 971522c

Please sign in to comment.