feat: Add user authentication feature with JWT support #39

Merged · 1 commit · Aug 22, 2024
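Read off the diff below: the generated DeepInfra C# client gains a useCache flag, defaulting to true, on four endpoints — inference deploy, OpenAI audio speech, OpenAI chat completions, and OpenAI completions. The flag is threaded through the partial Prepare*Arguments/Prepare*Request hooks of each endpoint and appended to the request URI as a use_cache query parameter.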
(file 1 of 4 — path not captured)
@@ -8,12 +8,14 @@ public partial class DeepInfraApi
 partial void PrepareInferenceDeployV1InferenceDeployDeployIdPostArguments(
 global::System.Net.Http.HttpClient httpClient,
 ref string deployId,
+ref bool useCache,
 ref string? xDeepinfraSource,
 ref string? userAgent);
 partial void PrepareInferenceDeployV1InferenceDeployDeployIdPostRequest(
 global::System.Net.Http.HttpClient httpClient,
 global::System.Net.Http.HttpRequestMessage httpRequestMessage,
 string deployId,
+bool useCache,
 string? xDeepinfraSource,
 string? userAgent);
 partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponse(
@@ -29,12 +31,16 @@ partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponseContent(
 /// Inference Deploy
 /// </summary>
 /// <param name="deployId"></param>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="userAgent"></param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 public async global::System.Threading.Tasks.Task<global::DeepInfra.InferenceDeployV1InferenceDeployDeployIdPostResponse> InferenceDeployV1InferenceDeployDeployIdPostAsync(
 string deployId,
+bool useCache,
 string? xDeepinfraSource,
 string? userAgent,
 global::System.Threading.CancellationToken cancellationToken = default)
@@ -44,12 +50,13 @@ partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponseContent(
 PrepareInferenceDeployV1InferenceDeployDeployIdPostArguments(
 httpClient: _httpClient,
 deployId: ref deployId,
+useCache: ref useCache,
 xDeepinfraSource: ref xDeepinfraSource,
 userAgent: ref userAgent);
 
 using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
 method: global::System.Net.Http.HttpMethod.Post,
-requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/inference/deploy/{deployId}", global::System.UriKind.RelativeOrAbsolute));
+requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/inference/deploy/{deployId}?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
 
 PrepareRequest(
 client: _httpClient,
@@ -58,6 +65,7 @@ partial void ProcessInferenceDeployV1InferenceDeployDeployIdPostResponseContent(
 httpClient: _httpClient,
 httpRequestMessage: httpRequest,
 deployId: deployId,
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 userAgent: userAgent);
 
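A minimal call-site sketch for the updated deploy endpoint follows. The HttpClient setup and the DeepInfraApi constructor are assumptions for illustration (neither appears in this diff); only the method signature is taken from the change above. Note that this overload gives useCache no default, so existing direct callers must now pass it explicitly — a source-breaking change.

    // Hypothetical client setup — constructor and base address are assumed,
    // not shown in this PR.
    using var httpClient = new System.Net.Http.HttpClient
    {
        BaseAddress = new System.Uri("https://api.deepinfra.com"),
    };
    var api = new DeepInfra.DeepInfraApi(httpClient);

    // useCache is sent as the use_cache query string value.
    var response = await api.InferenceDeployV1InferenceDeployDeployIdPostAsync(
        deployId: "my-deploy-id",   // placeholder id
        useCache: false,            // bypass the cache for this call
        xDeepinfraSource: null,
        userAgent: null);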
(file 2 of 4 — path not captured)
@@ -7,11 +7,13 @@ public partial class DeepInfraApi
 {
 partial void PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostArguments(
 global::System.Net.Http.HttpClient httpClient,
+ref bool useCache,
 ref string? xDeepinfraSource,
 global::DeepInfra.OpenAITextToSpeechIn request);
 partial void PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostRequest(
 global::System.Net.Http.HttpClient httpClient,
 global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+bool useCache,
 string? xDeepinfraSource,
 global::DeepInfra.OpenAITextToSpeechIn request);
 partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponse(
@@ -26,11 +28,15 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
 /// <summary>
 /// Openai Audio Speech
 /// </summary>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="request"></param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenaiAudioSpeechV1OpenaiAudioSpeechPostResponse> OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
+bool useCache,
 string? xDeepinfraSource,
 global::DeepInfra.OpenAITextToSpeechIn request,
 global::System.Threading.CancellationToken cancellationToken = default)
@@ -41,12 +47,13 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
 client: _httpClient);
 PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostArguments(
 httpClient: _httpClient,
+useCache: ref useCache,
 xDeepinfraSource: ref xDeepinfraSource,
 request: request);
 
 using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
 method: global::System.Net.Http.HttpMethod.Post,
-requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/audio/speech", global::System.UriKind.RelativeOrAbsolute));
+requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/audio/speech?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
 var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAITextToSpeechIn);
 var __httpRequestContent = new global::System.Net.Http.StringContent(
 content: __httpRequestContentBody,
@@ -60,6 +67,7 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
 PrepareOpenaiAudioSpeechV1OpenaiAudioSpeechPostRequest(
 httpClient: _httpClient,
 httpRequestMessage: httpRequest,
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 request: request);
 
@@ -103,6 +111,9 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
 /// <summary>
 /// Openai Audio Speech
 /// </summary>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="model">
 /// model name<br/>
@@ -130,6 +141,7 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
 public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenaiAudioSpeechV1OpenaiAudioSpeechPostResponse> OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
 string model,
 string input,
+bool useCache = true,
 string? xDeepinfraSource = default,
 global::System.AllOf<global::DeepInfra.TtsVoice?>? voice = default,
 global::System.AllOf<global::DeepInfra.TtsResponseFormat?>? responseFormat = default,
@@ -146,6 +158,7 @@ partial void ProcessOpenaiAudioSpeechV1OpenaiAudioSpeechPostResponseContent(
 };
 
 return await OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 request: request,
 cancellationToken: cancellationToken).ConfigureAwait(false);
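The speech endpoint's convenience overload defaults useCache to true, so existing call sites keep compiling unchanged. A sketch, reusing the hypothetical api client from above (the model name is a placeholder):

    // Caching stays on unless the caller opts out.
    var speech = await api.OpenaiAudioSpeechV1OpenaiAudioSpeechPostAsync(
        model: "placeholder-tts-model",
        input: "Hello from DeepInfra!",
        useCache: false);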
(file 3 of 4 — path not captured)
@@ -7,12 +7,14 @@ public partial class DeepInfraApi
 {
 partial void PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostArguments(
 global::System.Net.Http.HttpClient httpClient,
+ref bool useCache,
 ref string? xDeepinfraSource,
 ref string? userAgent,
 global::DeepInfra.OpenAIChatCompletionsIn request);
 partial void PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostRequest(
 global::System.Net.Http.HttpClient httpClient,
 global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+bool useCache,
 string? xDeepinfraSource,
 string? userAgent,
 global::DeepInfra.OpenAIChatCompletionsIn request);
@@ -28,12 +30,16 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseContent(
 /// <summary>
 /// Openai Chat Completions
 /// </summary>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="userAgent"></param>
 /// <param name="request"></param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenaiChatCompletionsV1OpenaiChatCompletionsPostResponse> OpenaiChatCompletionsV1OpenaiChatCompletionsPostAsync(
+bool useCache,
 string? xDeepinfraSource,
 string? userAgent,
 global::DeepInfra.OpenAIChatCompletionsIn request,
@@ -45,13 +51,14 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseContent(
 client: _httpClient);
 PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostArguments(
 httpClient: _httpClient,
+useCache: ref useCache,
 xDeepinfraSource: ref xDeepinfraSource,
 userAgent: ref userAgent,
 request: request);
 
 using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
 method: global::System.Net.Http.HttpMethod.Post,
-requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/chat/completions", global::System.UriKind.RelativeOrAbsolute));
+requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/chat/completions?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
 var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAIChatCompletionsIn);
 var __httpRequestContent = new global::System.Net.Http.StringContent(
 content: __httpRequestContentBody,
@@ -65,6 +72,7 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseContent(
 PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostRequest(
 httpClient: _httpClient,
 httpRequestMessage: httpRequest,
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 userAgent: userAgent,
 request: request);
@@ -109,6 +117,9 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseContent(
 /// <summary>
 /// Openai Chat Completions
 /// </summary>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="userAgent"></param>
 /// <param name="model">
@@ -171,6 +182,7 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseContent(
 public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenaiChatCompletionsV1OpenaiChatCompletionsPostResponse> OpenaiChatCompletionsV1OpenaiChatCompletionsPostAsync(
 string model,
 global::System.Collections.Generic.IList<global::System.AnyOf<global::DeepInfra.ChatCompletionToolMessage, global::DeepInfra.ChatCompletionAssistantMessage, global::DeepInfra.ChatCompletionUserMessage, global::DeepInfra.ChatCompletionSystemMessage>> messages,
+bool useCache = true,
 string? xDeepinfraSource = default,
 string? userAgent = default,
 bool stream = false,
@@ -208,6 +220,7 @@ partial void ProcessOpenaiChatCompletionsV1OpenaiChatCompletionsPostResponseContent(
 };
 
 return await OpenaiChatCompletionsV1OpenaiChatCompletionsPostAsync(
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 userAgent: userAgent,
 request: request,
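Because the Prepare*Arguments hooks receive useCache by ref, a partial-method implementation compiled into the same assembly can override the flag centrally. A sketch of that pattern, using the hook signature exactly as declared in this diff:

    namespace DeepInfra
    {
        public partial class DeepInfraApi
        {
            // Force caching off for every chat completion, regardless of the
            // caller's argument; useCache is ref, so the change propagates to
            // the request URI built afterwards.
            partial void PrepareOpenaiChatCompletionsV1OpenaiChatCompletionsPostArguments(
                global::System.Net.Http.HttpClient httpClient,
                ref bool useCache,
                ref string? xDeepinfraSource,
                ref string? userAgent,
                global::DeepInfra.OpenAIChatCompletionsIn request)
            {
                useCache = false;
            }
        }
    }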
(file 4 of 4 — path not captured)
@@ -7,12 +7,14 @@ public partial class DeepInfraApi
 {
 partial void PrepareOpenaiCompletionsV1OpenaiCompletionsPostArguments(
 global::System.Net.Http.HttpClient httpClient,
+ref bool useCache,
 ref string? xDeepinfraSource,
 ref string? userAgent,
 global::DeepInfra.OpenAICompletionsIn request);
 partial void PrepareOpenaiCompletionsV1OpenaiCompletionsPostRequest(
 global::System.Net.Http.HttpClient httpClient,
 global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+bool useCache,
 string? xDeepinfraSource,
 string? userAgent,
 global::DeepInfra.OpenAICompletionsIn request);
@@ -28,12 +30,16 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
 /// <summary>
 /// Openai Completions
 /// </summary>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="userAgent"></param>
 /// <param name="request"></param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenaiCompletionsV1OpenaiCompletionsPostResponse> OpenaiCompletionsV1OpenaiCompletionsPostAsync(
+bool useCache,
 string? xDeepinfraSource,
 string? userAgent,
 global::DeepInfra.OpenAICompletionsIn request,
@@ -45,13 +51,14 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
 client: _httpClient);
 PrepareOpenaiCompletionsV1OpenaiCompletionsPostArguments(
 httpClient: _httpClient,
+useCache: ref useCache,
 xDeepinfraSource: ref xDeepinfraSource,
 userAgent: ref userAgent,
 request: request);
 
 using var httpRequest = new global::System.Net.Http.HttpRequestMessage(
 method: global::System.Net.Http.HttpMethod.Post,
-requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + "/v1/openai/completions", global::System.UriKind.RelativeOrAbsolute));
+requestUri: new global::System.Uri(_httpClient.BaseAddress?.AbsoluteUri.TrimEnd('/') + $"/v1/openai/completions?use_cache={useCache}", global::System.UriKind.RelativeOrAbsolute));
 var __httpRequestContentBody = global::System.Text.Json.JsonSerializer.Serialize(request, global::DeepInfra.SourceGenerationContext.Default.OpenAICompletionsIn);
 var __httpRequestContent = new global::System.Net.Http.StringContent(
 content: __httpRequestContentBody,
@@ -65,6 +72,7 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
 PrepareOpenaiCompletionsV1OpenaiCompletionsPostRequest(
 httpClient: _httpClient,
 httpRequestMessage: httpRequest,
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 userAgent: userAgent,
 request: request);
@@ -109,6 +117,9 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
 /// <summary>
 /// Openai Completions
 /// </summary>
+/// <param name="useCache">
+/// Default Value: true
+/// </param>
 /// <param name="xDeepinfraSource"></param>
 /// <param name="userAgent"></param>
 /// <param name="model">
@@ -171,6 +182,7 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
 public async global::System.Threading.Tasks.Task<global::DeepInfra.OpenaiCompletionsV1OpenaiCompletionsPostResponse> OpenaiCompletionsV1OpenaiCompletionsPostAsync(
 string model,
 string prompt,
+bool useCache = true,
 string? xDeepinfraSource = default,
 string? userAgent = default,
 int maxTokens = 512,
@@ -208,6 +220,7 @@ partial void ProcessOpenaiCompletionsV1OpenaiCompletionsPostResponseContent(
 };
 
 return await OpenaiCompletionsV1OpenaiCompletionsPostAsync(
+useCache: useCache,
 xDeepinfraSource: xDeepinfraSource,
 userAgent: userAgent,
 request: request,
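One detail worth flagging (an observation about .NET string interpolation, not something this diff changes): interpolating a C# bool yields "True"/"False", so the generated query string is use_cache=True rather than use_cache=true. Whether the server parses the capitalized form is not visible from this diff.

    bool useCache = true;
    var uri = $"/v1/openai/completions?use_cache={useCache}";
    // uri == "/v1/openai/completions?use_cache=True" — bool.ToString()
    // capitalizes, so the query value is "True", not "true".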