diff --git a/README.md b/README.md
index ad3bf0b..03e2104 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
[![Discord](https://img.shields.io/discord/1115206893015662663?label=Discord&logo=discord&logoColor=white&color=d82679)](https://discord.gg/Ca2xhfBf3v)
## Features 🔥
-- Fully generated C# SDK based on [OpenAPI specification](https://raw.githubusercontent.com/davidmigloz/langchain_dart/main/packages/anthropic_sdk_dart/oas/anthropic_openapi_curated.yaml) using [OpenApiGenerator](https://github.com/HavenDV/OpenApiGenerator)
+- Fully generated C# SDK based on [official OpenAPI specification](https://raw.githubusercontent.com/anthropics/anthropic-sdk-typescript/refs/heads/main/.stats.yml) using [AutoSDK](https://github.com/HavenDV/OpenApiGenerator)
- Automatic releases of new preview versions if there was an update to the OpenAPI specification
- Source generator to define tools natively through C# interfaces
- All modern .NET features - nullability, trimming, NativeAOT, etc.
diff --git a/src/helpers/FixOpenApiSpec/FixOpenApiSpec.csproj b/src/helpers/FixOpenApiSpec/FixOpenApiSpec.csproj
index fbfc94e..3facc26 100644
--- a/src/helpers/FixOpenApiSpec/FixOpenApiSpec.csproj
+++ b/src/helpers/FixOpenApiSpec/FixOpenApiSpec.csproj
@@ -9,6 +9,7 @@
+
diff --git a/src/helpers/FixOpenApiSpec/Program.cs b/src/helpers/FixOpenApiSpec/Program.cs
index bfedcf7..d5fec01 100644
--- a/src/helpers/FixOpenApiSpec/Program.cs
+++ b/src/helpers/FixOpenApiSpec/Program.cs
@@ -1,34 +1,72 @@
+using AutoSDK.Helpers;
using Microsoft.OpenApi;
using Microsoft.OpenApi.Any;
using Microsoft.OpenApi.Extensions;
+using Microsoft.OpenApi.Models;
using Microsoft.OpenApi.Readers;
var path = args[0];
var jsonOrYaml = await File.ReadAllTextAsync(path);
+if (OpenApi31Support.IsOpenApi31(jsonOrYaml))
+{
+ jsonOrYaml = OpenApi31Support.ConvertToOpenApi30(jsonOrYaml);
+}
+
var openApiDocument = new OpenApiStringReader().Read(jsonOrYaml, out var diagnostics);
-openApiDocument.Components.Schemas["TextBlock"].Properties["type"].Enum = new List
+openApiDocument.Components.Schemas.Add("Ping", new OpenApiSchema
{
- new OpenApiString("text"),
-};
-openApiDocument.Components.Schemas["ImageBlock"].Properties["type"].Enum = new List
-{
- new OpenApiString("image"),
-};
-openApiDocument.Components.Schemas["ToolUseBlock"]!.Properties["type"].Enum = new List
+ Type = "object",
+ Properties = new Dictionary
+ {
+ ["type"] = new()
+ {
+ Enum = new List
+ {
+ new OpenApiString("ping"),
+ },
+ Type = "string",
+ Default = new OpenApiString("ping"),
+ },
+ },
+ Required = new HashSet
+ {
+ "type",
+ },
+});
+openApiDocument.Components.Schemas["MessageStreamEvent"].OneOf.Add(new OpenApiSchema
{
- new OpenApiString("tool_use"),
-};
-openApiDocument.Components.Schemas["ToolResultBlock"]!.Properties["type"].Enum = new List
+ Reference = new OpenApiReference
+ {
+ Type = ReferenceType.Schema,
+ Id = "Ping",
+ },
+});
+
+openApiDocument.Components.SecuritySchemes.Clear();
+openApiDocument.Components.SecuritySchemes.Add("ApiKeyAuth", new OpenApiSecurityScheme
{
- new OpenApiString("tool_result"),
-};
+ Type = SecuritySchemeType.ApiKey,
+ In = ParameterLocation.Header,
+ Name = "x-api-key",
+});
-openApiDocument.Components.Schemas["TextBlock"].Required.Add("type");
-openApiDocument.Components.Schemas["ImageBlock"].Required.Add("type");
-openApiDocument.Components.Schemas["ToolUseBlock"].Required.Add("type");
-openApiDocument.Components.Schemas["ToolResultBlock"].Required.Add("type");
+openApiDocument.SecurityRequirements.Clear();
+openApiDocument.SecurityRequirements.Add(new OpenApiSecurityRequirement
+{
+ {
+ new OpenApiSecurityScheme
+ {
+ Reference = new OpenApiReference
+ {
+ Type = ReferenceType.SecurityScheme,
+ Id = "ApiKeyAuth",
+ },
+ },
+ new List()
+ }
+});
jsonOrYaml = openApiDocument.SerializeAsYaml(OpenApiSpecVersion.OpenApi3_0);
_ = new OpenApiStringReader().Read(jsonOrYaml, out diagnostics);
diff --git a/src/libs/Anthropic/AnthropicClient.Streaming.cs b/src/libs/Anthropic/AnthropicClient.Streaming.cs
index 2488128..71ee739 100755
--- a/src/libs/Anthropic/AnthropicClient.Streaming.cs
+++ b/src/libs/Anthropic/AnthropicClient.Streaming.cs
@@ -1,5 +1,4 @@
-using System.Net.Http;
-using System.Net.Http.Headers;
+using System.Net.Http.Headers;
using System.Runtime.CompilerServices;
// ReSharper disable RedundantNameQualifier
@@ -11,35 +10,39 @@ public partial class AnthropicClient
{
///
/// Create a Message
- /// Send a structured list of input messages with text and/or image content, and the
- /// model will generate the next message in the conversation.
- /// The Messages API can be used for either single queries or stateless multi-turn
- /// conversations.
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
///
/// The token to cancel the operation with
- ///
+ ///
public async IAsyncEnumerable CreateMessageAsStreamAsync(
- global::Anthropic.CreateMessageRequest request,
+ global::Anthropic.CreateMessageParams request,
+ string? anthropicVersion = default,
[EnumeratorCancellation] global::System.Threading.CancellationToken cancellationToken = default)
{
request = request ?? throw new global::System.ArgumentNullException(nameof(request));
request.Stream = true;
-
+
PrepareArguments(
client: HttpClient);
- PrepareCreateMessageArguments(
- httpClient: HttpClient,
- request: request);
var __pathBuilder = new PathBuilder(
- path: "/messages",
+ path: "/v1/messages",
baseUri: HttpClient.BaseAddress);
var __path = __pathBuilder.ToString();
using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
method: global::System.Net.Http.HttpMethod.Post,
requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
__httpRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream"));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
foreach (var __authorization in Authorizations)
{
@@ -56,6 +59,12 @@ public partial class AnthropicClient
__httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
}
}
+
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -66,10 +75,6 @@ public partial class AnthropicClient
PrepareRequest(
client: HttpClient,
request: __httpRequest);
- PrepareCreateMessageRequest(
- httpClient: HttpClient,
- httpRequestMessage: __httpRequest,
- request: request);
using var __response = await HttpClient.SendAsync(
request: __httpRequest,
@@ -79,10 +84,52 @@ public partial class AnthropicClient
ProcessResponse(
client: HttpClient,
response: __response);
- ProcessCreateMessageResponse(
- httpClient: HttpClient,
- httpResponseMessage: __response);
-
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.ErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.ErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.ErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
#if NET6_0_OR_GREATER
using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
diff --git a/src/libs/Anthropic/Extensions/AnthropicClient.ChatClient.cs b/src/libs/Anthropic/Extensions/AnthropicClient.ChatClient.cs
index c7c748a..4a3ad60 100644
--- a/src/libs/Anthropic/Extensions/AnthropicClient.ChatClient.cs
+++ b/src/libs/Anthropic/Extensions/AnthropicClient.ChatClient.cs
@@ -1,11 +1,5 @@
using Microsoft.Extensions.AI;
-using System;
-using System.Collections.Generic;
-using System.Net;
-using System.Reflection;
using System.Runtime.CompilerServices;
-using System.Text;
-using System.Text.Json.Serialization;
// ReSharper disable ConvertTypeCheckPatternToNullCheck
// ReSharper disable once CheckNamespace
@@ -14,14 +8,14 @@ namespace Anthropic;
public partial class AnthropicClient : IChatClient
{
- private static readonly JsonElement s_defaultParameterSchema = JsonDocument.Parse("{}").RootElement;
+ private static readonly JsonElement DefaultParameterSchema = JsonDocument.Parse("{}").RootElement;
private ChatClientMetadata? _metadata;
///
ChatClientMetadata IChatClient.Metadata => _metadata ??= new(nameof(AnthropicClient), this.BaseUri);
///
- object? IChatClient.GetService(Type serviceType, object? key)
+ object? IChatClient.GetService(Type? serviceType, object? key)
{
return key is null && serviceType?.IsInstanceOfType(this) is true ? this : null;
}
@@ -29,9 +23,9 @@ public partial class AnthropicClient : IChatClient
async Task IChatClient.CompleteAsync(
IList chatMessages, ChatOptions? options, CancellationToken cancellationToken)
{
- CreateMessageRequest request = CreateRequest(chatMessages, options);
+ CreateMessageParams request = CreateRequest(chatMessages, options);
- var response = await this.CreateMessageAsync(request, cancellationToken).ConfigureAwait(false);
+ var response = await this.Messages.MessagesPostAsync(request, anthropicVersion: "2023-06-01", cancellationToken).ConfigureAwait(false);
ChatMessage responseMessage = new()
{
@@ -44,33 +38,33 @@ async Task IChatClient.CompleteAsync(
(responseMessage.AdditionalProperties ??= [])[nameof(response.StopSequence)] = response.StopSequence;
}
- if (response.Content.Value1 is string stringContents)
+ // if (response.Content.Value1 is string stringContents)
+ // {
+ // responseMessage.Contents.Add(new TextContent(stringContents));
+ // }
+ //else if (response.Content.Value2 is IList blocks)
{
- responseMessage.Contents.Add(new TextContent(stringContents));
- }
- else if (response.Content.Value2 is IList blocks)
- {
- foreach (var block in blocks)
+ foreach (var block in response.Content)
{
if (block.IsText)
{
responseMessage.Contents.Add(new TextContent(block.Text!.Text) { RawRepresentation = block.Text });
}
- else if (block.IsImage)
- {
- responseMessage.Contents.Add(new ImageContent(
- block.Image!.Source.Data,
- block.Image!.Source.MediaType switch
- {
- ImageBlockSourceMediaType.ImagePng => "image/png",
- ImageBlockSourceMediaType.ImageGif => "image/gif",
- ImageBlockSourceMediaType.ImageWebp => "image/webp",
- _ => "image/jpeg",
- })
- {
- RawRepresentation = block.Image
- });
- }
+ // else if (block.IsImage)
+ // {
+ // responseMessage.Contents.Add(new ImageContent(
+ // block.Image!.Source.Data,
+ // block.Image!.Source.MediaType switch
+ // {
+ // ImageBlockSourceMediaType.ImagePng => "image/png",
+ // ImageBlockSourceMediaType.ImageGif => "image/gif",
+ // ImageBlockSourceMediaType.ImageWebp => "image/webp",
+ // _ => "image/jpeg",
+ // })
+ // {
+ // RawRepresentation = block.Image
+ // });
+ // }
else if (block.IsToolUse)
{
responseMessage.Contents.Add(new FunctionCallContent(
@@ -90,10 +84,10 @@ async Task IChatClient.CompleteAsync(
FinishReason = response.StopReason switch
{
null => null,
- StopReason.EndTurn or StopReason.StopSequence => ChatFinishReason.Stop,
- StopReason.MaxTokens => ChatFinishReason.Length,
- StopReason.ToolUse => ChatFinishReason.ToolCalls,
- _ => new ChatFinishReason(response.StopReason.ToString()!),
+ MessageStopReason.EndTurn or MessageStopReason.StopSequence => ChatFinishReason.Stop,
+ MessageStopReason.MaxTokens => ChatFinishReason.Length,
+ MessageStopReason.ToolUse => ChatFinishReason.ToolCalls,
+ _ => new ChatFinishReason(response.StopReason.ToString()),
},
};
@@ -106,15 +100,15 @@ async Task IChatClient.CompleteAsync(
TotalTokenCount = u.InputTokens + u.OutputTokens,
};
- if (u.CacheCreationInputTokens is not null)
- {
- (completion.Usage.AdditionalProperties ??= [])[nameof(u.CacheCreationInputTokens)] = u.CacheCreationInputTokens;
- }
-
- if (u.CacheReadInputTokens is not null)
- {
- (completion.Usage.AdditionalProperties ??= [])[nameof(u.CacheReadInputTokens)] = u.CacheReadInputTokens;
- }
+ // if (u.CacheCreationInputTokens is not null)
+ // {
+ // (completion.Usage.AdditionalProperties ??= [])[nameof(u.CacheCreationInputTokens)] = u.CacheCreationInputTokens;
+ // }
+ //
+ // if (u.CacheReadInputTokens is not null)
+ // {
+ // (completion.Usage.AdditionalProperties ??= [])[nameof(u.CacheReadInputTokens)] = u.CacheReadInputTokens;
+ // }
}
return completion;
@@ -155,11 +149,11 @@ async IAsyncEnumerable IChatClient.CompleteStream
}
}
- private static CreateMessageRequest CreateRequest(IList chatMessages, ChatOptions? options)
+ private static CreateMessageParams CreateRequest(IList chatMessages, ChatOptions? options)
{
string? systemMessage = null;
- List messages = [];
+ List messages = [];
foreach (var chatMessage in chatMessages)
{
if (chatMessage.Role == ChatRole.System)
@@ -172,35 +166,35 @@ private static CreateMessageRequest CreateRequest(IList chatMessage
continue;
}
- List blocks = [];
+ List blocks = [];
foreach (var content in chatMessage.Contents)
{
switch (content)
{
case TextContent tc:
- blocks.Add(new Block(new TextBlock() { Text = tc.Text }));
+ blocks.Add(new ContentVariant2Item2(new RequestTextBlock { Text = tc.Text }));
break;
case ImageContent ic when ic.ContainsData:
- blocks.Add(new Block(new ImageBlock()
+ blocks.Add(new ContentVariant2Item2(new RequestImageBlock
{
- Source = new ImageBlockSource()
+ Source = new Base64ImageSource
{
MediaType = ic.MediaType switch
{
- "image/png" => ImageBlockSourceMediaType.ImagePng,
- "image/gif" => ImageBlockSourceMediaType.ImageGif,
- "image/webp" => ImageBlockSourceMediaType.ImageWebp,
- _ => ImageBlockSourceMediaType.ImageJpeg,
+ "image/png" => Base64ImageSourceMediaType.ImagePng,
+ "image/gif" => Base64ImageSourceMediaType.ImageGif,
+ "image/webp" => Base64ImageSourceMediaType.ImageWebp,
+ _ => Base64ImageSourceMediaType.ImageJpeg,
},
- Data = Convert.ToBase64String(ic.Data?.ToArray() ?? []),
- Type = ImageBlockSourceType.Base64,
+ Data = ic.Data?.ToArray() ?? [], //Convert.ToBase64String(ic.Data?.ToArray() ?? []),
+ Type = Base64ImageSourceType.Base64,
}
}));
break;
case FunctionCallContent fcc:
- blocks.Add(new Block(new ToolUseBlock()
+ blocks.Add(new ContentVariant2Item2(new RequestToolUseBlock
{
Id = fcc.CallId,
Name = fcc.Name,
@@ -209,7 +203,7 @@ private static CreateMessageRequest CreateRequest(IList chatMessage
break;
case FunctionResultContent frc:
- blocks.Add(new Block(new ToolResultBlock()
+ blocks.Add(new ContentVariant2Item2(new RequestToolResultBlock
{
ToolUseId = frc.CallId,
Content = frc.Result?.ToString() ?? string.Empty,
@@ -218,18 +212,18 @@ private static CreateMessageRequest CreateRequest(IList chatMessage
break;
}
- foreach (Block block in blocks)
+ foreach (ContentVariant2Item2 block in blocks)
{
- messages.Add(new Message()
+ messages.Add(new InputMessage
{
- Role = chatMessage.Role == ChatRole.Assistant ? MessageRole.Assistant : MessageRole.User,
+ Role = chatMessage.Role == ChatRole.Assistant ? InputMessageRole.Assistant : InputMessageRole.User,
Content = new([block])
});
}
}
}
- var request = new CreateMessageRequest()
+ var request = new CreateMessageParams
{
MaxTokens = options?.MaxOutputTokens ?? 250,
Messages = messages,
@@ -241,22 +235,23 @@ private static CreateMessageRequest CreateRequest(IList chatMessage
TopK = options?.TopK,
ToolChoice =
options?.Tools is not { Count: > 0 } ? null:
- options?.ToolMode is AutoChatToolMode ? new ToolChoice() { Type = ToolChoiceType.Auto } :
- options?.ToolMode is RequiredChatToolMode r ?
- new ToolChoice()
- {
- Type = r.RequiredFunctionName is not null ? ToolChoiceType.Tool : ToolChoiceType.Any,
- Name = r.RequiredFunctionName
- } :
- null,
- Tools = options?.Tools is IList tools ?
- tools.OfType().Select(f => new Tool(new ToolCustom()
- {
- Name = f.Metadata.Name,
- Description = f.Metadata.Description,
- InputSchema = CreateSchema(f),
- })).ToList() :
- null,
+ options?.ToolMode is AutoChatToolMode ? new ToolChoice(new ToolChoiceAuto()) :
+ options?.ToolMode is RequiredChatToolMode r
+ ? r.RequiredFunctionName is not null
+ ? new ToolChoice(new ToolChoiceTool
+ {
+ Name = r.RequiredFunctionName,
+ })
+ : new ToolChoice(new ToolChoiceAny())
+ : (ToolChoice?)null,
+ // Tools = options?.Tools is IList tools ?
+ // tools.OfType().Select(f => new Tool
+ // {
+ // Name = f.Metadata.Name,
+ // Description = f.Metadata.Description,
+ // InputSchema = CreateSchema(f),
+ // }).ToList() :
+ // null,
};
return request;
}
@@ -269,7 +264,7 @@ private static ToolParameterJsonSchema CreateSchema(AIFunction f)
foreach (AIFunctionParameterMetadata parameter in parameters)
{
- tool.Properties.Add(parameter.Name, parameter.Schema is JsonElement e ? e : s_defaultParameterSchema);
+ tool.Properties.Add(parameter.Name, parameter.Schema is JsonElement e ? e : DefaultParameterSchema);
if (parameter.IsRequired)
{
diff --git a/src/libs/Anthropic/Extensions/StringExtensions.cs b/src/libs/Anthropic/Extensions/StringExtensions.cs
index c24a7f5..44acbc4 100755
--- a/src/libs/Anthropic/Extensions/StringExtensions.cs
+++ b/src/libs/Anthropic/Extensions/StringExtensions.cs
@@ -10,11 +10,11 @@ public static class StringExtensions
///
///
///
- public static Message AsUserMessage(this string content)
+ public static InputMessage AsUserMessage(this string content)
{
- return new Message
+ return new InputMessage
{
- Role = MessageRole.User,
+ Role = InputMessageRole.User,
Content = content,
};
}
@@ -24,11 +24,11 @@ public static Message AsUserMessage(this string content)
///
///
///
- public static Message AsAssistantMessage(this string content)
+ public static InputMessage AsAssistantMessage(this string content)
{
- return new Message
+ return new InputMessage
{
- Role = MessageRole.Assistant,
+ Role = InputMessageRole.Assistant,
Content = content,
};
}
@@ -39,16 +39,16 @@ public static Message AsAssistantMessage(this string content)
///
///
///
- public static Message AsToolCall(this string content, ToolUseBlock toolUse)
+ public static InputMessage AsToolCall(this string content, ResponseToolUseBlock toolUse)
{
toolUse = toolUse ?? throw new ArgumentNullException(nameof(toolUse));
- return new Message
+ return new InputMessage
{
- Role = MessageRole.User,
- Content = new List
+ Role = InputMessageRole.User,
+ Content = new List
{
- new ToolResultBlock
+ new RequestToolResultBlock
{
ToolUseId = toolUse.Id,
Content = content,
@@ -62,15 +62,34 @@ public static Message AsToolCall(this string content, ToolUseBlock toolUse)
///
///
///
- public static Message AsRequestMessage(this Message message)
+ public static InputMessage AsInputMessage(this Message message)
{
message = message ?? throw new ArgumentNullException(nameof(message));
- return new Message
+ return new InputMessage
{
- Content = message.Content,
- Role = message.Role,
- StopSequence = message.StopSequence,
+ Content = message.Content.Select(x =>
+ {
+ if (x.IsText)
+ {
+ return new ContentVariant2Item2(new RequestTextBlock
+ {
+ Text = x.Text!.Text,
+ });
+ }
+ if (x.IsToolUse)
+ {
+ return new ContentVariant2Item2(new RequestToolUseBlock
+ {
+ Id = x.ToolUse!.Id,
+ Input = x.ToolUse.Input,
+ Name = x.ToolUse!.Name,
+ });
+ }
+
+ return new ContentVariant2Item2();
+ }).ToList(),
+ Role = InputMessageRole.Assistant,
};
}
@@ -98,11 +117,11 @@ public static IList AsAnthropicTools(
this IList tools)
{
return tools
- .Select(x => (Tool)new ToolCustom
+ .Select(x => new Tool
{
Description = x.Description ?? string.Empty,
Name = x.Name ?? string.Empty,
- InputSchema = x.Parameters ?? new ToolCustomInputSchema(),
+ InputSchema = new InputSchema(), // x.Parameters ??
})
.ToList();
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.g.cs
index 179c0e7..80615e5 100644
--- a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.g.cs
@@ -4,7 +4,6 @@
namespace Anthropic
{
///
- /// API Spec for Anthropic API. Please see https://docs.anthropic.com/en/api for more details.
/// If no httpClient is provided, a new one will be created.
/// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
///
@@ -13,7 +12,7 @@ public sealed partial class AnthropicClient : global::Anthropic.IAnthropicClient
///
///
///
- public const string DefaultBaseUrl = "https://api.anthropic.com/v1";
+ public const string DefaultBaseUrl = "https://api.anthropic.com";
private bool _disposeHttpClient = true;
@@ -37,6 +36,33 @@ public sealed partial class AnthropicClient : global::Anthropic.IAnthropicClient
public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::Anthropic.SourceGenerationContext.Default;
+ ///
+ ///
+ ///
+ public MessagesClient Messages => new MessagesClient(HttpClient, authorizations: Authorizations)
+ {
+ ReadResponseAsString = ReadResponseAsString,
+ JsonSerializerContext = JsonSerializerContext,
+ };
+
+ ///
+ ///
+ ///
+ public TextCompletionsClient TextCompletions => new TextCompletionsClient(HttpClient, authorizations: Authorizations)
+ {
+ ReadResponseAsString = ReadResponseAsString,
+ JsonSerializerContext = JsonSerializerContext,
+ };
+
+ ///
+ ///
+ ///
+ public MessageBatchesClient MessageBatches => new MessageBatchesClient(HttpClient, authorizations: Authorizations)
+ {
+ ReadResponseAsString = ReadResponseAsString,
+ JsonSerializerContext = JsonSerializerContext,
+ };
+
///
/// Creates a new instance of the AnthropicClient.
/// If no httpClient is provided, a new one will be created.
diff --git a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.CreateMessage.g.cs b/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.CreateMessage.g.cs
deleted file mode 100644
index 0321787..0000000
--- a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.CreateMessage.g.cs
+++ /dev/null
@@ -1,244 +0,0 @@
-#nullable enable
-
-namespace Anthropic
-{
- public partial interface IAnthropicClient
- {
- ///
- /// Create a Message
- /// Send a structured list of input messages with text and/or image content, and the
- /// model will generate the next message in the conversation.
- /// The Messages API can be used for either single queries or stateless multi-turn
- /// conversations.
- ///
- ///
- /// The token to cancel the operation with
- ///
- global::System.Threading.Tasks.Task CreateMessageAsync(
- global::Anthropic.CreateMessageRequest request,
- global::System.Threading.CancellationToken cancellationToken = default);
-
- ///
- /// Create a Message
- /// Send a structured list of input messages with text and/or image content, and the
- /// model will generate the next message in the conversation.
- /// The Messages API can be used for either single queries or stateless multi-turn
- /// conversations.
- ///
- ///
- /// The model that will complete your prompt.
- /// See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
- /// details and options.
- /// Example: claude-3-5-sonnet-20241022
- ///
- ///
- /// Input messages.
- /// Our models are trained to operate on alternating `user` and `assistant`
- /// conversational turns. When creating a new `Message`, you specify the prior
- /// conversational turns with the `messages` parameter, and the model then generates
- /// the next `Message` in the conversation.
- /// Each input message must be an object with a `role` and `content`. You can
- /// specify a single `user`-role message, or you can include multiple `user` and
- /// `assistant` messages. The first message must always use the `user` role.
- /// If the final message uses the `assistant` role, the response content will
- /// continue immediately from the content in that message. This can be used to
- /// constrain part of the model's response.
- /// See [message content](https://docs.anthropic.com/en/api/messages-content) for
- /// details on how to construct valid message objects.
- /// Example with a single `user` message:
- /// ```json
- /// [{ "role": "user", "content": "Hello, Claude" }]
- /// ```
- /// Example with multiple conversational turns:
- /// ```json
- /// [
- /// { "role": "user", "content": "Hello there." },
- /// { "role": "assistant", "content": "Hi, I'm Claude. How can I help you?" },
- /// { "role": "user", "content": "Can you explain LLMs in plain English?" }
- /// ]
- /// ```
- /// Example with a partially-filled response from Claude:
- /// ```json
- /// [
- /// {
- /// "role": "user",
- /// "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
- /// },
- /// { "role": "assistant", "content": "The best answer is (" }
- /// ]
- /// ```
- /// Each input message `content` may be either a single `string` or an array of
- /// content blocks, where each block has a specific `type`. Using a `string` for
- /// `content` is shorthand for an array of one content block of type `"text"`. The
- /// following input messages are equivalent:
- /// ```json
- /// { "role": "user", "content": "Hello, Claude" }
- /// ```
- /// ```json
- /// { "role": "user", "content": [{ "type": "text", "text": "Hello, Claude" }] }
- /// ```
- /// Starting with Claude 3 models, you can also send image content blocks:
- /// ```json
- /// {
- /// "role": "user",
- /// "content": [
- /// {
- /// "type": "image",
- /// "source": {
- /// "type": "base64",
- /// "media_type": "image/jpeg",
- /// "data": "/9j/4AAQSkZJRg..."
- /// }
- /// },
- /// { "type": "text", "text": "What is in this image?" }
- /// ]
- /// }
- /// ```
- /// We currently support the `base64` source type for images, and the `image/jpeg`,
- /// `image/png`, `image/gif`, and `image/webp` media types.
- /// See [examples](https://docs.anthropic.com/en/api/messages-examples) for more
- /// input examples.
- /// Note that if you want to include a
- /// [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use
- /// the top-level `system` parameter — there is no `"system"` role for input
- /// messages in the Messages API.
- ///
- ///
- /// The maximum number of tokens to generate before stopping.
- /// Note that our models may stop _before_ reaching this maximum. This parameter
- /// only specifies the absolute maximum number of tokens to generate.
- /// Different models have different maximum values for this parameter. See
- /// [models](https://docs.anthropic.com/en/docs/models-overview) for details.
- ///
- ///
- /// An object describing metadata about the request.
- ///
- ///
- /// Custom text sequences that will cause the model to stop generating.
- /// Our models will normally stop when they have naturally completed their turn,
- /// which will result in a response `stop_reason` of `"end_turn"`.
- /// If you want the model to stop generating when it encounters custom strings of
- /// text, you can use the `stop_sequences` parameter. If the model encounters one of
- /// the custom sequences, the response `stop_reason` value will be `"stop_sequence"`
- /// and the response `stop_sequence` value will contain the matched stop sequence.
- ///
- ///
- /// System prompt.
- /// A system prompt is a way of providing context and instructions to Claude, such
- /// as specifying a particular goal or role. See our
- /// [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
- ///
- ///
- /// Amount of randomness injected into the response.
- /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0`
- /// for analytical / multiple choice, and closer to `1.0` for creative and
- /// generative tasks.
- /// Note that even with `temperature` of `0.0`, the results will not be fully
- /// deterministic.
- ///
- ///
- /// How the model should use the provided tools. The model can use a specific tool,
- /// any available tool, or decide by itself.
- /// - `auto`: allows Claude to decide whether to call any provided tools or not. This is the default value.
- /// - `any`: tells Claude that it must use one of the provided tools, but doesn’t force a particular tool.
- /// - `tool`: allows us to force Claude to always use a particular tool specified in the `name` field.
- ///
- ///
- /// Definitions of tools that the model may use.
- /// If you include `tools` in your API request, the model may return `tool_use`
- /// content blocks that represent the model's use of those tools. You can then run
- /// those tools using the tool input generated by the model and then optionally
- /// return results back to the model using `tool_result` content blocks.
- /// Each tool definition includes:
- /// - `name`: Name of the tool.
- /// - `description`: Optional, but strongly-recommended description of the tool.
- /// - `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input`
- /// shape that the model will produce in `tool_use` output content blocks.
- /// For example, if you defined `tools` as:
- /// ```json
- /// [
- /// {
- /// "name": "get_stock_price",
- /// "description": "Get the current stock price for a given ticker symbol.",
- /// "input_schema": {
- /// "type": "object",
- /// "properties": {
- /// "ticker": {
- /// "type": "string",
- /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
- /// }
- /// },
- /// "required": ["ticker"]
- /// }
- /// }
- /// ]
- /// ```
- /// And then asked the model "What's the S&P 500 at today?", the model might produce
- /// `tool_use` content blocks in the response like this:
- /// ```json
- /// [
- /// {
- /// "type": "tool_use",
- /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
- /// "name": "get_stock_price",
- /// "input": { "ticker": "^GSPC" }
- /// }
- /// ]
- /// ```
- /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an
- /// input, and return the following back to the model in a subsequent `user`
- /// message:
- /// ```json
- /// [
- /// {
- /// "type": "tool_result",
- /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
- /// "content": "259.75 USD"
- /// }
- /// ]
- /// ```
- /// Tools can be used for workflows that include running client-side tools and
- /// functions, or more generally whenever you want the model to produce a particular
- /// JSON structure of output.
- /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
- ///
- ///
- /// Only sample from the top K options for each subsequent token.
- /// Used to remove "long tail" low probability responses.
- /// [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
- /// Recommended for advanced use cases only. You usually only need to use
- /// `temperature`.
- ///
- ///
- /// Use nucleus sampling.
- /// In nucleus sampling, we compute the cumulative distribution over all the options
- /// for each subsequent token in decreasing probability order and cut it off once it
- /// reaches a particular probability specified by `top_p`. You should either alter
- /// `temperature` or `top_p`, but not both.
- /// Recommended for advanced use cases only. You usually only need to use
- /// `temperature`.
- ///
- ///
- /// Whether to incrementally stream the response using server-sent events.
- /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for
- /// details.
- /// Default Value: false
- ///
- /// The token to cancel the operation with
- ///
- global::System.Threading.Tasks.Task CreateMessageAsync(
- global::Anthropic.AnyOf model,
- global::System.Collections.Generic.IList messages,
- int maxTokens,
- global::Anthropic.CreateMessageRequestMetadata? metadata = default,
- global::System.Collections.Generic.IList? stopSequences = default,
- global::Anthropic.OneOf>? system = default,
- double? temperature = default,
- global::Anthropic.ToolChoice? toolChoice = default,
- global::System.Collections.Generic.IList? tools = default,
- int? topK = default,
- double? topP = default,
- bool? stream = default,
- global::System.Threading.CancellationToken cancellationToken = default);
- }
-}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.CreateMessageBatch.g.cs b/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.CreateMessageBatch.g.cs
deleted file mode 100644
index 6c16ed8..0000000
--- a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.CreateMessageBatch.g.cs
+++ /dev/null
@@ -1,31 +0,0 @@
-#nullable enable
-
-namespace Anthropic
-{
- public partial interface IAnthropicClient
- {
- ///
- /// Create a Message Batch
- /// Send a batch of Message creation requests.
- ///
- ///
- /// The token to cancel the operation with
- ///
- global::System.Threading.Tasks.Task CreateMessageBatchAsync(
- global::Anthropic.CreateMessageBatchRequest request,
- global::System.Threading.CancellationToken cancellationToken = default);
-
- ///
- /// Create a Message Batch
- /// Send a batch of Message creation requests.
- ///
- ///
- /// List of requests for prompt completion. Each is an individual request to create a Message.
- ///
- /// The token to cancel the operation with
- ///
- global::System.Threading.Tasks.Task CreateMessageBatchAsync(
- global::System.Collections.Generic.IList requests,
- global::System.Threading.CancellationToken cancellationToken = default);
- }
-}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.RetrieveMessageBatch.g.cs b/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.RetrieveMessageBatch.g.cs
deleted file mode 100644
index 455909a..0000000
--- a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.RetrieveMessageBatch.g.cs
+++ /dev/null
@@ -1,20 +0,0 @@
-#nullable enable
-
-namespace Anthropic
-{
- public partial interface IAnthropicClient
- {
- ///
- /// Retrieve a Message Batch
- /// This endpoint is idempotent and can be used to poll for Message Batch
- /// completion. To access the results of a Message Batch, make a request to the
- /// `results_url` field in the response.
- ///
- ///
- /// The token to cancel the operation with
- ///
- global::System.Threading.Tasks.Task RetrieveMessageBatchAsync(
- string id,
- global::System.Threading.CancellationToken cancellationToken = default);
- }
-}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.g.cs
index 4e45e49..a3aa598 100644
--- a/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.IAnthropicClient.g.cs
@@ -4,7 +4,6 @@
namespace Anthropic
{
///
- /// API Spec for Anthropic API. Please see https://docs.anthropic.com/en/api for more details.
/// If no httpClient is provided, a new one will be created.
/// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
///
@@ -37,5 +36,20 @@ public partial interface IAnthropicClient : global::System.IDisposable
global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; }
+ ///
+ ///
+ ///
+ public MessagesClient Messages { get; }
+
+ ///
+ ///
+ ///
+ public TextCompletionsClient TextCompletions { get; }
+
+ ///
+ ///
+ ///
+ public MessageBatchesClient MessageBatches { get; }
+
}
}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesCancel.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesCancel.g.cs
new file mode 100644
index 0000000..6275049
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesCancel.g.cs
@@ -0,0 +1,31 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Cancel a Message Batch
+ /// Batches may be canceled any time before processing ends. Once cancellation is initiated, the batch enters a `canceling` state, at which time the system may complete any in-progress, non-interruptible requests before finalizing cancellation.
+ /// The number of canceled requests is specified in `request_counts`. To determine which requests were canceled, check the individual results within the batch. Note that cancellation may not result in any canceled requests if they were non-interruptible.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesCancelAsync(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesCancel2.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesCancel2.g.cs
new file mode 100644
index 0000000..9b5fabe
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesCancel2.g.cs
@@ -0,0 +1,31 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Cancel a Message Batch
+ /// Batches may be canceled any time before processing ends. Once cancellation is initiated, the batch enters a `canceling` state, at which time the system may complete any in-progress, non-interruptible requests before finalizing cancellation.
+ /// The number of canceled requests is specified in `request_counts`. To determine which requests were canceled, check the individual results within the batch. Note that cancellation may not result in any canceled requests if they were non-interruptible.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesCancel2Async(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesList.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesList.g.cs
new file mode 100644
index 0000000..aca874a
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesList.g.cs
@@ -0,0 +1,45 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// List Message Batches
+ /// List all Message Batches within a Workspace. Most recently created batches are returned first.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately before this object.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately after this object.
+ ///
+ ///
+ /// Number of items to return per page.
+ /// Defaults to `20`. Ranges from `1` to `100`.
+ /// Default Value: 20
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesListAsync(
+ string? beforeId = default,
+ string? afterId = default,
+ int? limit = default,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesList2.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesList2.g.cs
new file mode 100644
index 0000000..c1208f3
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesList2.g.cs
@@ -0,0 +1,45 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// List Message Batches
+ /// List all Message Batches within a Workspace. Most recently created batches are returned first.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately before this object.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately after this object.
+ ///
+ ///
+ /// Number of items to return per page.
+ /// Defaults to `20`. Ranges from `1` to `100`.
+ /// Default Value: 20
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesList2Async(
+ string? beforeId = default,
+ string? afterId = default,
+ int? limit = default,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesPost.g.cs
new file mode 100644
index 0000000..6cf1012
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesPost.g.cs
@@ -0,0 +1,53 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Create a Message Batch
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesPostAsync(
+ global::Anthropic.BetaCreateMessageBatchParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Create a Message Batch
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// List of requests for prompt completion. Each is an individual request to create a Message.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesPostAsync(
+ global::System.Collections.Generic.IList requests,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesPost2.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesPost2.g.cs
new file mode 100644
index 0000000..19fda27
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesPost2.g.cs
@@ -0,0 +1,53 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Create a Message Batch
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesPost2Async(
+ global::Anthropic.BetaCreateMessageBatchParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Create a Message Batch
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// List of requests for prompt completion. Each is an individual request to create a Message.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesPost2Async(
+ global::System.Collections.Generic.IList requests,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesResults.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesResults.g.cs
new file mode 100644
index 0000000..1b23864
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesResults.g.cs
@@ -0,0 +1,36 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Retrieve Message Batch results
+ /// Streams the results of a Message Batch as a `.jsonl` file.
+ /// Each line in the file is a JSON object containing the result of a single request in the Message Batch. Results are not guaranteed to be in the same order as requests. Use the `custom_id` field to match results to requests.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesResultsAsync(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesResults2.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesResults2.g.cs
new file mode 100644
index 0000000..eccdb10
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesResults2.g.cs
@@ -0,0 +1,36 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Retrieve Message Batch results
+ /// Streams the results of a Message Batch as a `.jsonl` file.
+ /// Each line in the file is a JSON object containing the result of a single request in the Message Batch. Results are not guaranteed to be in the same order as requests. Use the `custom_id` field to match results to requests.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesResults2Async(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesRetrieve.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesRetrieve.g.cs
new file mode 100644
index 0000000..d7861f1
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesRetrieve.g.cs
@@ -0,0 +1,35 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Retrieve a Message Batch
+ /// This endpoint is idempotent and can be used to poll for Message Batch completion. To access the results of a Message Batch, make a request to the `results_url` field in the response.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesRetrieveAsync(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesRetrieve2.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesRetrieve2.g.cs
new file mode 100644
index 0000000..cdca70f
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.BetaMessageBatchesRetrieve2.g.cs
@@ -0,0 +1,35 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessageBatchesClient
+ {
+ ///
+ /// Retrieve a Message Batch
+ /// This endpoint is idempotent and can be used to poll for Message Batch completion. To access the results of a Message Batch, make a request to the `results_url` field in the response.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessageBatchesRetrieve2Async(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.g.cs
new file mode 100644
index 0000000..98154d5
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessageBatchesClient.g.cs
@@ -0,0 +1,40 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ ///
+ public partial interface IMessageBatchesClient : global::System.IDisposable
+ {
+ ///
+ /// The HttpClient instance.
+ ///
+ public global::System.Net.Http.HttpClient HttpClient { get; }
+
+ ///
+ /// The base URL for the API.
+ ///
+ public System.Uri? BaseUri { get; }
+
+ ///
+ /// The authorizations to use for the requests.
+ ///
+ public global::System.Collections.Generic.List Authorizations { get; }
+
+ ///
+ /// Gets or sets a value indicating whether the response content should be read as a string.
+ /// True by default in debug builds, false otherwise.
+ ///
+ public bool ReadResponseAsString { get; set; }
+
+ ///
+ ///
+ ///
+ global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; }
+
+
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesCountTokensPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesCountTokensPost.g.cs
new file mode 100644
index 0000000..c905518
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesCountTokensPost.g.cs
@@ -0,0 +1,164 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessagesClient
+ {
+ ///
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessagesCountTokensPostAsync(
+ global::Anthropic.BetaCountMessageTokensParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessagesCountTokensPostAsync(
+ global::System.Collections.Generic.IList messages,
+ global::Anthropic.Model model,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.BetaToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ global::Anthropic.AnyOf>? system = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesCountTokensPost2.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesCountTokensPost2.g.cs
new file mode 100644
index 0000000..4cfee07
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesCountTokensPost2.g.cs
@@ -0,0 +1,164 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessagesClient
+ {
+ ///
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessagesCountTokensPost2Async(
+ global::Anthropic.BetaCountMessageTokensParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessagesCountTokensPost2Async(
+ global::System.Collections.Generic.IList messages,
+ global::Anthropic.Model model,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.BetaToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ global::Anthropic.AnyOf>? system = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesPost.g.cs
new file mode 100644
index 0000000..15eaae6
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.BetaMessagesPost.g.cs
@@ -0,0 +1,207 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessagesClient
+ {
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessagesPostAsync(
+ global::Anthropic.BetaCreateMessageParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// The maximum number of tokens to generate before stopping.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Different models have different maximum values for this parameter. See [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Example: 1024
+ ///
+ ///
+ /// An object describing metadata about the request.
+ ///
+ ///
+ /// Custom text sequences that will cause the model to stop generating.
+ /// Our models will normally stop when they have naturally completed their turn, which will result in a response `stop_reason` of `"end_turn"`.
+ /// If you want the model to stop generating when it encounters custom strings of text, you can use the `stop_sequences` parameter. If the model encounters one of the custom sequences, the response `stop_reason` value will be `"stop_sequence"` and the response `stop_sequence` value will contain the matched stop sequence.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for details.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// Amount of randomness injected into the response.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Only sample from the top K options for each subsequent token.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
+ ///
+ ///
+ /// Use nucleus sampling.
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task BetaMessagesPostAsync(
+ global::Anthropic.Model model,
+ global::System.Collections.Generic.IList messages,
+ int maxTokens,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.BetaMetadata? metadata = default,
+ global::System.Collections.Generic.IList? stopSequences = default,
+ bool? stream = default,
+ global::Anthropic.AnyOf>? system = default,
+ double? temperature = default,
+ global::Anthropic.BetaToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ int? topK = default,
+ double? topP = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.MessagesPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.MessagesPost.g.cs
new file mode 100644
index 0000000..def8cea
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.MessagesPost.g.cs
@@ -0,0 +1,197 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessagesClient
+ {
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task MessagesPostAsync(
+ global::Anthropic.CreateMessageParams request,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// The maximum number of tokens to generate before stopping.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Different models have different maximum values for this parameter. See [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Example: 1024
+ ///
+ ///
+ /// An object describing metadata about the request.
+ ///
+ ///
+ /// Custom text sequences that will cause the model to stop generating.
+ /// Our models will normally stop when they have naturally completed their turn, which will result in a response `stop_reason` of `"end_turn"`.
+ /// If you want the model to stop generating when it encounters custom strings of text, you can use the `stop_sequences` parameter. If the model encounters one of the custom sequences, the response `stop_reason` value will be `"stop_sequence"` and the response `stop_sequence` value will contain the matched stop sequence.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for details.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// Amount of randomness injected into the response.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Only sample from the top K options for each subsequent token.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
+ ///
+ ///
+ /// Use nucleus sampling.
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task MessagesPostAsync(
+ global::Anthropic.Model model,
+ global::System.Collections.Generic.IList messages,
+ int maxTokens,
+ string? anthropicVersion = default,
+ global::Anthropic.Metadata? metadata = default,
+ global::System.Collections.Generic.IList? stopSequences = default,
+ bool? stream = default,
+ global::Anthropic.AnyOf>? system = default,
+ double? temperature = default,
+ global::Anthropic.ToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ int? topK = default,
+ double? topP = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.PromptCachingBetaMessagesPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.PromptCachingBetaMessagesPost.g.cs
new file mode 100644
index 0000000..4ad29a4
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.PromptCachingBetaMessagesPost.g.cs
@@ -0,0 +1,207 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface IMessagesClient
+ {
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task PromptCachingBetaMessagesPostAsync(
+ global::Anthropic.PromptCachingBetaCreateMessageParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// The maximum number of tokens to generate before stopping.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Different models have different maximum values for this parameter. See [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Example: 1024
+ ///
+ ///
+ /// An object describing metadata about the request.
+ ///
+ ///
+ /// Custom text sequences that will cause the model to stop generating.
+ /// Our models will normally stop when they have naturally completed their turn, which will result in a response `stop_reason` of `"end_turn"`.
+ /// If you want the model to stop generating when it encounters custom strings of text, you can use the `stop_sequences` parameter. If the model encounters one of the custom sequences, the response `stop_reason` value will be `"stop_sequence"` and the response `stop_sequence` value will contain the matched stop sequence.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for details.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// Amount of randomness injected into the response.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Only sample from the top K options for each subsequent token.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
+ ///
+ ///
+ /// Use nucleus sampling.
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task PromptCachingBetaMessagesPostAsync(
+ global::Anthropic.Model model,
+ global::System.Collections.Generic.IList messages,
+ int maxTokens,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.Metadata? metadata = default,
+ global::System.Collections.Generic.IList? stopSequences = default,
+ bool? stream = default,
+ global::Anthropic.AnyOf>? system = default,
+ double? temperature = default,
+ global::Anthropic.ToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ int? topK = default,
+ double? topP = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.g.cs
new file mode 100644
index 0000000..dcd9ea4
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.IMessagesClient.g.cs
@@ -0,0 +1,40 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ ///
+ public partial interface IMessagesClient : global::System.IDisposable
+ {
+ ///
+ /// The HttpClient instance.
+ ///
+ public global::System.Net.Http.HttpClient HttpClient { get; }
+
+ ///
+ /// The base URL for the API.
+ ///
+ public System.Uri? BaseUri { get; }
+
+ ///
+ /// The authorizations to use for the requests.
+ ///
+ public global::System.Collections.Generic.List Authorizations { get; }
+
+ ///
+ /// Gets or sets a value indicating whether the response content should be read as a string.
+ /// True by default in debug builds, false otherwise.
+ ///
+ public bool ReadResponseAsString { get; set; }
+
+ ///
+ ///
+ ///
+ global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; }
+
+
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.ITextCompletionsClient.CompletePost.g.cs b/src/libs/Anthropic/Generated/Anthropic.ITextCompletionsClient.CompletePost.g.cs
new file mode 100644
index 0000000..e1d6ae0
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.ITextCompletionsClient.CompletePost.g.cs
@@ -0,0 +1,98 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public partial interface ITextCompletionsClient
+ {
+ ///
+ /// Create a Text Completion
+ /// [Legacy] Create a Text Completion.
+ /// The Text Completions API is a legacy API. We recommend using the [Messages API](https://docs.anthropic.com/en/api/messages) going forward.
+ /// Future models and features will not be compatible with Text Completions. See our [migration guide](https://docs.anthropic.com/en/api/migrating-from-text-completions-to-messages) for guidance in migrating from Text Completions to Messages.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task CompletePostAsync(
+ global::Anthropic.CompletionRequest request,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+
+ ///
+ /// Create a Text Completion
+ /// [Legacy] Create a Text Completion.
+ /// The Text Completions API is a legacy API. We recommend using the [Messages API](https://docs.anthropic.com/en/api/messages) going forward.
+ /// Future models and features will not be compatible with Text Completions. See our [migration guide](https://docs.anthropic.com/en/api/migrating-from-text-completions-to-messages) for guidance in migrating from Text Completions to Messages.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ ///
+ /// The prompt that you want Claude to complete.
+ /// For proper response generation you will need to format your prompt using alternating `\n\nHuman:` and `\n\nAssistant:` conversational turns. For example:
+ /// ```
+ /// "\n\nHuman: {userQuestion}\n\nAssistant:"
+ /// ```
+ /// See [prompt validation](https://docs.anthropic.com/en/api/prompt-validation) and our guide to [prompt design](https://docs.anthropic.com/en/docs/intro-to-prompting) for more details.
+ /// Example:
+ /// Human: Hello, world!
+ /// Assistant:
+ ///
+ ///
+ /// The maximum number of tokens to generate before stopping.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Example: 256
+ ///
+ ///
+ /// Sequences that will cause the model to stop generating.
+ /// Our models stop on `"\n\nHuman:"`, and may include additional built-in stop sequences in the future. By providing the stop_sequences parameter, you may include additional strings that will cause the model to stop generating.
+ ///
+ ///
+ /// Amount of randomness injected into the response.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
+ ///
+ ///
+ /// Use nucleus sampling.
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
+ ///
+ ///
+ /// Only sample from the top K options for each subsequent token.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
+ ///
+ ///
+ /// An object describing metadata about the request.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/streaming) for details.
+ ///
+ /// The token to cancel the operation with
+ ///
+ global::System.Threading.Tasks.Task CompletePostAsync(
+ global::Anthropic.Model model,
+ string prompt,
+ int maxTokensToSample,
+ string? anthropicVersion = default,
+ global::System.Collections.Generic.IList? stopSequences = default,
+ double? temperature = default,
+ double? topP = default,
+ int? topK = default,
+ global::Anthropic.Metadata? metadata = default,
+ bool? stream = default,
+ global::System.Threading.CancellationToken cancellationToken = default);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.ITextCompletionsClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.ITextCompletionsClient.g.cs
new file mode 100644
index 0000000..8f11612
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.ITextCompletionsClient.g.cs
@@ -0,0 +1,40 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ ///
+ public partial interface ITextCompletionsClient : global::System.IDisposable
+ {
+ ///
+ /// The HttpClient instance.
+ ///
+ public global::System.Net.Http.HttpClient HttpClient { get; }
+
+ ///
+ /// The base URL for the API.
+ ///
+ public System.Uri? BaseUri { get; }
+
+ ///
+ /// The authorizations to use for the requests.
+ ///
+ public global::System.Collections.Generic.List Authorizations { get; }
+
+ ///
+ /// Gets or sets a value indicating whether the response content should be read as a string.
+ /// True by default in debug builds, false otherwise.
+ ///
+ public bool ReadResponseAsString { get; set; }
+
+ ///
+ ///
+ ///
+ global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; }
+
+
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesCancel.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesCancel.g.cs
new file mode 100644
index 0000000..730639f
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesCancel.g.cs
@@ -0,0 +1,212 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesCancelArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string messageBatchId,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion);
+ partial void PrepareBetaMessageBatchesCancelRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string messageBatchId,
+ string? anthropicBeta,
+ string? anthropicVersion);
+ partial void ProcessBetaMessageBatchesCancelResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesCancelResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Cancel a Message Batch
+ /// Batches may be canceled any time before processing ends. Once cancellation is initiated, the batch enters a `canceling` state, at which time the system may complete any in-progress, non-interruptible requests before finalizing cancellation.
+ /// The number of canceled requests is specified in `request_counts`. To determine which requests were canceled, check the individual results within the batch. Note that cancellation may not result in any canceled requests if they were non-interruptible.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task BetaMessageBatchesCancelAsync(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesCancelArguments(
+ httpClient: HttpClient,
+ messageBatchId: ref messageBatchId,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion);
+
+ var __pathBuilder = new PathBuilder(
+ path: $"/v1/messages/batches/{messageBatchId}/cancel?beta=true",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Post,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesCancelRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ messageBatchId: messageBatchId,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesCancelResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesCancelResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaMessageBatch.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.RetrieveMessageBatch.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesCancel2.g.cs
similarity index 53%
rename from src/libs/Anthropic/Generated/Anthropic.AnthropicClient.RetrieveMessageBatch.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesCancel2.g.cs
index 7dece9d..66654b5 100644
--- a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.RetrieveMessageBatch.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesCancel2.g.cs
@@ -3,49 +3,66 @@
namespace Anthropic
{
- public partial class AnthropicClient
+ public partial class MessageBatchesClient
{
- partial void PrepareRetrieveMessageBatchArguments(
+ partial void PrepareBetaMessageBatchesCancel2Arguments(
global::System.Net.Http.HttpClient httpClient,
- ref string id);
- partial void PrepareRetrieveMessageBatchRequest(
+ ref string messageBatchId,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion);
+ partial void PrepareBetaMessageBatchesCancel2Request(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
- string id);
- partial void ProcessRetrieveMessageBatchResponse(
+ string messageBatchId,
+ string? anthropicBeta,
+ string? anthropicVersion);
+ partial void ProcessBetaMessageBatchesCancel2Response(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpResponseMessage httpResponseMessage);
- partial void ProcessRetrieveMessageBatchResponseContent(
+ partial void ProcessBetaMessageBatchesCancel2ResponseContent(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpResponseMessage httpResponseMessage,
ref string content);
///
- /// Retrieve a Message Batch
- /// This endpoint is idempotent and can be used to poll for Message Batch
- /// completion. To access the results of a Message Batch, make a request to the
- /// `results_url` field in the response.
+ /// Cancel a Message Batch
+ /// Batches may be canceled any time before processing ends. Once cancellation is initiated, the batch enters a `canceling` state, at which time the system may complete any in-progress, non-interruptible requests before finalizing cancellation.
+ /// The number of canceled requests is specified in `request_counts`. To determine which requests were canceled, check the individual results within the batch. Note that cancellation may not result in any canceled requests if they were non-interruptible.
///
- ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
/// The token to cancel the operation with
///
- public async global::System.Threading.Tasks.Task RetrieveMessageBatchAsync(
- string id,
+ public async global::System.Threading.Tasks.Task BetaMessageBatchesCancel2Async(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
PrepareArguments(
client: HttpClient);
- PrepareRetrieveMessageBatchArguments(
+ PrepareBetaMessageBatchesCancel2Arguments(
httpClient: HttpClient,
- id: ref id);
+ messageBatchId: ref messageBatchId,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion);
var __pathBuilder = new PathBuilder(
- path: $"/messages/batches/{id}",
+ path: $"/v1/messages/batches/{messageBatchId}/cancel",
baseUri: HttpClient.BaseAddress);
var __path = __pathBuilder.ToString();
using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
- method: global::System.Net.Http.HttpMethod.Get,
+ method: global::System.Net.Http.HttpMethod.Post,
requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
#if NET6_0_OR_GREATER
__httpRequest.Version = global::System.Net.HttpVersion.Version11;
@@ -68,13 +85,25 @@ partial void ProcessRetrieveMessageBatchResponseContent(
}
}
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+
PrepareRequest(
client: HttpClient,
request: __httpRequest);
- PrepareRetrieveMessageBatchRequest(
+ PrepareBetaMessageBatchesCancel2Request(
httpClient: HttpClient,
httpRequestMessage: __httpRequest,
- id: id);
+ messageBatchId: messageBatchId,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion);
using var __response = await HttpClient.SendAsync(
request: __httpRequest,
@@ -84,9 +113,37 @@ partial void ProcessRetrieveMessageBatchResponseContent(
ProcessResponse(
client: HttpClient,
response: __response);
- ProcessRetrieveMessageBatchResponse(
+ ProcessBetaMessageBatchesCancel2Response(
httpClient: HttpClient,
httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
if (ReadResponseAsString)
{
@@ -96,7 +153,7 @@ partial void ProcessRetrieveMessageBatchResponseContent(
client: HttpClient,
response: __response,
content: ref __content);
- ProcessRetrieveMessageBatchResponseContent(
+ ProcessBetaMessageBatchesCancel2ResponseContent(
httpClient: HttpClient,
httpResponseMessage: __response,
content: ref __content);
@@ -121,7 +178,7 @@ partial void ProcessRetrieveMessageBatchResponseContent(
}
return
- global::Anthropic.MessageBatch.FromJson(__content, JsonSerializerContext) ??
+ global::Anthropic.BetaMessageBatch.FromJson(__content, JsonSerializerContext) ??
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
}
else
@@ -147,7 +204,7 @@ partial void ProcessRetrieveMessageBatchResponseContent(
using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
return
- await global::Anthropic.MessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ await global::Anthropic.BetaMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
throw new global::System.InvalidOperationException("Response deserialization failed.");
}
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesList.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesList.g.cs
new file mode 100644
index 0000000..5a4b0cc
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesList.g.cs
@@ -0,0 +1,247 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesListArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? beforeId,
+ ref string? afterId,
+ ref int? limit,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ ref string? xApiKey);
+ partial void PrepareBetaMessageBatchesListRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? beforeId,
+ string? afterId,
+ int? limit,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ string? xApiKey);
+ partial void ProcessBetaMessageBatchesListResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesListResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// List Message Batches
+ /// List all Message Batches within a Workspace. Most recently created batches are returned first.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately before this object.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately after this object.
+ ///
+ ///
+ /// Number of items to return per page.
+ /// Defaults to `20`. Ranges from `1` to `100`.
+ /// Default Value: 20
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task BetaMessageBatchesListAsync(
+ string? beforeId = default,
+ string? afterId = default,
+ int? limit = default,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesListArguments(
+ httpClient: HttpClient,
+ beforeId: ref beforeId,
+ afterId: ref afterId,
+ limit: ref limit,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ xApiKey: ref xApiKey);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages/batches?beta=true",
+ baseUri: HttpClient.BaseAddress);
+ __pathBuilder
+ .AddOptionalParameter("before_id", beforeId)
+ .AddOptionalParameter("after_id", afterId)
+ .AddOptionalParameter("limit", limit?.ToString())
+ ;
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Get,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+ if (xApiKey != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("x-api-key", xApiKey.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesListRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ beforeId: beforeId,
+ afterId: afterId,
+ limit: limit,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ xApiKey: xApiKey);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesListResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesListResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaListResponseMessageBatch.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaListResponseMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesList2.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesList2.g.cs
new file mode 100644
index 0000000..b67270a
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesList2.g.cs
@@ -0,0 +1,247 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesList2Arguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? beforeId,
+ ref string? afterId,
+ ref int? limit,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ ref string? xApiKey);
+ partial void PrepareBetaMessageBatchesList2Request(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? beforeId,
+ string? afterId,
+ int? limit,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ string? xApiKey);
+ partial void ProcessBetaMessageBatchesList2Response(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesList2ResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// List Message Batches
+ /// List all Message Batches within a Workspace. Most recently created batches are returned first.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately before this object.
+ ///
+ ///
+ /// ID of the object to use as a cursor for pagination. When provided, returns the page of results immediately after this object.
+ ///
+ ///
+ /// Number of items to return per page.
+ /// Defaults to `20`. Ranges from `1` to `100`.
+ /// Default Value: 20
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task BetaMessageBatchesList2Async(
+ string? beforeId = default,
+ string? afterId = default,
+ int? limit = default,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesList2Arguments(
+ httpClient: HttpClient,
+ beforeId: ref beforeId,
+ afterId: ref afterId,
+ limit: ref limit,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ xApiKey: ref xApiKey);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages/batches",
+ baseUri: HttpClient.BaseAddress);
+ __pathBuilder
+ .AddOptionalParameter("before_id", beforeId)
+ .AddOptionalParameter("after_id", afterId)
+ .AddOptionalParameter("limit", limit?.ToString())
+ ;
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Get,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+ if (xApiKey != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("x-api-key", xApiKey.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesList2Request(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ beforeId: beforeId,
+ afterId: afterId,
+ limit: limit,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ xApiKey: xApiKey);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesList2Response(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesList2ResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaListResponseMessageBatch.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaListResponseMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.CreateMessageBatch.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesPost.g.cs
similarity index 55%
rename from src/libs/Anthropic/Generated/Anthropic.AnthropicClient.CreateMessageBatch.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesPost.g.cs
index 4235691..6e09d23 100644
--- a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.CreateMessageBatch.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesPost.g.cs
@@ -3,45 +3,62 @@
namespace Anthropic
{
- public partial class AnthropicClient
+ public partial class MessageBatchesClient
{
- partial void PrepareCreateMessageBatchArguments(
+ partial void PrepareBetaMessageBatchesPostArguments(
global::System.Net.Http.HttpClient httpClient,
- global::Anthropic.CreateMessageBatchRequest request);
- partial void PrepareCreateMessageBatchRequest(
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ global::Anthropic.BetaCreateMessageBatchParams request);
+ partial void PrepareBetaMessageBatchesPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
- global::Anthropic.CreateMessageBatchRequest request);
- partial void ProcessCreateMessageBatchResponse(
+ string? anthropicBeta,
+ string? anthropicVersion,
+ global::Anthropic.BetaCreateMessageBatchParams request);
+ partial void ProcessBetaMessageBatchesPostResponse(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpResponseMessage httpResponseMessage);
- partial void ProcessCreateMessageBatchResponseContent(
+ partial void ProcessBetaMessageBatchesPostResponseContent(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpResponseMessage httpResponseMessage,
ref string content);
///
/// Create a Message Batch
- /// Send a batch of Message creation requests.
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
///
/// The token to cancel the operation with
///
- public async global::System.Threading.Tasks.Task CreateMessageBatchAsync(
- global::Anthropic.CreateMessageBatchRequest request,
+ public async global::System.Threading.Tasks.Task BetaMessageBatchesPostAsync(
+ global::Anthropic.BetaCreateMessageBatchParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
request = request ?? throw new global::System.ArgumentNullException(nameof(request));
PrepareArguments(
client: HttpClient);
- PrepareCreateMessageBatchArguments(
+ PrepareBetaMessageBatchesPostArguments(
httpClient: HttpClient,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
request: request);
var __pathBuilder = new PathBuilder(
- path: "/messages/batches",
+ path: "/v1/messages/batches?beta=true",
baseUri: HttpClient.BaseAddress);
var __path = __pathBuilder.ToString();
using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
@@ -67,6 +84,16 @@ partial void ProcessCreateMessageBatchResponseContent(
__httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
}
}
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -77,9 +104,11 @@ partial void ProcessCreateMessageBatchResponseContent(
PrepareRequest(
client: HttpClient,
request: __httpRequest);
- PrepareCreateMessageBatchRequest(
+ PrepareBetaMessageBatchesPostRequest(
httpClient: HttpClient,
httpRequestMessage: __httpRequest,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
request: request);
using var __response = await HttpClient.SendAsync(
@@ -90,9 +119,37 @@ partial void ProcessCreateMessageBatchResponseContent(
ProcessResponse(
client: HttpClient,
response: __response);
- ProcessCreateMessageBatchResponse(
+ ProcessBetaMessageBatchesPostResponse(
httpClient: HttpClient,
httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
if (ReadResponseAsString)
{
@@ -102,7 +159,7 @@ partial void ProcessCreateMessageBatchResponseContent(
client: HttpClient,
response: __response,
content: ref __content);
- ProcessCreateMessageBatchResponseContent(
+ ProcessBetaMessageBatchesPostResponseContent(
httpClient: HttpClient,
httpResponseMessage: __response,
content: ref __content);
@@ -127,7 +184,7 @@ partial void ProcessCreateMessageBatchResponseContent(
}
return
- global::Anthropic.MessageBatch.FromJson(__content, JsonSerializerContext) ??
+ global::Anthropic.BetaMessageBatch.FromJson(__content, JsonSerializerContext) ??
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
}
else
@@ -153,30 +210,43 @@ partial void ProcessCreateMessageBatchResponseContent(
using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
return
- await global::Anthropic.MessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ await global::Anthropic.BetaMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
throw new global::System.InvalidOperationException("Response deserialization failed.");
}
}
///
/// Create a Message Batch
- /// Send a batch of Message creation requests.
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
///
/// List of requests for prompt completion. Each is an individual request to create a Message.
///
/// The token to cancel the operation with
///
- public async global::System.Threading.Tasks.Task CreateMessageBatchAsync(
- global::System.Collections.Generic.IList requests,
+ public async global::System.Threading.Tasks.Task BetaMessageBatchesPostAsync(
+ global::System.Collections.Generic.IList requests,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
- var __request = new global::Anthropic.CreateMessageBatchRequest
+ var __request = new global::Anthropic.BetaCreateMessageBatchParams
{
Requests = requests,
};
- return await CreateMessageBatchAsync(
+ return await BetaMessageBatchesPostAsync(
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
request: __request,
cancellationToken: cancellationToken).ConfigureAwait(false);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesPost2.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesPost2.g.cs
new file mode 100644
index 0000000..c9f5927
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesPost2.g.cs
@@ -0,0 +1,254 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesPost2Arguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ global::Anthropic.BetaCreateMessageBatchParams request);
+ partial void PrepareBetaMessageBatchesPost2Request(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ global::Anthropic.BetaCreateMessageBatchParams request);
+ partial void ProcessBetaMessageBatchesPost2Response(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesPost2ResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Create a Message Batch
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaMessageBatch> BetaMessageBatchesPost2Async(
+ global::Anthropic.BetaCreateMessageBatchParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ request = request ?? throw new global::System.ArgumentNullException(nameof(request));
+
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesPost2Arguments(
+ httpClient: HttpClient,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ request: request);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages/batches",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Post,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+ var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
+ var __httpRequestContent = new global::System.Net.Http.StringContent(
+ content: __httpRequestContentBody,
+ encoding: global::System.Text.Encoding.UTF8,
+ mediaType: "application/json");
+ __httpRequest.Content = __httpRequestContent;
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesPost2Request(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: request);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesPost2Response(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesPost2ResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaMessageBatch.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+
+ ///
+ /// Create a Message Batch
+ /// Send a batch of Message creation requests.
+ /// The Message Batches API can be used to process multiple Messages API requests at once. Once a Message Batch is created, it begins processing immediately. Batches can take up to 24 hours to complete.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// List of requests for prompt completion. Each is an individual request to create a Message.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaMessageBatch> BetaMessageBatchesPost2Async(
+ global::System.Collections.Generic.IList requests,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ var __request = new global::Anthropic.BetaCreateMessageBatchParams
+ {
+ Requests = requests,
+ };
+
+ return await BetaMessageBatchesPost2Async(
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: __request,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesResults.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesResults.g.cs
new file mode 100644
index 0000000..6386a06
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesResults.g.cs
@@ -0,0 +1,225 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesResultsArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string messageBatchId,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ ref string? xApiKey);
+ partial void PrepareBetaMessageBatchesResultsRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string messageBatchId,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ string? xApiKey);
+ partial void ProcessBetaMessageBatchesResultsResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesResultsResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Retrieve Message Batch results
+ /// Streams the results of a Message Batch as a `.jsonl` file.
+ /// Each line in the file is a JSON object containing the result of a single request in the Message Batch. Results are not guaranteed to be in the same order as requests. Use the `custom_id` field to match results to requests.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task<byte[]> BetaMessageBatchesResultsAsync(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesResultsArguments(
+ httpClient: HttpClient,
+ messageBatchId: ref messageBatchId,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ xApiKey: ref xApiKey);
+
+ var __pathBuilder = new PathBuilder(
+ path: $"/v1/messages/batches/{messageBatchId}/results?beta=true",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Get,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+ if (xApiKey != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("x-api-key", xApiKey.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesResultsRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ messageBatchId: messageBatchId,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ xApiKey: xApiKey);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesResultsResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesResultsResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(byte[]), JsonSerializerContext) as byte[] ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(byte[]), JsonSerializerContext).ConfigureAwait(false) as byte[] ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesResults2.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesResults2.g.cs
new file mode 100644
index 0000000..5ccc176
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesResults2.g.cs
@@ -0,0 +1,225 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesResults2Arguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string messageBatchId,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ ref string? xApiKey);
+ partial void PrepareBetaMessageBatchesResults2Request(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string messageBatchId,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ string? xApiKey);
+ partial void ProcessBetaMessageBatchesResults2Response(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesResults2ResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Retrieve Message Batch results
+ /// Streams the results of a Message Batch as a `.jsonl` file.
+ /// Each line in the file is a JSON object containing the result of a single request in the Message Batch. Results are not guaranteed to be in the same order as requests. Use the `custom_id` field to match results to requests.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task<byte[]> BetaMessageBatchesResults2Async(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesResults2Arguments(
+ httpClient: HttpClient,
+ messageBatchId: ref messageBatchId,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ xApiKey: ref xApiKey);
+
+ var __pathBuilder = new PathBuilder(
+ path: $"/v1/messages/batches/{messageBatchId}/results",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Get,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+ if (xApiKey != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("x-api-key", xApiKey.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesResults2Request(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ messageBatchId: messageBatchId,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ xApiKey: xApiKey);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesResults2Response(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesResults2ResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(byte[]), JsonSerializerContext) as byte[] ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(byte[]), JsonSerializerContext).ConfigureAwait(false) as byte[] ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesRetrieve.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesRetrieve.g.cs
new file mode 100644
index 0000000..03d2de2
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesRetrieve.g.cs
@@ -0,0 +1,224 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesRetrieveArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string messageBatchId,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ ref string? xApiKey);
+ partial void PrepareBetaMessageBatchesRetrieveRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string messageBatchId,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ string? xApiKey);
+ partial void ProcessBetaMessageBatchesRetrieveResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesRetrieveResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Retrieve a Message Batch
+ /// This endpoint is idempotent and can be used to poll for Message Batch completion. To access the results of a Message Batch, make a request to the `results_url` field in the response.
+ ///
+ ///
+ /// ID of the Message Batch.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaMessageBatch> BetaMessageBatchesRetrieveAsync(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesRetrieveArguments(
+ httpClient: HttpClient,
+ messageBatchId: ref messageBatchId,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ xApiKey: ref xApiKey);
+
+ var __pathBuilder = new PathBuilder(
+ path: $"/v1/messages/batches/{messageBatchId}?beta=true",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Get,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+ if (xApiKey != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("x-api-key", xApiKey.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesRetrieveRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ messageBatchId: messageBatchId,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ xApiKey: xApiKey);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesRetrieveResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesRetrieveResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaMessageBatch.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesRetrieve2.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesRetrieve2.g.cs
new file mode 100644
index 0000000..d1cf21c
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.BetaMessageBatchesRetrieve2.g.cs
@@ -0,0 +1,224 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessageBatchesClient
+ {
+ partial void PrepareBetaMessageBatchesRetrieve2Arguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string messageBatchId,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ ref string? xApiKey);
+ partial void PrepareBetaMessageBatchesRetrieve2Request(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string messageBatchId,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ string? xApiKey);
+ partial void ProcessBetaMessageBatchesRetrieve2Response(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessageBatchesRetrieve2ResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ /// <summary>
+ /// Retrieve a Message Batch
+ /// This endpoint is idempotent and can be used to poll for Message Batch completion. To access the results of a Message Batch, make a request to the `results_url` field in the response.
+ /// </summary>
+ /// <param name="messageBatchId">
+ /// ID of the Message Batch.
+ /// </param>
+ /// <param name="anthropicBeta">
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ /// </param>
+ /// <param name="anthropicVersion">
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ /// </param>
+ /// <param name="xApiKey">
+ /// Your unique API key for authentication.
+ /// This key is required in the header of all API requests, to authenticate your account and access Anthropic's services. Get your API key through the [Console](https://console.anthropic.com/settings/keys). Each key is scoped to a Workspace.
+ /// </param>
+ /// <param name="cancellationToken">The token to cancel the operation with</param>
+ /// <exception cref="global::Anthropic.ApiException"></exception>
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaMessageBatch> BetaMessageBatchesRetrieve2Async(
+ string messageBatchId,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ string? xApiKey = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessageBatchesRetrieve2Arguments(
+ httpClient: HttpClient,
+ messageBatchId: ref messageBatchId,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ xApiKey: ref xApiKey);
+
+ var __pathBuilder = new PathBuilder(
+ path: $"/v1/messages/batches/{messageBatchId}",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Get,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+ if (xApiKey != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("x-api-key", xApiKey.ToString());
+ }
+
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessageBatchesRetrieve2Request(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ messageBatchId: messageBatchId,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ xApiKey: xApiKey);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessageBatchesRetrieve2Response(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessageBatchesRetrieve2ResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaMessageBatch.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaMessageBatch.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.g.cs
new file mode 100644
index 0000000..470d473
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessageBatchesClient.g.cs
@@ -0,0 +1,86 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ /// <summary>
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ /// </summary>
+ public sealed partial class MessageBatchesClient : global::Anthropic.IMessageBatchesClient, global::System.IDisposable
+ {
+ /// <summary>
+ /// 
+ /// </summary>
+ public const string DefaultBaseUrl = "https://api.anthropic.com";
+
+ private bool _disposeHttpClient = true;
+
+ /// <inheritdoc/>
+ public global::System.Net.Http.HttpClient HttpClient { get; }
+
+ /// <inheritdoc/>
+ public System.Uri? BaseUri => HttpClient.BaseAddress;
+
+ /// <inheritdoc/>
+ public global::System.Collections.Generic.List<global::Anthropic.EndPointAuthorization> Authorizations { get; }
+
+ /// <inheritdoc/>
+ public bool ReadResponseAsString { get; set; }
+#if DEBUG
+ = true;
+#endif
+ /// <summary>
+ /// 
+ /// </summary>
+ public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::Anthropic.SourceGenerationContext.Default;
+
+
+ /// <summary>
+ /// Creates a new instance of the MessageBatchesClient.
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ /// </summary>
+ /// <param name="httpClient">The HttpClient instance. If not provided, a new one will be created.</param>
+ /// <param name="baseUri">The base URL for the API. If not provided, the default baseUri from OpenAPI spec will be used.</param>
+ /// <param name="authorizations">The authorizations to use for the requests.</param>
+ /// <param name="disposeHttpClient">Dispose the HttpClient when the instance is disposed. True by default.</param>
+ public MessageBatchesClient(
+ global::System.Net.Http.HttpClient? httpClient = null,
+ global::System.Uri? baseUri = null,
+ global::System.Collections.Generic.List<global::Anthropic.EndPointAuthorization>? authorizations = null,
+ bool disposeHttpClient = true)
+ {
+ HttpClient = httpClient ?? new global::System.Net.Http.HttpClient();
+ HttpClient.BaseAddress ??= baseUri ?? new global::System.Uri(DefaultBaseUrl);
+ Authorizations = authorizations ?? new global::System.Collections.Generic.List<global::Anthropic.EndPointAuthorization>();
+ _disposeHttpClient = disposeHttpClient;
+
+ Initialized(HttpClient);
+ }
+
+ /// <inheritdoc/>
+ public void Dispose()
+ {
+ if (_disposeHttpClient)
+ {
+ HttpClient.Dispose();
+ }
+ }
+
+ partial void Initialized(
+ global::System.Net.Http.HttpClient client);
+ partial void PrepareArguments(
+ global::System.Net.Http.HttpClient client);
+ partial void PrepareRequest(
+ global::System.Net.Http.HttpClient client,
+ global::System.Net.Http.HttpRequestMessage request);
+ partial void ProcessResponse(
+ global::System.Net.Http.HttpClient client,
+ global::System.Net.Http.HttpResponseMessage response);
+ partial void ProcessResponseContent(
+ global::System.Net.Http.HttpClient client,
+ global::System.Net.Http.HttpResponseMessage response,
+ ref string content);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesCountTokensPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesCountTokensPost.g.cs
new file mode 100644
index 0000000..112a39b
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesCountTokensPost.g.cs
@@ -0,0 +1,369 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessagesClient
+ {
+ partial void PrepareBetaMessagesCountTokensPostArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ global::Anthropic.BetaCountMessageTokensParams request);
+ partial void PrepareBetaMessagesCountTokensPostRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ global::Anthropic.BetaCountMessageTokensParams request);
+ partial void ProcessBetaMessagesCountTokensPostResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessagesCountTokensPostResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ /// <summary>
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ /// </summary>
+ /// <param name="anthropicBeta">
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ /// </param>
+ /// <param name="anthropicVersion">
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ /// </param>
+ /// <param name="request"></param>
+ /// <param name="cancellationToken">The token to cancel the operation with</param>
+ /// <exception cref="global::Anthropic.ApiException"></exception>
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaCountMessageTokensResponse> BetaMessagesCountTokensPostAsync(
+ global::Anthropic.BetaCountMessageTokensParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ request = request ?? throw new global::System.ArgumentNullException(nameof(request));
+
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessagesCountTokensPostArguments(
+ httpClient: HttpClient,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ request: request);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages/count_tokens?beta=true",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Post,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+ var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
+ var __httpRequestContent = new global::System.Net.Http.StringContent(
+ content: __httpRequestContentBody,
+ encoding: global::System.Text.Encoding.UTF8,
+ mediaType: "application/json");
+ __httpRequest.Content = __httpRequestContent;
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessagesCountTokensPostRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: request);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessagesCountTokensPostResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessagesCountTokensPostResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaCountMessageTokensResponse.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaCountMessageTokensResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+
+ /// <summary>
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ /// </summary>
+ /// <param name="anthropicBeta">
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ /// </param>
+ /// <param name="anthropicVersion">
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ /// </param>
+ /// <param name="toolChoice">
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ /// </param>
+ /// <param name="tools">
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ /// </param>
+ /// <param name="messages">
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ /// </param>
+ /// <param name="system">
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ /// </param>
+ /// <param name="model">
+ /// The model that will complete your prompt.\n\nSee [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ /// </param>
+ /// <param name="cancellationToken">The token to cancel the operation with</param>
+ /// <exception cref="global::System.InvalidOperationException"></exception>
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaCountMessageTokensResponse> BetaMessagesCountTokensPostAsync(
+ global::System.Collections.Generic.IList<global::Anthropic.BetaInputMessage> messages,
+ global::Anthropic.Model model,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.BetaToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList<global::Anthropic.BetaTool>? tools = default,
+ global::Anthropic.AnyOf<string, global::System.Collections.Generic.IList<global::Anthropic.BetaTextBlockParam>>? system = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ var __request = new global::Anthropic.BetaCountMessageTokensParams
+ {
+ ToolChoice = toolChoice,
+ Tools = tools,
+ Messages = messages,
+ System = system,
+ Model = model,
+ };
+
+ return await BetaMessagesCountTokensPostAsync(
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: __request,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesCountTokensPost2.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesCountTokensPost2.g.cs
new file mode 100644
index 0000000..3feec18
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesCountTokensPost2.g.cs
@@ -0,0 +1,369 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessagesClient
+ {
+ partial void PrepareBetaMessagesCountTokensPost2Arguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ global::Anthropic.BetaCountMessageTokensParams request);
+ partial void PrepareBetaMessagesCountTokensPost2Request(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ global::Anthropic.BetaCountMessageTokensParams request);
+ partial void ProcessBetaMessagesCountTokensPost2Response(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessagesCountTokensPost2ResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ /// <summary>
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ /// </summary>
+ /// <param name="anthropicBeta">
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ /// </param>
+ /// <param name="anthropicVersion">
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ /// </param>
+ /// <param name="request"></param>
+ /// <param name="cancellationToken">The token to cancel the operation with</param>
+ /// <exception cref="global::Anthropic.ApiException"></exception>
+ public async global::System.Threading.Tasks.Task<global::Anthropic.BetaCountMessageTokensResponse> BetaMessagesCountTokensPost2Async(
+ global::Anthropic.BetaCountMessageTokensParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ request = request ?? throw new global::System.ArgumentNullException(nameof(request));
+
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessagesCountTokensPost2Arguments(
+ httpClient: HttpClient,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ request: request);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages/count_tokens",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Post,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+ var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
+ var __httpRequestContent = new global::System.Net.Http.StringContent(
+ content: __httpRequestContentBody,
+ encoding: global::System.Text.Encoding.UTF8,
+ mediaType: "application/json");
+ __httpRequest.Content = __httpRequestContent;
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessagesCountTokensPost2Request(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: request);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessagesCountTokensPost2Response(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessagesCountTokensPost2ResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaCountMessageTokensResponse.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaCountMessageTokensResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+
+ ///
+ /// Count tokens in a Message
+ /// Count the number of tokens in a Message.
+ /// The Token Count API can be used to count the number of tokens in a Message, including tools, images, and documents, without creating it.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&amp;P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// The model that will complete your prompt. See [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task BetaMessagesCountTokensPost2Async(
+ global::System.Collections.Generic.IList messages,
+ global::Anthropic.Model model,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.BetaToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ global::Anthropic.AnyOf>? system = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ var __request = new global::Anthropic.BetaCountMessageTokensParams
+ {
+ ToolChoice = toolChoice,
+ Tools = tools,
+ Messages = messages,
+ System = system,
+ Model = model,
+ };
+
+ return await BetaMessagesCountTokensPost2Async(
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: __request,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesPost.g.cs
new file mode 100644
index 0000000..f4819e8
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.BetaMessagesPost.g.cs
@@ -0,0 +1,419 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessagesClient
+ {
+ partial void PrepareBetaMessagesPostArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ global::Anthropic.BetaCreateMessageParams request);
+ partial void PrepareBetaMessagesPostRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ global::Anthropic.BetaCreateMessageParams request);
+ partial void ProcessBetaMessagesPostResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessBetaMessagesPostResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task BetaMessagesPostAsync(
+ global::Anthropic.BetaCreateMessageParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ request = request ?? throw new global::System.ArgumentNullException(nameof(request));
+
+ PrepareArguments(
+ client: HttpClient);
+ PrepareBetaMessagesPostArguments(
+ httpClient: HttpClient,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ request: request);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages?beta=true",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Post,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+ var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
+ var __httpRequestContent = new global::System.Net.Http.StringContent(
+ content: __httpRequestContentBody,
+ encoding: global::System.Text.Encoding.UTF8,
+ mediaType: "application/json");
+ __httpRequest.Content = __httpRequestContent;
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PrepareBetaMessagesPostRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: request);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessBetaMessagesPostResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.BetaErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.BetaErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.BetaErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessBetaMessagesPostResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.BetaMessage.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.BetaMessage.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The model that will complete your prompt. See [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"},
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("},
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg...",
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// The maximum number of tokens to generate before stopping.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Different models have different maximum values for this parameter. See [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Example: 1024
+ ///
+ ///
+ /// An object describing metadata about the request.
+ ///
+ ///
+ /// Custom text sequences that will cause the model to stop generating.
+ /// Our models will normally stop when they have naturally completed their turn, which will result in a response `stop_reason` of `"end_turn"`.
+ /// If you want the model to stop generating when it encounters custom strings of text, you can use the `stop_sequences` parameter. If the model encounters one of the custom sequences, the response `stop_reason` value will be `"stop_sequence"` and the response `stop_sequence` value will contain the matched stop sequence.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for details.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// Amount of randomness injected into the response.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&amp;P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Only sample from the top K options for each subsequent token.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
+ ///
+ ///
+ /// Use nucleus sampling.
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task BetaMessagesPostAsync(
+ global::Anthropic.Model model,
+ global::System.Collections.Generic.IList messages,
+ int maxTokens,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.BetaMetadata? metadata = default,
+ global::System.Collections.Generic.IList? stopSequences = default,
+ bool? stream = default,
+ global::Anthropic.AnyOf>? system = default,
+ double? temperature = default,
+ global::Anthropic.BetaToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ int? topK = default,
+ double? topP = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ var __request = new global::Anthropic.BetaCreateMessageParams
+ {
+ Model = model,
+ Messages = messages,
+ MaxTokens = maxTokens,
+ Metadata = metadata,
+ StopSequences = stopSequences,
+ Stream = stream,
+ System = system,
+ Temperature = temperature,
+ ToolChoice = toolChoice,
+ Tools = tools,
+ TopK = topK,
+ TopP = topP,
+ };
+
+ return await BetaMessagesPostAsync(
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: __request,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.CreateMessage.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.MessagesPost.g.cs
similarity index 60%
rename from src/libs/Anthropic/Generated/Anthropic.AnthropicClient.CreateMessage.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.MessagesClient.MessagesPost.g.cs
index 0f95fc9..ac6fe15 100644
--- a/src/libs/Anthropic/Generated/Anthropic.AnthropicClient.CreateMessage.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.MessagesPost.g.cs
@@ -3,48 +3,54 @@
namespace Anthropic
{
- public partial class AnthropicClient
+ public partial class MessagesClient
{
- partial void PrepareCreateMessageArguments(
+ partial void PrepareMessagesPostArguments(
global::System.Net.Http.HttpClient httpClient,
- global::Anthropic.CreateMessageRequest request);
- partial void PrepareCreateMessageRequest(
+ ref string? anthropicVersion,
+ global::Anthropic.CreateMessageParams request);
+ partial void PrepareMessagesPostRequest(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpRequestMessage httpRequestMessage,
- global::Anthropic.CreateMessageRequest request);
- partial void ProcessCreateMessageResponse(
+ string? anthropicVersion,
+ global::Anthropic.CreateMessageParams request);
+ partial void ProcessMessagesPostResponse(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpResponseMessage httpResponseMessage);
- partial void ProcessCreateMessageResponseContent(
+ partial void ProcessMessagesPostResponseContent(
global::System.Net.Http.HttpClient httpClient,
global::System.Net.Http.HttpResponseMessage httpResponseMessage,
ref string content);
///
/// Create a Message
- /// Send a structured list of input messages with text and/or image content, and the
- /// model will generate the next message in the conversation.
- /// The Messages API can be used for either single queries or stateless multi-turn
- /// conversations.
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
///
/// The token to cancel the operation with
///
- public async global::System.Threading.Tasks.Task CreateMessageAsync(
- global::Anthropic.CreateMessageRequest request,
+ public async global::System.Threading.Tasks.Task MessagesPostAsync(
+ global::Anthropic.CreateMessageParams request,
+ string? anthropicVersion = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
request = request ?? throw new global::System.ArgumentNullException(nameof(request));
PrepareArguments(
client: HttpClient);
- PrepareCreateMessageArguments(
+ PrepareMessagesPostArguments(
httpClient: HttpClient,
+ anthropicVersion: ref anthropicVersion,
request: request);
var __pathBuilder = new PathBuilder(
- path: "/messages",
+ path: "/v1/messages",
baseUri: HttpClient.BaseAddress);
var __path = __pathBuilder.ToString();
using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
@@ -70,6 +76,12 @@ partial void ProcessCreateMessageResponseContent(
__httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
}
}
+
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
var __httpRequestContent = new global::System.Net.Http.StringContent(
content: __httpRequestContentBody,
@@ -80,9 +92,10 @@ partial void ProcessCreateMessageResponseContent(
PrepareRequest(
client: HttpClient,
request: __httpRequest);
- PrepareCreateMessageRequest(
+ PrepareMessagesPostRequest(
httpClient: HttpClient,
httpRequestMessage: __httpRequest,
+ anthropicVersion: anthropicVersion,
request: request);
using var __response = await HttpClient.SendAsync(
@@ -93,9 +106,37 @@ partial void ProcessCreateMessageResponseContent(
ProcessResponse(
client: HttpClient,
response: __response);
- ProcessCreateMessageResponse(
+ ProcessMessagesPostResponse(
httpClient: HttpClient,
httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.ErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.ErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.ErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
if (ReadResponseAsString)
{
@@ -105,7 +146,7 @@ partial void ProcessCreateMessageResponseContent(
client: HttpClient,
response: __response,
content: ref __content);
- ProcessCreateMessageResponseContent(
+ ProcessMessagesPostResponseContent(
httpClient: HttpClient,
httpResponseMessage: __response,
content: ref __content);
@@ -163,140 +204,104 @@ partial void ProcessCreateMessageResponseContent(
///
/// Create a Message
- /// Send a structured list of input messages with text and/or image content, and the
- /// model will generate the next message in the conversation.
- /// The Messages API can be used for either single queries or stateless multi-turn
- /// conversations.
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
///
- /// The model that will complete your prompt.
- /// See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
- /// details and options.
- /// Example: claude-3-5-sonnet-20241022
+ /// The model that will complete your prompt. See [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
///
///
/// Input messages.
- /// Our models are trained to operate on alternating `user` and `assistant`
- /// conversational turns. When creating a new `Message`, you specify the prior
- /// conversational turns with the `messages` parameter, and the model then generates
- /// the next `Message` in the conversation.
- /// Each input message must be an object with a `role` and `content`. You can
- /// specify a single `user`-role message, or you can include multiple `user` and
- /// `assistant` messages. The first message must always use the `user` role.
- /// If the final message uses the `assistant` role, the response content will
- /// continue immediately from the content in that message. This can be used to
- /// constrain part of the model's response.
- /// See [message content](https://docs.anthropic.com/en/api/messages-content) for
- /// details on how to construct valid message objects.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
/// Example with a single `user` message:
/// ```json
- /// [{ "role": "user", "content": "Hello, Claude" }]
+ /// [{"role": "user", "content": "Hello, Claude"}]
/// ```
/// Example with multiple conversational turns:
/// ```json
/// [
- /// { "role": "user", "content": "Hello there." },
- /// { "role": "assistant", "content": "Hi, I'm Claude. How can I help you?" },
- /// { "role": "user", "content": "Can you explain LLMs in plain English?" }
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"}
/// ]
/// ```
/// Example with a partially-filled response from Claude:
/// ```json
/// [
- /// {
- /// "role": "user",
- /// "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
- /// },
- /// { "role": "assistant", "content": "The best answer is (" }
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("}
/// ]
/// ```
- /// Each input message `content` may be either a single `string` or an array of
- /// content blocks, where each block has a specific `type`. Using a `string` for
- /// `content` is shorthand for an array of one content block of type `"text"`. The
- /// following input messages are equivalent:
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
/// ```json
- /// { "role": "user", "content": "Hello, Claude" }
+ /// {"role": "user", "content": "Hello, Claude"}
/// ```
/// ```json
- /// { "role": "user", "content": [{ "type": "text", "text": "Hello, Claude" }] }
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
/// ```
/// Starting with Claude 3 models, you can also send image content blocks:
/// ```json
- /// {
- /// "role": "user",
- /// "content": [
- /// {
- /// "type": "image",
- /// "source": {
- /// "type": "base64",
- /// "media_type": "image/jpeg",
- /// "data": "/9j/4AAQSkZJRg..."
- /// }
- /// },
- /// { "type": "text", "text": "What is in this image?" }
- /// ]
- /// }
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg..."
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
/// ```
- /// We currently support the `base64` source type for images, and the `image/jpeg`,
- /// `image/png`, `image/gif`, and `image/webp` media types.
- /// See [examples](https://docs.anthropic.com/en/api/messages-examples) for more
- /// input examples.
- /// Note that if you want to include a
- /// [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use
- /// the top-level `system` parameter — there is no `"system"` role for input
- /// messages in the Messages API.
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
///
///
/// The maximum number of tokens to generate before stopping.
- /// Note that our models may stop _before_ reaching this maximum. This parameter
- /// only specifies the absolute maximum number of tokens to generate.
- /// Different models have different maximum values for this parameter. See
- /// [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Different models have different maximum values for this parameter. See [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Example: 1024
///
///
/// An object describing metadata about the request.
///
///
/// Custom text sequences that will cause the model to stop generating.
- /// Our models will normally stop when they have naturally completed their turn,
- /// which will result in a response `stop_reason` of `"end_turn"`.
- /// If you want the model to stop generating when it encounters custom strings of
- /// text, you can use the `stop_sequences` parameter. If the model encounters one of
- /// the custom sequences, the response `stop_reason` value will be `"stop_sequence"`
- /// and the response `stop_sequence` value will contain the matched stop sequence.
+ /// Our models will normally stop when they have naturally completed their turn, which will result in a response `stop_reason` of `"end_turn"`.
+ /// If you want the model to stop generating when it encounters custom strings of text, you can use the `stop_sequences` parameter. If the model encounters one of the custom sequences, the response `stop_reason` value will be `"stop_sequence"` and the response `stop_sequence` value will contain the matched stop sequence.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for details.
///
///
/// System prompt.
- /// A system prompt is a way of providing context and instructions to Claude, such
- /// as specifying a particular goal or role. See our
- /// [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
///
///
/// Amount of randomness injected into the response.
- /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0`
- /// for analytical / multiple choice, and closer to `1.0` for creative and
- /// generative tasks.
- /// Note that even with `temperature` of `0.0`, the results will not be fully
- /// deterministic.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
///
///
- /// How the model should use the provided tools. The model can use a specific tool,
- /// any available tool, or decide by itself.
- /// - `auto`: allows Claude to decide whether to call any provided tools or not. This is the default value.
- /// - `any`: tells Claude that it must use one of the provided tools, but doesn’t force a particular tool.
- /// - `tool`: allows us to force Claude to always use a particular tool specified in the `name` field.
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
///
///
/// Definitions of tools that the model may use.
- /// If you include `tools` in your API request, the model may return `tool_use`
- /// content blocks that represent the model's use of those tools. You can then run
- /// those tools using the tool input generated by the model and then optionally
- /// return results back to the model using `tool_result` content blocks.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
/// Each tool definition includes:
- /// - `name`: Name of the tool.
- /// - `description`: Optional, but strongly-recommended description of the tool.
- /// - `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input`
- /// shape that the model will produce in `tool_use` output content blocks.
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
/// For example, if you defined `tools` as:
/// ```json
/// [
@@ -316,8 +321,7 @@ partial void ProcessCreateMessageResponseContent(
/// }
/// ]
/// ```
- /// And then asked the model "What's the S&P 500 at today?", the model might produce
- /// `tool_use` content blocks in the response like this:
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
/// ```json
/// [
/// {
@@ -328,9 +332,7 @@ partial void ProcessCreateMessageResponseContent(
/// }
/// ]
/// ```
- /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an
- /// input, and return the following back to the model in a subsequent `user`
- /// message:
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
/// ```json
/// [
/// {
@@ -340,67 +342,57 @@ partial void ProcessCreateMessageResponseContent(
/// }
/// ]
/// ```
- /// Tools can be used for workflows that include running client-side tools and
- /// functions, or more generally whenever you want the model to produce a particular
- /// JSON structure of output.
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
/// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
///
///
/// Only sample from the top K options for each subsequent token.
- /// Used to remove "long tail" low probability responses.
- /// [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
- /// Recommended for advanced use cases only. You usually only need to use
- /// `temperature`.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
///
///
/// Use nucleus sampling.
- /// In nucleus sampling, we compute the cumulative distribution over all the options
- /// for each subsequent token in decreasing probability order and cut it off once it
- /// reaches a particular probability specified by `top_p`. You should either alter
- /// `temperature` or `top_p`, but not both.
- /// Recommended for advanced use cases only. You usually only need to use
- /// `temperature`.
- ///
- ///
- /// Whether to incrementally stream the response using server-sent events.
- /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for
- /// details.
- /// Default Value: false
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
///
/// The token to cancel the operation with
///
- public async global::System.Threading.Tasks.Task CreateMessageAsync(
- global::Anthropic.AnyOf model,
- global::System.Collections.Generic.IList messages,
+ public async global::System.Threading.Tasks.Task MessagesPostAsync(
+ global::Anthropic.Model model,
+ global::System.Collections.Generic.IList messages,
int maxTokens,
- global::Anthropic.CreateMessageRequestMetadata? metadata = default,
+ string? anthropicVersion = default,
+ global::Anthropic.Metadata? metadata = default,
global::System.Collections.Generic.IList? stopSequences = default,
- global::Anthropic.OneOf>? system = default,
+ bool? stream = default,
+ global::Anthropic.AnyOf>? system = default,
double? temperature = default,
global::Anthropic.ToolChoice? toolChoice = default,
global::System.Collections.Generic.IList? tools = default,
int? topK = default,
double? topP = default,
- bool? stream = default,
global::System.Threading.CancellationToken cancellationToken = default)
{
- var __request = new global::Anthropic.CreateMessageRequest
+ var __request = new global::Anthropic.CreateMessageParams
{
Model = model,
Messages = messages,
MaxTokens = maxTokens,
Metadata = metadata,
StopSequences = stopSequences,
+ Stream = stream,
System = system,
Temperature = temperature,
ToolChoice = toolChoice,
Tools = tools,
TopK = topK,
TopP = topP,
- Stream = stream,
};
- return await CreateMessageAsync(
+ return await MessagesPostAsync(
+ anthropicVersion: anthropicVersion,
request: __request,
cancellationToken: cancellationToken).ConfigureAwait(false);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessagesClient.PromptCachingBetaMessagesPost.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.PromptCachingBetaMessagesPost.g.cs
new file mode 100644
index 0000000..958698e
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.PromptCachingBetaMessagesPost.g.cs
@@ -0,0 +1,419 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ public partial class MessagesClient
+ {
+ partial void PreparePromptCachingBetaMessagesPostArguments(
+ global::System.Net.Http.HttpClient httpClient,
+ ref string? anthropicBeta,
+ ref string? anthropicVersion,
+ global::Anthropic.PromptCachingBetaCreateMessageParams request);
+ partial void PreparePromptCachingBetaMessagesPostRequest(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpRequestMessage httpRequestMessage,
+ string? anthropicBeta,
+ string? anthropicVersion,
+ global::Anthropic.PromptCachingBetaCreateMessageParams request);
+ partial void ProcessPromptCachingBetaMessagesPostResponse(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage);
+
+ partial void ProcessPromptCachingBetaMessagesPostResponseContent(
+ global::System.Net.Http.HttpClient httpClient,
+ global::System.Net.Http.HttpResponseMessage httpResponseMessage,
+ ref string content);
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task PromptCachingBetaMessagesPostAsync(
+ global::Anthropic.PromptCachingBetaCreateMessageParams request,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ request = request ?? throw new global::System.ArgumentNullException(nameof(request));
+
+ PrepareArguments(
+ client: HttpClient);
+ PreparePromptCachingBetaMessagesPostArguments(
+ httpClient: HttpClient,
+ anthropicBeta: ref anthropicBeta,
+ anthropicVersion: ref anthropicVersion,
+ request: request);
+
+ var __pathBuilder = new PathBuilder(
+ path: "/v1/messages?beta=prompt_caching",
+ baseUri: HttpClient.BaseAddress);
+ var __path = __pathBuilder.ToString();
+ using var __httpRequest = new global::System.Net.Http.HttpRequestMessage(
+ method: global::System.Net.Http.HttpMethod.Post,
+ requestUri: new global::System.Uri(__path, global::System.UriKind.RelativeOrAbsolute));
+#if NET6_0_OR_GREATER
+ __httpRequest.Version = global::System.Net.HttpVersion.Version11;
+ __httpRequest.VersionPolicy = global::System.Net.Http.HttpVersionPolicy.RequestVersionOrHigher;
+#endif
+
+ foreach (var __authorization in Authorizations)
+ {
+ if (__authorization.Type == "Http" ||
+ __authorization.Type == "OAuth2")
+ {
+ __httpRequest.Headers.Authorization = new global::System.Net.Http.Headers.AuthenticationHeaderValue(
+ scheme: __authorization.Name,
+ parameter: __authorization.Value);
+ }
+ else if (__authorization.Type == "ApiKey" &&
+ __authorization.Location == "Header")
+ {
+ __httpRequest.Headers.Add(__authorization.Name, __authorization.Value);
+ }
+ }
+
+ if (anthropicBeta != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-beta", anthropicBeta.ToString());
+ }
+ if (anthropicVersion != default)
+ {
+ __httpRequest.Headers.TryAddWithoutValidation("anthropic-version", anthropicVersion.ToString());
+ }
+
+ var __httpRequestContentBody = request.ToJson(JsonSerializerContext);
+ var __httpRequestContent = new global::System.Net.Http.StringContent(
+ content: __httpRequestContentBody,
+ encoding: global::System.Text.Encoding.UTF8,
+ mediaType: "application/json");
+ __httpRequest.Content = __httpRequestContent;
+
+ PrepareRequest(
+ client: HttpClient,
+ request: __httpRequest);
+ PreparePromptCachingBetaMessagesPostRequest(
+ httpClient: HttpClient,
+ httpRequestMessage: __httpRequest,
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: request);
+
+ using var __response = await HttpClient.SendAsync(
+ request: __httpRequest,
+ completionOption: global::System.Net.Http.HttpCompletionOption.ResponseContentRead,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+
+ ProcessResponse(
+ client: HttpClient,
+ response: __response);
+ ProcessPromptCachingBetaMessagesPostResponse(
+ httpClient: HttpClient,
+ httpResponseMessage: __response);
+ // Error response. See our [errors documentation](https://docs.anthropic.com/en/api/errors) for more details.
+ if ((int)__response.StatusCode >= 400 && (int)__response.StatusCode <= 499)
+ {
+ string? __content_4XX = null;
+ global::Anthropic.ErrorResponse? __value_4XX = null;
+ if (ReadResponseAsString)
+ {
+ __content_4XX = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = global::Anthropic.ErrorResponse.FromJson(__content_4XX, JsonSerializerContext);
+ }
+ else
+ {
+ var __contentStream_4XX = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ __value_4XX = await global::Anthropic.ErrorResponse.FromJsonStreamAsync(__contentStream_4XX, JsonSerializerContext).ConfigureAwait(false);
+ }
+
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content_4XX,
+ ResponseObject = __value_4XX,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ if (ReadResponseAsString)
+ {
+ var __content = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+
+ ProcessResponseContent(
+ client: HttpClient,
+ response: __response,
+ content: ref __content);
+ ProcessPromptCachingBetaMessagesPostResponseContent(
+ httpClient: HttpClient,
+ httpResponseMessage: __response,
+ content: ref __content);
+
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __content ?? __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseBody = __content,
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ return
+ global::Anthropic.PromptCachingBetaMessage.FromJson(__content, JsonSerializerContext) ??
+ throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" ");
+ }
+ else
+ {
+ try
+ {
+ __response.EnsureSuccessStatusCode();
+ }
+ catch (global::System.Net.Http.HttpRequestException __ex)
+ {
+ throw new global::Anthropic.ApiException(
+ message: __response.ReasonPhrase ?? string.Empty,
+ innerException: __ex,
+ statusCode: __response.StatusCode)
+ {
+ ResponseHeaders = global::System.Linq.Enumerable.ToDictionary(
+ __response.Headers,
+ h => h.Key,
+ h => h.Value),
+ };
+ }
+
+ using var __content = await __response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+
+ return
+ await global::Anthropic.PromptCachingBetaMessage.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ??
+ throw new global::System.InvalidOperationException("Response deserialization failed.");
+ }
+ }
+
+ ///
+ /// Create a Message
+ /// Send a structured list of input messages with text and/or image content, and the model will generate the next message in the conversation.
+ /// The Messages API can be used for either single queries or stateless multi-turn conversations.
+ ///
+ ///
+ /// Optional header to specify the beta version(s) you want to use.
+ /// To use multiple betas, use a comma separated list like `beta1,beta2` or specify the header multiple times for each beta.
+ ///
+ ///
+ /// The version of the Anthropic API you want to use.
+ /// Read more about versioning and our version history [here](https://docs.anthropic.com/en/api/versioning).
+ ///
+ ///
+ /// The model that will complete your prompt. See [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options.
+ ///
+ ///
+ /// Input messages.
+ /// Our models are trained to operate on alternating `user` and `assistant` conversational turns. When creating a new `Message`, you specify the prior conversational turns with the `messages` parameter, and the model then generates the next `Message` in the conversation. Consecutive `user` or `assistant` turns in your request will be combined into a single turn.
+ /// Each input message must be an object with a `role` and `content`. You can specify a single `user`-role message, or you can include multiple `user` and `assistant` messages.
+ /// If the final message uses the `assistant` role, the response content will continue immediately from the content in that message. This can be used to constrain part of the model's response.
+ /// Example with a single `user` message:
+ /// ```json
+ /// [{"role": "user", "content": "Hello, Claude"}]
+ /// ```
+ /// Example with multiple conversational turns:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "Hello there."},
+ /// {"role": "assistant", "content": "Hi, I'm Claude. How can I help you?"},
+ /// {"role": "user", "content": "Can you explain LLMs in plain English?"}
+ /// ]
+ /// ```
+ /// Example with a partially-filled response from Claude:
+ /// ```json
+ /// [
+ /// {"role": "user", "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"},
+ /// {"role": "assistant", "content": "The best answer is ("}
+ /// ]
+ /// ```
+ /// Each input message `content` may be either a single `string` or an array of content blocks, where each block has a specific `type`. Using a `string` for `content` is shorthand for an array of one content block of type `"text"`. The following input messages are equivalent:
+ /// ```json
+ /// {"role": "user", "content": "Hello, Claude"}
+ /// ```
+ /// ```json
+ /// {"role": "user", "content": [{"type": "text", "text": "Hello, Claude"}]}
+ /// ```
+ /// Starting with Claude 3 models, you can also send image content blocks:
+ /// ```json
+ /// {"role": "user", "content": [
+ /// {
+ /// "type": "image",
+ /// "source": {
+ /// "type": "base64",
+ /// "media_type": "image/jpeg",
+ /// "data": "/9j/4AAQSkZJRg..."
+ /// }
+ /// },
+ /// {"type": "text", "text": "What is in this image?"}
+ /// ]}
+ /// ```
+ /// We currently support the `base64` source type for images, and the `image/jpeg`, `image/png`, `image/gif`, and `image/webp` media types.
+ /// See [examples](https://docs.anthropic.com/en/api/messages-examples#vision) for more input examples.
+ /// Note that if you want to include a [system prompt](https://docs.anthropic.com/en/docs/system-prompts), you can use the top-level `system` parameter — there is no `"system"` role for input messages in the Messages API.
+ ///
+ ///
+ /// The maximum number of tokens to generate before stopping.
+ /// Note that our models may stop _before_ reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
+ /// Different models have different maximum values for this parameter. See [models](https://docs.anthropic.com/en/docs/models-overview) for details.
+ /// Example: 1024
+ ///
+ ///
+ /// An object describing metadata about the request.
+ ///
+ ///
+ /// Custom text sequences that will cause the model to stop generating.
+ /// Our models will normally stop when they have naturally completed their turn, which will result in a response `stop_reason` of `"end_turn"`.
+ /// If you want the model to stop generating when it encounters custom strings of text, you can use the `stop_sequences` parameter. If the model encounters one of the custom sequences, the response `stop_reason` value will be `"stop_sequence"` and the response `stop_sequence` value will contain the matched stop sequence.
+ ///
+ ///
+ /// Whether to incrementally stream the response using server-sent events.
+ /// See [streaming](https://docs.anthropic.com/en/api/messages-streaming) for details.
+ ///
+ ///
+ /// System prompt.
+ /// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [guide to system prompts](https://docs.anthropic.com/en/docs/system-prompts).
+ /// Example: []
+ ///
+ ///
+ /// Amount of randomness injected into the response.
+ /// Defaults to `1.0`. Ranges from `0.0` to `1.0`. Use `temperature` closer to `0.0` for analytical / multiple choice, and closer to `1.0` for creative and generative tasks.
+ /// Note that even with `temperature` of `0.0`, the results will not be fully deterministic.
+ /// Example: 1
+ ///
+ ///
+ /// How the model should use the provided tools. The model can use a specific tool, any available tool, or decide by itself.
+ ///
+ ///
+ /// Definitions of tools that the model may use.
+ /// If you include `tools` in your API request, the model may return `tool_use` content blocks that represent the model's use of those tools. You can then run those tools using the tool input generated by the model and then optionally return results back to the model using `tool_result` content blocks.
+ /// Each tool definition includes:
+ /// * `name`: Name of the tool.
+ /// * `description`: Optional, but strongly-recommended description of the tool.
+ /// * `input_schema`: [JSON schema](https://json-schema.org/) for the tool `input` shape that the model will produce in `tool_use` output content blocks.
+ /// For example, if you defined `tools` as:
+ /// ```json
+ /// [
+ /// {
+ /// "name": "get_stock_price",
+ /// "description": "Get the current stock price for a given ticker symbol.",
+ /// "input_schema": {
+ /// "type": "object",
+ /// "properties": {
+ /// "ticker": {
+ /// "type": "string",
+ /// "description": "The stock ticker symbol, e.g. AAPL for Apple Inc."
+ /// }
+ /// },
+ /// "required": ["ticker"]
+ /// }
+ /// }
+ /// ]
+ /// ```
+ /// And then asked the model "What's the S&P 500 at today?", the model might produce `tool_use` content blocks in the response like this:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_use",
+ /// "id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "name": "get_stock_price",
+ /// "input": { "ticker": "^GSPC" }
+ /// }
+ /// ]
+ /// ```
+ /// You might then run your `get_stock_price` tool with `{"ticker": "^GSPC"}` as an input, and return the following back to the model in a subsequent `user` message:
+ /// ```json
+ /// [
+ /// {
+ /// "type": "tool_result",
+ /// "tool_use_id": "toolu_01D7FLrfh4GYq7yT1ULFeyMV",
+ /// "content": "259.75 USD"
+ /// }
+ /// ]
+ /// ```
+ /// Tools can be used for workflows that include running client-side tools and functions, or more generally whenever you want the model to produce a particular JSON structure of output.
+ /// See our [guide](https://docs.anthropic.com/en/docs/tool-use) for more details.
+ ///
+ ///
+ /// Only sample from the top K options for each subsequent token.
+ /// Used to remove "long tail" low probability responses. [Learn more technical details here](https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277).
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 5
+ ///
+ ///
+ /// Use nucleus sampling.
+ /// In nucleus sampling, we compute the cumulative distribution over all the options for each subsequent token in decreasing probability order and cut it off once it reaches a particular probability specified by `top_p`. You should either alter `temperature` or `top_p`, but not both.
+ /// Recommended for advanced use cases only. You usually only need to use `temperature`.
+ /// Example: 0.7
+ ///
+ /// The token to cancel the operation with
+ ///
+ public async global::System.Threading.Tasks.Task PromptCachingBetaMessagesPostAsync(
+ global::Anthropic.Model model,
+ global::System.Collections.Generic.IList messages,
+ int maxTokens,
+ string? anthropicBeta = default,
+ string? anthropicVersion = default,
+ global::Anthropic.Metadata? metadata = default,
+ global::System.Collections.Generic.IList? stopSequences = default,
+ bool? stream = default,
+ global::Anthropic.AnyOf>? system = default,
+ double? temperature = default,
+ global::Anthropic.ToolChoice? toolChoice = default,
+ global::System.Collections.Generic.IList? tools = default,
+ int? topK = default,
+ double? topP = default,
+ global::System.Threading.CancellationToken cancellationToken = default)
+ {
+ var __request = new global::Anthropic.PromptCachingBetaCreateMessageParams
+ {
+ Model = model,
+ Messages = messages,
+ MaxTokens = maxTokens,
+ Metadata = metadata,
+ StopSequences = stopSequences,
+ Stream = stream,
+ System = system,
+ Temperature = temperature,
+ ToolChoice = toolChoice,
+ Tools = tools,
+ TopK = topK,
+ TopP = topP,
+ };
+
+ return await PromptCachingBetaMessagesPostAsync(
+ anthropicBeta: anthropicBeta,
+ anthropicVersion: anthropicVersion,
+ request: __request,
+ cancellationToken: cancellationToken).ConfigureAwait(false);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.MessagesClient.g.cs b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.g.cs
new file mode 100644
index 0000000..8160952
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.MessagesClient.g.cs
@@ -0,0 +1,86 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ ///
+ public sealed partial class MessagesClient : global::Anthropic.IMessagesClient, global::System.IDisposable
+ {
+ ///
+ ///
+ ///
+ public const string DefaultBaseUrl = "https://api.anthropic.com";
+
+ private bool _disposeHttpClient = true;
+
+ ///
+ public global::System.Net.Http.HttpClient HttpClient { get; }
+
+ ///
+ public System.Uri? BaseUri => HttpClient.BaseAddress;
+
+ ///
+ public global::System.Collections.Generic.List Authorizations { get; }
+
+ ///
+ public bool ReadResponseAsString { get; set; }
+#if DEBUG
+ = true;
+#endif
+ ///
+ ///
+ ///
+ public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::Anthropic.SourceGenerationContext.Default;
+
+
+ ///
+ /// Creates a new instance of the MessagesClient.
+ /// If no httpClient is provided, a new one will be created.
+ /// If no baseUri is provided, the default baseUri from OpenAPI spec will be used.
+ ///
+ /// The HttpClient instance. If not provided, a new one will be created.
+ /// The base URL for the API. If not provided, the default baseUri from OpenAPI spec will be used.
+ /// The authorizations to use for the requests.
+ /// Dispose the HttpClient when the instance is disposed. True by default.
+ public MessagesClient(
+ global::System.Net.Http.HttpClient? httpClient = null,
+ global::System.Uri? baseUri = null,
+ global::System.Collections.Generic.List? authorizations = null,
+ bool disposeHttpClient = true)
+ {
+ HttpClient = httpClient ?? new global::System.Net.Http.HttpClient();
+ HttpClient.BaseAddress ??= baseUri ?? new global::System.Uri(DefaultBaseUrl);
+ Authorizations = authorizations ?? new global::System.Collections.Generic.List();
+ _disposeHttpClient = disposeHttpClient;
+
+ Initialized(HttpClient);
+ }
+
+ ///
+ public void Dispose()
+ {
+ if (_disposeHttpClient)
+ {
+ HttpClient.Dispose();
+ }
+ }
+
+ partial void Initialized(
+ global::System.Net.Http.HttpClient client);
+ partial void PrepareArguments(
+ global::System.Net.Http.HttpClient client);
+ partial void PrepareRequest(
+ global::System.Net.Http.HttpClient client,
+ global::System.Net.Http.HttpRequestMessage request);
+ partial void ProcessResponse(
+ global::System.Net.Http.HttpClient client,
+ global::System.Net.Http.HttpResponseMessage response);
+ partial void ProcessResponseContent(
+ global::System.Net.Http.HttpClient client,
+ global::System.Net.Http.HttpResponseMessage response,
+ ref string content);
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.TextBlock.Json.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.APIError.Json.g.cs
similarity index 88%
rename from src/libs/Anthropic/Generated/Anthropic.Models.TextBlock.Json.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.APIError.Json.g.cs
index 6e616a5..a00f116 100644
--- a/src/libs/Anthropic/Generated/Anthropic.Models.TextBlock.Json.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.APIError.Json.g.cs
@@ -2,7 +2,7 @@
namespace Anthropic
{
- public sealed partial class TextBlock
+ public sealed partial class APIError
{
///
/// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
@@ -34,14 +34,14 @@ public string ToJson(
///
/// Deserializes a JSON string using the provided JsonSerializerContext.
///
- public static global::Anthropic.TextBlock? FromJson(
+ public static global::Anthropic.APIError? FromJson(
string json,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Deserialize(
json,
- typeof(global::Anthropic.TextBlock),
- jsonSerializerContext) as global::Anthropic.TextBlock;
+ typeof(global::Anthropic.APIError),
+ jsonSerializerContext) as global::Anthropic.APIError;
}
///
@@ -51,11 +51,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::Anthropic.TextBlock? FromJson(
+ public static global::Anthropic.APIError? FromJson(
string json,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.Deserialize(
+ return global::System.Text.Json.JsonSerializer.Deserialize(
json,
jsonSerializerOptions);
}
@@ -63,14 +63,14 @@ public string ToJson(
///
/// Deserializes a JSON stream using the provided JsonSerializerContext.
///
- public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
- typeof(global::Anthropic.TextBlock),
- jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.TextBlock;
+ typeof(global::Anthropic.APIError),
+ jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.APIError;
}
///
@@ -80,11 +80,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.DeserializeAsync(
+ return global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
jsonSerializerOptions);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.APIError.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.APIError.g.cs
new file mode 100644
index 0000000..7c48ee4
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.APIError.g.cs
@@ -0,0 +1,58 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public sealed partial class APIError
+ {
+ ///
+ /// Default Value: api_error
+ ///
+ /// global::Anthropic.APIErrorType.ApiError
+ [global::System.Text.Json.Serialization.JsonPropertyName("type")]
+ [global::System.Text.Json.Serialization.JsonConverter(typeof(global::Anthropic.JsonConverters.APIErrorTypeJsonConverter))]
+ public global::Anthropic.APIErrorType Type { get; set; } = global::Anthropic.APIErrorType.ApiError;
+
+ ///
+ /// Default Value: Internal server error
+ ///
+ /// "Internal server error"
+ [global::System.Text.Json.Serialization.JsonPropertyName("message")]
+ [global::System.Text.Json.Serialization.JsonRequired]
+ public required string Message { get; set; } = "Internal server error";
+
+ ///
+ /// Additional properties that are not explicitly defined in the schema
+ ///
+ [global::System.Text.Json.Serialization.JsonExtensionData]
+ public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary();
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ ///
+ /// Default Value: api_error
+ ///
+ ///
+ /// Default Value: Internal server error
+ ///
+ [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
+ public APIError(
+ string message,
+ global::Anthropic.APIErrorType type = global::Anthropic.APIErrorType.ApiError)
+ {
+ this.Message = message ?? throw new global::System.ArgumentNullException(nameof(message));
+ this.Type = type;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public APIError()
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.APIErrorType.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.APIErrorType.g.cs
new file mode 100644
index 0000000..c4d9149
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.APIErrorType.g.cs
@@ -0,0 +1,45 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// Default Value: api_error
+ ///
+ public enum APIErrorType
+ {
+ ///
+ ///
+ ///
+ ApiError,
+ }
+
+ ///
+ /// Enum extensions to do fast conversions without the reflection.
+ ///
+ public static class APIErrorTypeExtensions
+ {
+ ///
+ /// Converts an enum to a string.
+ ///
+ public static string ToValueString(this APIErrorType value)
+ {
+ return value switch
+ {
+ APIErrorType.ApiError => "api_error",
+ _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null),
+ };
+ }
+ ///
+ /// Converts an string to a enum.
+ ///
+ public static APIErrorType? ToEnum(string value)
+ {
+ return value switch
+ {
+ "api_error" => APIErrorType.ApiError,
+ _ => null,
+ };
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/OneOf.2.Json.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBeta.Json.g.cs
similarity index 88%
rename from src/libs/Anthropic/Generated/OneOf.2.Json.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBeta.Json.g.cs
index 7ace408..673af2e 100644
--- a/src/libs/Anthropic/Generated/OneOf.2.Json.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBeta.Json.g.cs
@@ -2,7 +2,7 @@
namespace Anthropic
{
- public readonly partial struct OneOf
+ public readonly partial struct AnthropicBeta
{
///
/// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
@@ -34,14 +34,14 @@ public string ToJson(
///
/// Deserializes a JSON string using the provided JsonSerializerContext.
///
- public static global::Anthropic.OneOf? FromJson(
+ public static global::Anthropic.AnthropicBeta? FromJson(
string json,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Deserialize(
json,
- typeof(global::Anthropic.OneOf),
- jsonSerializerContext) as global::Anthropic.OneOf?;
+ typeof(global::Anthropic.AnthropicBeta),
+ jsonSerializerContext) as global::Anthropic.AnthropicBeta?;
}
///
@@ -51,11 +51,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::Anthropic.OneOf? FromJson(
+ public static global::Anthropic.AnthropicBeta? FromJson(
string json,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.Deserialize>(
+ return global::System.Text.Json.JsonSerializer.Deserialize(
json,
jsonSerializerOptions);
}
@@ -63,14 +63,14 @@ public string ToJson(
///
/// Deserializes a JSON stream using the provided JsonSerializerContext.
///
- public static async global::System.Threading.Tasks.ValueTask?> FromJsonStreamAsync(
+ public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
- typeof(global::Anthropic.OneOf),
- jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.OneOf?;
+ typeof(global::Anthropic.AnthropicBeta),
+ jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.AnthropicBeta?;
}
///
@@ -80,11 +80,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::System.Threading.Tasks.ValueTask?> FromJsonStreamAsync(
+ public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.DeserializeAsync?>(
+ return global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
jsonSerializerOptions);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBeta.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBeta.g.cs
new file mode 100644
index 0000000..affc646
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBeta.g.cs
@@ -0,0 +1,214 @@
+#pragma warning disable CS0618 // Type or member is obsolete
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public readonly partial struct AnthropicBeta : global::System.IEquatable
+ {
+ ///
+ ///
+ ///
+#if NET6_0_OR_GREATER
+ public string? Value1 { get; init; }
+#else
+ public string? Value1 { get; }
+#endif
+
+ ///
+ ///
+ ///
+#if NET6_0_OR_GREATER
+ [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(Value1))]
+#endif
+ public bool IsValue1 => Value1 != null;
+
+ ///
+ ///
+ ///
+ public static implicit operator AnthropicBeta(string value) => new AnthropicBeta(value);
+
+ ///
+ ///
+ ///
+ public static implicit operator string?(AnthropicBeta @this) => @this.Value1;
+
+ ///
+ ///
+ ///
+ public AnthropicBeta(string? value)
+ {
+ Value1 = value;
+ }
+
+ ///
+ ///
+ ///
+#if NET6_0_OR_GREATER
+ public global::Anthropic.AnthropicBetaEnum? Value2 { get; init; }
+#else
+ public global::Anthropic.AnthropicBetaEnum? Value2 { get; }
+#endif
+
+ ///
+ ///
+ ///
+#if NET6_0_OR_GREATER
+ [global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(Value2))]
+#endif
+ public bool IsValue2 => Value2 != null;
+
+ ///
+ ///
+ ///
+ public static implicit operator AnthropicBeta(global::Anthropic.AnthropicBetaEnum value) => new AnthropicBeta(value);
+
+ ///
+ ///
+ ///
+ public static implicit operator global::Anthropic.AnthropicBetaEnum?(AnthropicBeta @this) => @this.Value2;
+
+ ///
+ ///
+ ///
+ public AnthropicBeta(global::Anthropic.AnthropicBetaEnum? value)
+ {
+ Value2 = value;
+ }
+
+ ///
+ ///
+ ///
+ public AnthropicBeta(
+ string? value1,
+ global::Anthropic.AnthropicBetaEnum? value2
+ )
+ {
+ Value1 = value1;
+ Value2 = value2;
+ }
+
+ ///
+ ///
+ ///
+ public object? Object =>
+ Value2 as object ??
+ Value1 as object
+ ;
+
+ ///
+ ///
+ ///
+ public bool Validate()
+ {
+ return IsValue1 || IsValue2;
+ }
+
+ ///
+ ///
+ ///
+ public TResult? Match(
+ global::System.Func? value1 = null,
+ global::System.Func? value2 = null,
+ bool validate = true)
+ {
+ if (validate)
+ {
+ Validate();
+ }
+
+ if (IsValue1 && value1 != null)
+ {
+ return value1(Value1!);
+ }
+ else if (IsValue2 && value2 != null)
+ {
+ return value2(Value2!);
+ }
+
+ return default(TResult);
+ }
+
+ ///
+ ///
+ ///
+ public void Match(
+ global::System.Action? value1 = null,
+ global::System.Action? value2 = null,
+ bool validate = true)
+ {
+ if (validate)
+ {
+ Validate();
+ }
+
+ if (IsValue1)
+ {
+ value1?.Invoke(Value1!);
+ }
+ else if (IsValue2)
+ {
+ value2?.Invoke(Value2!);
+ }
+ }
+
+ ///
+ ///
+ ///
+ public override int GetHashCode()
+ {
+ var fields = new object?[]
+ {
+ Value1,
+ typeof(string),
+ Value2,
+ typeof(global::Anthropic.AnthropicBetaEnum),
+ };
+ const int offset = unchecked((int)2166136261);
+ const int prime = 16777619;
+ static int HashCodeAggregator(int hashCode, object? value) => value == null
+ ? (hashCode ^ 0) * prime
+ : (hashCode ^ value.GetHashCode()) * prime;
+
+ return global::System.Linq.Enumerable.Aggregate(fields, offset, HashCodeAggregator);
+ }
+
+ ///
+ ///
+ ///
+ public bool Equals(AnthropicBeta other)
+ {
+ return
+ global::System.Collections.Generic.EqualityComparer.Default.Equals(Value1, other.Value1) &&
+ global::System.Collections.Generic.EqualityComparer.Default.Equals(Value2, other.Value2)
+ ;
+ }
+
+ ///
+ ///
+ ///
+ public static bool operator ==(AnthropicBeta obj1, AnthropicBeta obj2)
+ {
+ return global::System.Collections.Generic.EqualityComparer.Default.Equals(obj1, obj2);
+ }
+
+ ///
+ ///
+ ///
+ public static bool operator !=(AnthropicBeta obj1, AnthropicBeta obj2)
+ {
+ return !(obj1 == obj2);
+ }
+
+ ///
+ ///
+ ///
+ public override bool Equals(object? obj)
+ {
+ return obj is AnthropicBeta o && Equals(o);
+ }
+ }
+}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBetaEnum.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBetaEnum.g.cs
new file mode 100644
index 0000000..3ef4f29
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.AnthropicBetaEnum.g.cs
@@ -0,0 +1,69 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public enum AnthropicBetaEnum
+ {
+ ///
+ ///
+ ///
+ MessageBatches20240924,
+ ///
+ ///
+ ///
+ PromptCaching20240731,
+ ///
+ ///
+ ///
+ ComputerUse20241022,
+ ///
+ ///
+ ///
+ Pdfs20240925,
+ ///
+ ///
+ ///
+ TokenCounting20241101,
+ }
+
+ ///
+ /// Enum extensions to do fast conversions without the reflection.
+ ///
+ public static class AnthropicBetaEnumExtensions
+ {
+ ///
+ /// Converts an enum to a string.
+ ///
+ public static string ToValueString(this AnthropicBetaEnum value)
+ {
+ return value switch
+ {
+ AnthropicBetaEnum.MessageBatches20240924 => "message-batches-2024-09-24",
+ AnthropicBetaEnum.PromptCaching20240731 => "prompt-caching-2024-07-31",
+ AnthropicBetaEnum.ComputerUse20241022 => "computer-use-2024-10-22",
+ AnthropicBetaEnum.Pdfs20240925 => "pdfs-2024-09-25",
+ AnthropicBetaEnum.TokenCounting20241101 => "token-counting-2024-11-01",
+ _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null),
+ };
+ }
+ ///
+ /// Converts an string to a enum.
+ ///
+ public static AnthropicBetaEnum? ToEnum(string value)
+ {
+ return value switch
+ {
+ "message-batches-2024-09-24" => AnthropicBetaEnum.MessageBatches20240924,
+ "prompt-caching-2024-07-31" => AnthropicBetaEnum.PromptCaching20240731,
+ "computer-use-2024-10-22" => AnthropicBetaEnum.ComputerUse20241022,
+ "pdfs-2024-09-25" => AnthropicBetaEnum.Pdfs20240925,
+ "token-counting-2024-11-01" => AnthropicBetaEnum.TokenCounting20241101,
+ _ => null,
+ };
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.BatchMessageRequest.Json.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationError.Json.g.cs
similarity index 87%
rename from src/libs/Anthropic/Generated/Anthropic.Models.BatchMessageRequest.Json.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationError.Json.g.cs
index 4ef03d5..0ac10de 100644
--- a/src/libs/Anthropic/Generated/Anthropic.Models.BatchMessageRequest.Json.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationError.Json.g.cs
@@ -2,7 +2,7 @@
namespace Anthropic
{
- public sealed partial class BatchMessageRequest
+ public sealed partial class AuthenticationError
{
///
/// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
@@ -34,14 +34,14 @@ public string ToJson(
///
/// Deserializes a JSON string using the provided JsonSerializerContext.
///
- public static global::Anthropic.BatchMessageRequest? FromJson(
+ public static global::Anthropic.AuthenticationError? FromJson(
string json,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Deserialize(
json,
- typeof(global::Anthropic.BatchMessageRequest),
- jsonSerializerContext) as global::Anthropic.BatchMessageRequest;
+ typeof(global::Anthropic.AuthenticationError),
+ jsonSerializerContext) as global::Anthropic.AuthenticationError;
}
///
@@ -51,11 +51,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::Anthropic.BatchMessageRequest? FromJson(
+ public static global::Anthropic.AuthenticationError? FromJson(
string json,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.Deserialize(
+ return global::System.Text.Json.JsonSerializer.Deserialize(
json,
jsonSerializerOptions);
}
@@ -63,14 +63,14 @@ public string ToJson(
///
/// Deserializes a JSON stream using the provided JsonSerializerContext.
///
- public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
- typeof(global::Anthropic.BatchMessageRequest),
- jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.BatchMessageRequest;
+ typeof(global::Anthropic.AuthenticationError),
+ jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.AuthenticationError;
}
///
@@ -80,11 +80,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.DeserializeAsync(
+ return global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
jsonSerializerOptions);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationError.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationError.g.cs
new file mode 100644
index 0000000..2ffa44c
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationError.g.cs
@@ -0,0 +1,58 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public sealed partial class AuthenticationError
+ {
+ ///
+ /// Default Value: authentication_error
+ ///
+ /// global::Anthropic.AuthenticationErrorType.AuthenticationError
+ [global::System.Text.Json.Serialization.JsonPropertyName("type")]
+ [global::System.Text.Json.Serialization.JsonConverter(typeof(global::Anthropic.JsonConverters.AuthenticationErrorTypeJsonConverter))]
+ public global::Anthropic.AuthenticationErrorType Type { get; set; } = global::Anthropic.AuthenticationErrorType.AuthenticationError;
+
+ ///
+ /// Default Value: Authentication error
+ ///
+ /// "Authentication error"
+ [global::System.Text.Json.Serialization.JsonPropertyName("message")]
+ [global::System.Text.Json.Serialization.JsonRequired]
+ public required string Message { get; set; } = "Authentication error";
+
+ ///
+ /// Additional properties that are not explicitly defined in the schema
+ ///
+ [global::System.Text.Json.Serialization.JsonExtensionData]
+ public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary();
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ ///
+ /// Default Value: authentication_error
+ ///
+ ///
+ /// Default Value: Authentication error
+ ///
+ [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
+ public AuthenticationError(
+ string message,
+ global::Anthropic.AuthenticationErrorType type = global::Anthropic.AuthenticationErrorType.AuthenticationError)
+ {
+ this.Message = message ?? throw new global::System.ArgumentNullException(nameof(message));
+ this.Type = type;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public AuthenticationError()
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationErrorType.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationErrorType.g.cs
new file mode 100644
index 0000000..5552139
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.AuthenticationErrorType.g.cs
@@ -0,0 +1,45 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// Default Value: authentication_error
+ ///
+ public enum AuthenticationErrorType
+ {
+ ///
+ ///
+ ///
+ AuthenticationError,
+ }
+
+ ///
+ /// Enum extensions to do fast conversions without the reflection.
+ ///
+ public static class AuthenticationErrorTypeExtensions
+ {
+ ///
+ /// Converts an enum to a string.
+ ///
+ public static string ToValueString(this AuthenticationErrorType value)
+ {
+ return value switch
+ {
+ AuthenticationErrorType.AuthenticationError => "authentication_error",
+ _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null),
+ };
+ }
+ ///
+ /// Converts an string to a enum.
+ ///
+ public static AuthenticationErrorType? ToEnum(string value)
+ {
+ return value switch
+ {
+ "authentication_error" => AuthenticationErrorType.AuthenticationError,
+ _ => null,
+ };
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.ToolDiscriminator.Json.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSource.Json.g.cs
similarity index 87%
rename from src/libs/Anthropic/Generated/Anthropic.Models.ToolDiscriminator.Json.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSource.Json.g.cs
index 19f52df..0e37e1f 100644
--- a/src/libs/Anthropic/Generated/Anthropic.Models.ToolDiscriminator.Json.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSource.Json.g.cs
@@ -2,7 +2,7 @@
namespace Anthropic
{
- public sealed partial class ToolDiscriminator
+ public sealed partial class Base64ImageSource
{
///
/// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
@@ -34,14 +34,14 @@ public string ToJson(
///
/// Deserializes a JSON string using the provided JsonSerializerContext.
///
- public static global::Anthropic.ToolDiscriminator? FromJson(
+ public static global::Anthropic.Base64ImageSource? FromJson(
string json,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Deserialize(
json,
- typeof(global::Anthropic.ToolDiscriminator),
- jsonSerializerContext) as global::Anthropic.ToolDiscriminator;
+ typeof(global::Anthropic.Base64ImageSource),
+ jsonSerializerContext) as global::Anthropic.Base64ImageSource;
}
///
@@ -51,11 +51,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::Anthropic.ToolDiscriminator? FromJson(
+ public static global::Anthropic.Base64ImageSource? FromJson(
string json,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.Deserialize(
+ return global::System.Text.Json.JsonSerializer.Deserialize(
json,
jsonSerializerOptions);
}
@@ -63,14 +63,14 @@ public string ToJson(
///
/// Deserializes a JSON stream using the provided JsonSerializerContext.
///
- public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
- typeof(global::Anthropic.ToolDiscriminator),
- jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.ToolDiscriminator;
+ typeof(global::Anthropic.Base64ImageSource),
+ jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.Base64ImageSource;
}
///
@@ -80,11 +80,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.DeserializeAsync(
+ return global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
jsonSerializerOptions);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSource.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSource.g.cs
new file mode 100644
index 0000000..064074f
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSource.g.cs
@@ -0,0 +1,63 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public sealed partial class Base64ImageSource
+ {
+ ///
+ ///
+ ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("type")]
+ [global::System.Text.Json.Serialization.JsonConverter(typeof(global::Anthropic.JsonConverters.Base64ImageSourceTypeJsonConverter))]
+ public global::Anthropic.Base64ImageSourceType Type { get; set; }
+
+ ///
+ ///
+ ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("media_type")]
+ [global::System.Text.Json.Serialization.JsonConverter(typeof(global::Anthropic.JsonConverters.Base64ImageSourceMediaTypeJsonConverter))]
+ [global::System.Text.Json.Serialization.JsonRequired]
+ public required global::Anthropic.Base64ImageSourceMediaType MediaType { get; set; }
+
+ ///
+ ///
+ ///
+ [global::System.Text.Json.Serialization.JsonPropertyName("data")]
+ [global::System.Text.Json.Serialization.JsonRequired]
+ public required byte[] Data { get; set; }
+
+ ///
+ /// Additional properties that are not explicitly defined in the schema
+ ///
+ [global::System.Text.Json.Serialization.JsonExtensionData]
+ public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary();
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ ///
+ ///
+ ///
+ [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
+ public Base64ImageSource(
+ global::Anthropic.Base64ImageSourceMediaType mediaType,
+ byte[] data,
+ global::Anthropic.Base64ImageSourceType type)
+ {
+ this.MediaType = mediaType;
+ this.Data = data ?? throw new global::System.ArgumentNullException(nameof(data));
+ this.Type = type;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public Base64ImageSource()
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.ImageBlockSourceMediaType.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSourceMediaType.g.cs
similarity index 54%
rename from src/libs/Anthropic/Generated/Anthropic.Models.ImageBlockSourceMediaType.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSourceMediaType.g.cs
index da68b99..3860c88 100644
--- a/src/libs/Anthropic/Generated/Anthropic.Models.ImageBlockSourceMediaType.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSourceMediaType.g.cs
@@ -4,9 +4,9 @@
namespace Anthropic
{
///
- /// The media type of the image.
+ ///
///
- public enum ImageBlockSourceMediaType
+ public enum Base64ImageSourceMediaType
{
///
///
@@ -29,33 +29,33 @@ public enum ImageBlockSourceMediaType
///
/// Enum extensions to do fast conversions without the reflection.
///
- public static class ImageBlockSourceMediaTypeExtensions
+ public static class Base64ImageSourceMediaTypeExtensions
{
///
/// Converts an enum to a string.
///
- public static string ToValueString(this ImageBlockSourceMediaType value)
+ public static string ToValueString(this Base64ImageSourceMediaType value)
{
return value switch
{
- ImageBlockSourceMediaType.ImageJpeg => "image/jpeg",
- ImageBlockSourceMediaType.ImagePng => "image/png",
- ImageBlockSourceMediaType.ImageGif => "image/gif",
- ImageBlockSourceMediaType.ImageWebp => "image/webp",
+ Base64ImageSourceMediaType.ImageJpeg => "image/jpeg",
+ Base64ImageSourceMediaType.ImagePng => "image/png",
+ Base64ImageSourceMediaType.ImageGif => "image/gif",
+ Base64ImageSourceMediaType.ImageWebp => "image/webp",
_ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null),
};
}
///
/// Converts an string to a enum.
///
- public static ImageBlockSourceMediaType? ToEnum(string value)
+ public static Base64ImageSourceMediaType? ToEnum(string value)
{
return value switch
{
- "image/jpeg" => ImageBlockSourceMediaType.ImageJpeg,
- "image/png" => ImageBlockSourceMediaType.ImagePng,
- "image/gif" => ImageBlockSourceMediaType.ImageGif,
- "image/webp" => ImageBlockSourceMediaType.ImageWebp,
+ "image/jpeg" => Base64ImageSourceMediaType.ImageJpeg,
+ "image/png" => Base64ImageSourceMediaType.ImagePng,
+ "image/gif" => Base64ImageSourceMediaType.ImageGif,
+ "image/webp" => Base64ImageSourceMediaType.ImageWebp,
_ => null,
};
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.ImageBlockSourceType.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSourceType.g.cs
similarity index 66%
rename from src/libs/Anthropic/Generated/Anthropic.Models.ImageBlockSourceType.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSourceType.g.cs
index 254a1b2..2cc204a 100644
--- a/src/libs/Anthropic/Generated/Anthropic.Models.ImageBlockSourceType.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.Base64ImageSourceType.g.cs
@@ -4,9 +4,9 @@
namespace Anthropic
{
///
- /// The type of image source.
+ ///
///
- public enum ImageBlockSourceType
+ public enum Base64ImageSourceType
{
///
///
@@ -17,27 +17,27 @@ public enum ImageBlockSourceType
///
/// Enum extensions to do fast conversions without the reflection.
///
- public static class ImageBlockSourceTypeExtensions
+ public static class Base64ImageSourceTypeExtensions
{
///
/// Converts an enum to a string.
///
- public static string ToValueString(this ImageBlockSourceType value)
+ public static string ToValueString(this Base64ImageSourceType value)
{
return value switch
{
- ImageBlockSourceType.Base64 => "base64",
+ Base64ImageSourceType.Base64 => "base64",
_ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null),
};
}
///
/// Converts an string to a enum.
///
- public static ImageBlockSourceType? ToEnum(string value)
+ public static Base64ImageSourceType? ToEnum(string value)
{
return value switch
{
- "base64" => ImageBlockSourceType.Base64,
+ "base64" => Base64ImageSourceType.Base64,
_ => null,
};
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIError.Json.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIError.Json.g.cs
new file mode 100644
index 0000000..d88a7d7
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIError.Json.g.cs
@@ -0,0 +1,92 @@
+#nullable enable
+
+namespace Anthropic
+{
+ public sealed partial class BetaAPIError
+ {
+ ///
+ /// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
+ ///
+ public string ToJson(
+ global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
+ {
+ return global::System.Text.Json.JsonSerializer.Serialize(
+ this,
+ this.GetType(),
+ jsonSerializerContext);
+ }
+
+ ///
+ /// Serializes the current instance to a JSON string using the provided JsonSerializerOptions.
+ ///
+#if NET8_0_OR_GREATER
+ [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
+ [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
+#endif
+ public string ToJson(
+ global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
+ {
+ return global::System.Text.Json.JsonSerializer.Serialize(
+ this,
+ jsonSerializerOptions);
+ }
+
+ ///
+ /// Deserializes a JSON string using the provided JsonSerializerContext.
+ ///
+ public static global::Anthropic.BetaAPIError? FromJson(
+ string json,
+ global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
+ {
+ return global::System.Text.Json.JsonSerializer.Deserialize(
+ json,
+ typeof(global::Anthropic.BetaAPIError),
+ jsonSerializerContext) as global::Anthropic.BetaAPIError;
+ }
+
+ ///
+ /// Deserializes a JSON string using the provided JsonSerializerOptions.
+ ///
+#if NET8_0_OR_GREATER
+ [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
+ [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
+#endif
+ public static global::Anthropic.BetaAPIError? FromJson(
+ string json,
+ global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
+ {
+ return global::System.Text.Json.JsonSerializer.Deserialize(
+ json,
+ jsonSerializerOptions);
+ }
+
+ ///
+ /// Deserializes a JSON stream using the provided JsonSerializerContext.
+ ///
+ public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ global::System.IO.Stream jsonStream,
+ global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
+ {
+ return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
+ jsonStream,
+ typeof(global::Anthropic.BetaAPIError),
+ jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.BetaAPIError;
+ }
+
+ ///
+ /// Deserializes a JSON stream using the provided JsonSerializerOptions.
+ ///
+#if NET8_0_OR_GREATER
+ [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
+ [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
+#endif
+ public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ global::System.IO.Stream jsonStream,
+ global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
+ {
+ return global::System.Text.Json.JsonSerializer.DeserializeAsync(
+ jsonStream,
+ jsonSerializerOptions);
+ }
+ }
+}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIError.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIError.g.cs
new file mode 100644
index 0000000..a55c51f
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIError.g.cs
@@ -0,0 +1,58 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public sealed partial class BetaAPIError
+ {
+ ///
+ /// Default Value: api_error
+ ///
+ /// global::Anthropic.BetaAPIErrorType.ApiError
+ [global::System.Text.Json.Serialization.JsonPropertyName("type")]
+ [global::System.Text.Json.Serialization.JsonConverter(typeof(global::Anthropic.JsonConverters.BetaAPIErrorTypeJsonConverter))]
+ public global::Anthropic.BetaAPIErrorType Type { get; set; } = global::Anthropic.BetaAPIErrorType.ApiError;
+
+ ///
+ /// Default Value: Internal server error
+ ///
+ /// "Internal server error"
+ [global::System.Text.Json.Serialization.JsonPropertyName("message")]
+ [global::System.Text.Json.Serialization.JsonRequired]
+ public required string Message { get; set; } = "Internal server error";
+
+ ///
+ /// Additional properties that are not explicitly defined in the schema
+ ///
+ [global::System.Text.Json.Serialization.JsonExtensionData]
+ public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary();
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ ///
+ /// Default Value: api_error
+ ///
+ ///
+ /// Default Value: Internal server error
+ ///
+ [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
+ public BetaAPIError(
+ string message,
+ global::Anthropic.BetaAPIErrorType type = global::Anthropic.BetaAPIErrorType.ApiError)
+ {
+ this.Message = message ?? throw new global::System.ArgumentNullException(nameof(message));
+ this.Type = type;
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public BetaAPIError()
+ {
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIErrorType.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIErrorType.g.cs
new file mode 100644
index 0000000..5db9243
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAPIErrorType.g.cs
@@ -0,0 +1,45 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ /// Default Value: api_error
+ ///
+ public enum BetaAPIErrorType
+ {
+ ///
+ ///
+ ///
+ ApiError,
+ }
+
+ ///
+ /// Enum extensions to do fast conversions without the reflection.
+ ///
+ public static class BetaAPIErrorTypeExtensions
+ {
+ ///
+ /// Converts an enum to a string.
+ ///
+ public static string ToValueString(this BetaAPIErrorType value)
+ {
+ return value switch
+ {
+ BetaAPIErrorType.ApiError => "api_error",
+ _ => throw new global::System.ArgumentOutOfRangeException(nameof(value), value, null),
+ };
+ }
+ ///
+ /// Converts an string to a enum.
+ ///
+ public static BetaAPIErrorType? ToEnum(string value)
+ {
+ return value switch
+ {
+ "api_error" => BetaAPIErrorType.ApiError,
+ _ => null,
+ };
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.BlockDeltaDiscriminator.Json.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAuthenticationError.Json.g.cs
similarity index 87%
rename from src/libs/Anthropic/Generated/Anthropic.Models.BlockDeltaDiscriminator.Json.g.cs
rename to src/libs/Anthropic/Generated/Anthropic.Models.BetaAuthenticationError.Json.g.cs
index f6d3982..bb6dceb 100644
--- a/src/libs/Anthropic/Generated/Anthropic.Models.BlockDeltaDiscriminator.Json.g.cs
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAuthenticationError.Json.g.cs
@@ -2,7 +2,7 @@
namespace Anthropic
{
- public sealed partial class BlockDeltaDiscriminator
+ public sealed partial class BetaAuthenticationError
{
///
/// Serializes the current instance to a JSON string using the provided JsonSerializerContext.
@@ -34,14 +34,14 @@ public string ToJson(
///
/// Deserializes a JSON string using the provided JsonSerializerContext.
///
- public static global::Anthropic.BlockDeltaDiscriminator? FromJson(
+ public static global::Anthropic.BetaAuthenticationError? FromJson(
string json,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return global::System.Text.Json.JsonSerializer.Deserialize(
json,
- typeof(global::Anthropic.BlockDeltaDiscriminator),
- jsonSerializerContext) as global::Anthropic.BlockDeltaDiscriminator;
+ typeof(global::Anthropic.BetaAuthenticationError),
+ jsonSerializerContext) as global::Anthropic.BetaAuthenticationError;
}
///
@@ -51,11 +51,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::Anthropic.BlockDeltaDiscriminator? FromJson(
+ public static global::Anthropic.BetaAuthenticationError? FromJson(
string json,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.Deserialize(
+ return global::System.Text.Json.JsonSerializer.Deserialize(
json,
jsonSerializerOptions);
}
@@ -63,14 +63,14 @@ public string ToJson(
///
/// Deserializes a JSON stream using the provided JsonSerializerContext.
///
- public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static async global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.Serialization.JsonSerializerContext jsonSerializerContext)
{
return (await global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
- typeof(global::Anthropic.BlockDeltaDiscriminator),
- jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.BlockDeltaDiscriminator;
+ typeof(global::Anthropic.BetaAuthenticationError),
+ jsonSerializerContext).ConfigureAwait(false)) as global::Anthropic.BetaAuthenticationError;
}
///
@@ -80,11 +80,11 @@ public string ToJson(
[global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")]
[global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")]
#endif
- public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
+ public static global::System.Threading.Tasks.ValueTask FromJsonStreamAsync(
global::System.IO.Stream jsonStream,
global::System.Text.Json.JsonSerializerOptions? jsonSerializerOptions = null)
{
- return global::System.Text.Json.JsonSerializer.DeserializeAsync(
+ return global::System.Text.Json.JsonSerializer.DeserializeAsync(
jsonStream,
jsonSerializerOptions);
}
diff --git a/src/libs/Anthropic/Generated/Anthropic.Models.BetaAuthenticationError.g.cs b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAuthenticationError.g.cs
new file mode 100644
index 0000000..6a4c03d
--- /dev/null
+++ b/src/libs/Anthropic/Generated/Anthropic.Models.BetaAuthenticationError.g.cs
@@ -0,0 +1,58 @@
+
+#nullable enable
+
+namespace Anthropic
+{
+ ///
+ ///
+ ///
+ public sealed partial class BetaAuthenticationError
+ {
+ ///
+ /// Default Value: authentication_error
+ ///
+ /// global::Anthropic.BetaAuthenticationErrorType.AuthenticationError
+ [global::System.Text.Json.Serialization.JsonPropertyName("type")]
+ [global::System.Text.Json.Serialization.JsonConverter(typeof(global::Anthropic.JsonConverters.BetaAuthenticationErrorTypeJsonConverter))]
+ public global::Anthropic.BetaAuthenticationErrorType Type { get; set; } = global::Anthropic.BetaAuthenticationErrorType.AuthenticationError;
+
+ ///
+ /// Default Value: Authentication error
+ ///
+ /// "Authentication error"
+ [global::System.Text.Json.Serialization.JsonPropertyName("message")]
+ [global::System.Text.Json.Serialization.JsonRequired]
+ public required string Message { get; set; } = "Authentication error";
+
+ ///
+ /// Additional properties that are not explicitly defined in the schema
+ ///
+ [global::System.Text.Json.Serialization.JsonExtensionData]
+ public global::System.Collections.Generic.IDictionary AdditionalProperties { get; set; } = new global::System.Collections.Generic.Dictionary