diff --git a/README.md b/README.md
index c287fb0..be79ca6 100644
--- a/README.md
+++ b/README.md
@@ -12,6 +12,7 @@
- All modern .NET features - nullability, trimming, NativeAOT, etc.
- Support .Net Framework/.Net Standard 2.0
- Support for all Ollama API endpoints including chats, embeddings, listing models, pulling and creating new models, and more.
+- Support for [Microsoft.Extensions.AI](https://devblogs.microsoft.com/dotnet/introducing-microsoft-extensions-ai-preview/) via the `IChatClient` abstraction
## Usage
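
The new Microsoft.Extensions.AI bullet corresponds to the `IChatClient` implementation added below. A minimal consumption sketch against the preview API, assuming the parameterless `OllamaApiClient` constructor targets a local Ollama server and using `llama3.2` as a placeholder model name:

```csharp
using Microsoft.Extensions.AI;
using Ollama;

// OllamaApiClient now implements IChatClient, so it can be handed to any code
// written against the Microsoft.Extensions.AI abstractions.
using var client = new OllamaApiClient(); // assumption: default constructor points at a local Ollama instance
IChatClient chatClient = client;

ChatCompletion completion = await chatClient.CompleteAsync(
    new List<ChatMessage> { new(ChatRole.User, "Why is the sky blue?") },
    new ChatOptions { ModelId = "llama3.2" }); // placeholder model name

Console.WriteLine(completion.Message.Text);
```
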
diff --git a/src/libs/Ollama/Ollama.csproj b/src/libs/Ollama/Ollama.csproj
index f929444..e84c37f 100644
--- a/src/libs/Ollama/Ollama.csproj
+++ b/src/libs/Ollama/Ollama.csproj
@@ -12,11 +12,12 @@
    <Description>Generated C# SDK based on Ollama OpenAPI specification.</Description>
-    <PackageTags>api;client;sdk;dotnet;swagger;openapi;specification;ollama;generated</PackageTags>
+    <PackageTags>api;client;sdk;dotnet;swagger;openapi;specification;ollama;generated;ai;abstractions;llama;ichatclient</PackageTags>
+
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
diff --git a/src/libs/Ollama/OllamaApiClient.IChatClient.cs b/src/libs/Ollama/OllamaApiClient.IChatClient.cs
new file mode 100644
index 0000000..6aa1e6e
--- /dev/null
+++ b/src/libs/Ollama/OllamaApiClient.IChatClient.cs
@@ -0,0 +1,90 @@
+using Microsoft.Extensions.AI;
+
+namespace Ollama;
+
+public partial class OllamaApiClient : Microsoft.Extensions.AI.IChatClient
+{
+    /// <inheritdoc />
+    public async Task<ChatCompletion> CompleteAsync(
+        IList<ChatMessage> chatMessages,
+        ChatOptions? options = null,
+        CancellationToken cancellationToken = default)
+    {
+        var response = await Chat.GenerateChatCompletionAsync(
+            model: options?.ModelId ?? "ollama",
+            messages: chatMessages.Select(x => new Message
+            {
+                Content = x.Text ?? string.Empty,
+                Role = x.Role.Value switch
+                {
+                    "assistant" => MessageRole.Assistant,
+                    "user" => MessageRole.User,
+                    "system" => MessageRole.System,
+                    "tool" => MessageRole.Tool,
+                    _ => MessageRole.User,
+                },
+            }).ToArray(),
+            format: options?.ResponseFormat switch
+            {
+                ChatResponseFormatJson => ResponseFormat.Json,
+                _ => null,
+            },
+            options: new RequestOptions
+            {
+                Temperature = options?.Temperature,
+            },
+            stream: false,
+            keepAlive: default,
+            tools: options?.Tools?.Select(x => new Tool
+            {
+                Function = new ToolFunction
+                {
+                    Name = string.Empty,
+                    Description = string.Empty,
+                    Parameters = x.AsJson(),
+                },
+            }).ToList(),
+            cancellationToken: cancellationToken).WaitAsync().ConfigureAwait(false);
+        if (response.Message == null)
+        {
+            throw new InvalidOperationException("Response message was null.");
+        }
+
+        return new ChatCompletion(new ChatMessage(
+            role: response.Message.Role switch
+            {
+                MessageRole.Assistant => ChatRole.Assistant,
+                MessageRole.User => ChatRole.User,
+                MessageRole.System => ChatRole.System,
+                MessageRole.Tool => ChatRole.Tool,
+                _ => ChatRole.User,
+            },
+            content: response.Message.Content)
+        {
+            RawRepresentation = response.Message,
+        })
+        {
+            RawRepresentation = response,
+        };
+    }
+
+    /// <inheritdoc />
+    public IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
+        IList<ChatMessage> chatMessages,
+        ChatOptions? options = null,
+        CancellationToken cancellationToken = default)
+    {
+        throw new NotImplementedException();
+    }
+
+    /// <inheritdoc />
+    public TService? GetService<TService>(object? key = null) where TService : class
+    {
+        return this as TService;
+    }
+
+    /// <inheritdoc />
+    public ChatClientMetadata Metadata => new(
+        providerName: "Ollama",
+        providerUri: HttpClient.BaseAddress);
+}
\ No newline at end of file
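
For reference, a sketch of how the option and role mapping above plays out at the call site (the parameterless constructor and the `llama3.2` model name are assumptions, not part of this change):

```csharp
using Microsoft.Extensions.AI;
using Ollama;

IChatClient chatClient = new OllamaApiClient(); // assumption: defaults to a local Ollama server

// ModelId, Temperature and the JSON response format are translated by CompleteAsync
// into the model, options and format arguments of GenerateChatCompletionAsync.
ChatCompletion completion = await chatClient.CompleteAsync(
    new List<ChatMessage>
    {
        new(ChatRole.System, "Answer as a JSON object."),
        new(ChatRole.User, "List three colors."),
    },
    new ChatOptions
    {
        ModelId = "llama3.2",                     // placeholder model name
        Temperature = 0.2f,
        ResponseFormat = ChatResponseFormat.Json, // matched as ChatResponseFormatJson above
    });

Console.WriteLine(completion.Message.Text);
Console.WriteLine(completion.RawRepresentation?.GetType().Name); // the raw Ollama response is preserved

// Metadata and GetService come from the same partial class.
Console.WriteLine(chatClient.Metadata.ProviderName); // "Ollama"
OllamaApiClient? concrete = chatClient.GetService<OllamaApiClient>();
```

Note that `CompleteStreamingAsync` currently throws `NotImplementedException`, so streaming consumers of the abstraction are not supported yet.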