From d520f21aaa725584dc080e14ffd6e8f10c58e89a Mon Sep 17 00:00:00 2001
From: Ronny Gunawan <3048897+ronnygunawan@users.noreply.github.com>
Date: Thu, 1 Feb 2024 00:29:15 +0700
Subject: [PATCH 1/2] WIP Gemini implementation

---
 .../AI/Gemini/GeminiTextPromptHandler.cs      | 178 ++++++++++++++++++
 .../BotUpdate/Message/AICallCommandHandler.cs |  12 ++
 .../Message/AIFollowUpMessageHandler.cs       |  13 ++
 BotNet.Commands/AI/Gemini/GeminiTextPrompt.cs |  77 ++++++++
 BotNet.Commands/SenderAggregate/Sender.cs     |   3 +
 BotNet.Services/Gemini/GeminiClient.cs        |  54 ++++++
 BotNet.Services/Gemini/GeminiOptions.cs       |   5 +
 BotNet.Services/Gemini/Models/Candidate.cs    |  11 ++
 BotNet.Services/Gemini/Models/Content.cs      |  16 ++
 .../Gemini/Models/GeminiRequest.cs            |  10 +
 .../Gemini/Models/GeminiResponse.cs           |   9 +
 .../Gemini/Models/GenerationConfig.cs         |  12 ++
 BotNet.Services/Gemini/Models/InlineData.cs   |   8 +
 BotNet.Services/Gemini/Models/Part.cs         |   8 +
 .../Gemini/Models/PromptFeedback.cs           |   8 +
 BotNet.Services/Gemini/Models/SafetyRating.cs |   8 +
 .../Gemini/Models/SafetySettings.cs           |   8 +
 .../Gemini/ServiceCollectionExtensions.cs     |  10 +
 BotNet/Program.cs                             |   3 +
 19 files changed, 453 insertions(+)
 create mode 100644 BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
 create mode 100644 BotNet.Commands/AI/Gemini/GeminiTextPrompt.cs
 create mode 100644 BotNet.Services/Gemini/GeminiClient.cs
 create mode 100644 BotNet.Services/Gemini/GeminiOptions.cs
 create mode 100644 BotNet.Services/Gemini/Models/Candidate.cs
 create mode 100644 BotNet.Services/Gemini/Models/Content.cs
 create mode 100644 BotNet.Services/Gemini/Models/GeminiRequest.cs
 create mode 100644 BotNet.Services/Gemini/Models/GeminiResponse.cs
 create mode 100644 BotNet.Services/Gemini/Models/GenerationConfig.cs
 create mode 100644 BotNet.Services/Gemini/Models/InlineData.cs
 create mode 100644 BotNet.Services/Gemini/Models/Part.cs
 create mode 100644 BotNet.Services/Gemini/Models/PromptFeedback.cs
 create mode 100644 BotNet.Services/Gemini/Models/SafetyRating.cs
 create mode 100644 BotNet.Services/Gemini/Models/SafetySettings.cs
 create mode 100644 BotNet.Services/Gemini/ServiceCollectionExtensions.cs

diff --git a/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs b/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
new file mode 100644
index 0000000..e0751c2
--- /dev/null
+++ b/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
@@ -0,0 +1,178 @@
+using BotNet.CommandHandlers.Art;
+using BotNet.Commands;
+using BotNet.Commands.AI.Gemini;
+using BotNet.Commands.AI.OpenAI;
+using BotNet.Commands.AI.Stability;
+using BotNet.Commands.BotUpdate.Message;
+using BotNet.Commands.ChatAggregate;
+using BotNet.Commands.CommandPrioritization;
+using BotNet.Commands.SenderAggregate;
+using BotNet.Services.Gemini;
+using BotNet.Services.Gemini.Models;
+using BotNet.Services.MarkdownV2;
+using BotNet.Services.RateLimit;
+using Microsoft.Extensions.Logging;
+using Telegram.Bot;
+using Telegram.Bot.Types;
+using Telegram.Bot.Types.Enums;
+
+namespace BotNet.CommandHandlers.AI.Gemini {
+    public sealed class GeminiTextPromptHandler(
+        ITelegramBotClient telegramBotClient,
+        GeminiClient geminiClient,
+        ICommandQueue commandQueue,
+        ITelegramMessageCache telegramMessageCache,
+        CommandPriorityCategorizer commandPriorityCategorizer,
+        ILogger<GeminiTextPromptHandler> logger
+    ) : ICommandHandler<GeminiTextPrompt> {
+        internal static readonly RateLimiter CHAT_RATE_LIMITER = RateLimiter.PerChat(60, TimeSpan.FromMinutes(1));
+
+        private readonly ITelegramBotClient _telegramBotClient = telegramBotClient;
+        private readonly GeminiClient _geminiClient = geminiClient;
+        private readonly ICommandQueue _commandQueue = commandQueue;
+        private readonly ITelegramMessageCache _telegramMessageCache = telegramMessageCache;
+        private readonly CommandPriorityCategorizer _commandPriorityCategorizer = commandPriorityCategorizer;
+        private readonly ILogger<GeminiTextPromptHandler> _logger = logger;
+
+        public Task Handle(GeminiTextPrompt textPrompt, CancellationToken cancellationToken) {
+            if (textPrompt.Command.Chat is not HomeGroupChat) {
+                return _telegramBotClient.SendTextMessageAsync(
+                    chatId: textPrompt.Command.Chat.Id,
+                    text: MarkdownV2Sanitizer.Sanitize("Gemini tidak bisa dipakai di sini."),
+                    parseMode: ParseMode.MarkdownV2,
+                    replyToMessageId: textPrompt.Command.MessageId,
+                    cancellationToken: cancellationToken
+                );
+            }
+
+            try {
+                CHAT_RATE_LIMITER.ValidateActionRate(
+                    chatId: textPrompt.Command.Chat.Id,
+                    userId: textPrompt.Command.Sender.Id
+                );
+            } catch (RateLimitExceededException exc) {
+                return _telegramBotClient.SendTextMessageAsync(
+                    chatId: textPrompt.Command.Chat.Id,
+                    text: $"Anda terlalu banyak memanggil AI. Coba lagi {exc.Cooldown}.",
+                    parseMode: ParseMode.Html,
+                    replyToMessageId: textPrompt.Command.MessageId,
+                    cancellationToken: cancellationToken
+                );
+            }
+
+            // Fire and forget
+            Task.Run(async () => {
+                List<Content> messages = [
+                    Content.FromText("model", "The following is a conversation with an AI assistant. The assistant is helpful, creative, direct, concise, and always get to the point. When user asks for an image to be generated, the AI assistant should respond with \"ImageGeneration:\" followed by comma separated list of features to be expected from the generated image.")
+                ];
+
+                messages.AddRange(
+                    from message in textPrompt.Thread.Take(10).Reverse()
+                    select Content.FromText(
+                        role: message.Sender.GeminiRole,
+                        text: message.Text
+                    )
+                );
+
+                messages.Add(
+                    Content.FromText("user", textPrompt.Prompt)
+                );
+
+                Message responseMessage = await _telegramBotClient.SendTextMessageAsync(
+                    chatId: textPrompt.Command.Chat.Id,
+                    text: MarkdownV2Sanitizer.Sanitize("… ⏳"),
+                    parseMode: ParseMode.MarkdownV2,
+                    replyToMessageId: textPrompt.Command.MessageId
+                );
+
+                string response = await _geminiClient.ChatAsync(
+                    messages: messages,
+                    maxTokens: 512,
+                    cancellationToken: cancellationToken
+                );
+
+                // Handle image generation intent
+                if (response.StartsWith("ImageGeneration:")) {
+                    if (textPrompt.Command.Sender is not VIPSender) {
+                        try {
+                            ArtCommandHandler.IMAGE_GENERATION_RATE_LIMITER.ValidateActionRate(textPrompt.Command.Chat.Id, textPrompt.Command.Sender.Id);
+                        } catch (RateLimitExceededException exc) {
+                            await _telegramBotClient.SendTextMessageAsync(
+                                chatId: textPrompt.Command.Chat.Id,
Coba lagi {exc.Cooldown}.", + parseMode: ParseMode.Html, + replyToMessageId: textPrompt.Command.MessageId, + cancellationToken: cancellationToken + ); + return; + } + } + + string imageGenerationPrompt = response.Substring(response.IndexOf(':') + 1).Trim(); + switch (textPrompt.Command) { + case { Sender: VIPSender }: + await _commandQueue.DispatchAsync( + command: new OpenAIImageGenerationPrompt( + callSign: "Gemini", + prompt: imageGenerationPrompt, + promptMessageId: textPrompt.Command.MessageId, + responseMessageId: new(responseMessage.MessageId), + chat: textPrompt.Command.Chat, + sender: textPrompt.Command.Sender + ) + ); + break; + case { Chat: HomeGroupChat }: + await _commandQueue.DispatchAsync( + command: new StabilityTextToImagePrompt( + callSign: "Gemini", + prompt: imageGenerationPrompt, + promptMessageId: textPrompt.Command.MessageId, + responseMessageId: new(responseMessage.MessageId), + chat: textPrompt.Command.Chat, + sender: textPrompt.Command.Sender + ) + ); + break; + default: + await _telegramBotClient.EditMessageTextAsync( + chatId: textPrompt.Command.Chat.Id, + messageId: responseMessage.MessageId, + text: MarkdownV2Sanitizer.Sanitize("Image generation tidak bisa dipakai di sini."), + parseMode: ParseMode.MarkdownV2, + cancellationToken: cancellationToken + ); + break; + } + return; + } + + // Finalize message + try { + responseMessage = await telegramBotClient.EditMessageTextAsync( + chatId: textPrompt.Command.Chat.Id, + messageId: responseMessage.MessageId, + text: MarkdownV2Sanitizer.Sanitize(response), + parseMode: ParseMode.MarkdownV2, + cancellationToken: cancellationToken + ); + } catch (Exception exc) { + _logger.LogError(exc, null); + throw; + } + + // Track thread + _telegramMessageCache.Add( + message: AIResponseMessage.FromMessage( + message: responseMessage, + replyToMessage: textPrompt.Command, + callSign: "Gemini", + commandPriorityCategorizer: _commandPriorityCategorizer + ) + ); + }); + + return Task.CompletedTask; + } + } +} diff --git a/BotNet.CommandHandlers/BotUpdate/Message/AICallCommandHandler.cs b/BotNet.CommandHandlers/BotUpdate/Message/AICallCommandHandler.cs index bd830a8..2aacc4c 100644 --- a/BotNet.CommandHandlers/BotUpdate/Message/AICallCommandHandler.cs +++ b/BotNet.CommandHandlers/BotUpdate/Message/AICallCommandHandler.cs @@ -1,4 +1,5 @@ using BotNet.Commands; +using BotNet.Commands.AI.Gemini; using BotNet.Commands.AI.OpenAI; using BotNet.Commands.BotUpdate.Message; using BotNet.Services.OpenAI; @@ -37,6 +38,17 @@ await _commandQueue.DispatchAsync( ); break; } + case "Gemini" when command.ImageFileId is null && command.ReplyToMessage?.ImageFileId is null: { + await _commandQueue.DispatchAsync( + command: GeminiTextPrompt.FromAICallCommand( + aiCallCommand: command, + thread: command.ReplyToMessage is { } replyToMessage + ? 
+                                    ? _telegramMessageCache.GetThread(replyToMessage)
+                                    : Enumerable.Empty<MessageBase>()
+                            )
+                        );
+                        break;
+                    }
             }
         }
     }
diff --git a/BotNet.CommandHandlers/BotUpdate/Message/AIFollowUpMessageHandler.cs b/BotNet.CommandHandlers/BotUpdate/Message/AIFollowUpMessageHandler.cs
index ffdebfb..2d4e023 100644
--- a/BotNet.CommandHandlers/BotUpdate/Message/AIFollowUpMessageHandler.cs
+++ b/BotNet.CommandHandlers/BotUpdate/Message/AIFollowUpMessageHandler.cs
@@ -1,4 +1,5 @@
 using BotNet.Commands;
+using BotNet.Commands.AI.Gemini;
 using BotNet.Commands.AI.OpenAI;
 using BotNet.Commands.BotUpdate.Message;
 
@@ -25,6 +26,18 @@ await _commandQueue.DispatchAsync(
                     )
                 );
                 break;
+            case "Gemini":
+                await _commandQueue.DispatchAsync(
+                    command: GeminiTextPrompt.FromAIFollowUpMessage(
+                        aIFollowUpMessage: command,
+                        thread: command.ReplyToMessage is null
+                            ? Enumerable.Empty<MessageBase>()
+                            : _telegramMessageCache.GetThread(
+                                firstMessage: command.ReplyToMessage
+                            )
+                    )
+                );
+                break;
         }
     }
 }
diff --git a/BotNet.Commands/AI/Gemini/GeminiTextPrompt.cs b/BotNet.Commands/AI/Gemini/GeminiTextPrompt.cs
new file mode 100644
index 0000000..e19d460
--- /dev/null
+++ b/BotNet.Commands/AI/Gemini/GeminiTextPrompt.cs
@@ -0,0 +1,77 @@
+using BotNet.Commands.BotUpdate.Message;
+
+namespace BotNet.Commands.AI.Gemini {
+    public sealed record GeminiTextPrompt : ICommand {
+        public string Prompt { get; }
+        public HumanMessageBase Command { get; }
+        public IEnumerable<MessageBase> Thread { get; }
+
+        private GeminiTextPrompt(
+            string prompt,
+            HumanMessageBase command,
+            IEnumerable<MessageBase> thread
+        ) {
+            Prompt = prompt;
+            Command = command;
+            Thread = thread;
+        }
+
+        public static GeminiTextPrompt FromAICallCommand(AICallCommand aiCallCommand, IEnumerable<MessageBase> thread) {
+            // Call sign must be Gemini
+            if (aiCallCommand.CallSign != "Gemini") {
+                throw new ArgumentException("Call sign must be Gemini", nameof(aiCallCommand));
+            }
+
+            // Prompt must be non-empty
+            if (string.IsNullOrWhiteSpace(aiCallCommand.Text)) {
+                throw new ArgumentException("Prompt must be non-empty", nameof(aiCallCommand));
+            }
+
+            // Non-empty thread must begin with reply to message
+            if (thread.FirstOrDefault() is {
+                MessageId: { } firstMessageId,
+                Chat.Id: { } firstChatId
+            }) {
+                if (firstMessageId != aiCallCommand.ReplyToMessage?.MessageId
+                    || firstChatId != aiCallCommand.Chat.Id) {
+                    throw new ArgumentException("Thread must begin with reply to message", nameof(thread));
+                }
+            }
+
+            return new(
+                prompt: aiCallCommand.Text,
+                command: aiCallCommand,
+                thread: thread
+            );
+        }
+
+        public static GeminiTextPrompt FromAIFollowUpMessage(AIFollowUpMessage aIFollowUpMessage, IEnumerable<MessageBase> thread) {
+            // Call sign must be Gemini
+            if (aIFollowUpMessage.CallSign != "Gemini") {
+                throw new ArgumentException("Call sign must be Gemini", nameof(aIFollowUpMessage));
+            }
+
+            // Prompt must be non-empty
+            if (string.IsNullOrWhiteSpace(aIFollowUpMessage.Text)) {
+                throw new ArgumentException("Prompt must be non-empty", nameof(aIFollowUpMessage));
+            }
+
+            // Non-empty thread must begin with reply to message
+            if (thread.FirstOrDefault() is {
+                MessageId: { } firstMessageId,
+                Chat.Id: { } firstChatId
+            }) {
+                if (firstMessageId != aIFollowUpMessage.ReplyToMessage?.MessageId
+                    || firstChatId != aIFollowUpMessage.Chat.Id) {
+                    throw new ArgumentException("Thread must begin with reply to message", nameof(thread));
+                }
+            }
+
+            return new(
+                prompt: aIFollowUpMessage.Text,
+                command: aIFollowUpMessage,
+                thread: thread
+            );
+        }
+    }
+}
diff --git a/BotNet.Commands/SenderAggregate/Sender.cs b/BotNet.Commands/SenderAggregate/Sender.cs
index b6a9a83..eb97176 100644
--- a/BotNet.Commands/SenderAggregate/Sender.cs
+++ b/BotNet.Commands/SenderAggregate/Sender.cs
@@ -7,6 +7,7 @@ public abstract record SenderBase(
         string Name
     ) {
         public abstract string ChatGPTRole { get; }
+        public abstract string GeminiRole { get; }
     }
 
     public record HumanSender(
@@ -14,6 +15,7 @@ public record HumanSender(
         string Name
     ) : SenderBase(Id, Name) {
         public override string ChatGPTRole => "user";
+        public override string GeminiRole => "user";
 
         public static bool TryCreate(
             Telegram.Bot.Types.User user,
@@ -51,6 +53,7 @@ public sealed record BotSender(
         string Name
     ) : SenderBase(Id, Name) {
         public override string ChatGPTRole => "assistant";
+        public override string GeminiRole => "model";
 
         public static bool TryCreate(
             Telegram.Bot.Types.User user,
diff --git a/BotNet.Services/Gemini/GeminiClient.cs b/BotNet.Services/Gemini/GeminiClient.cs
new file mode 100644
index 0000000..4b67a79
--- /dev/null
+++ b/BotNet.Services/Gemini/GeminiClient.cs
@@ -0,0 +1,54 @@
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.Net.Http;
+using System.Net.Http.Json;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using BotNet.Services.Gemini.Models;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+
+namespace BotNet.Services.Gemini {
+    public class GeminiClient(
+        HttpClient httpClient,
+        IOptions<GeminiOptions> geminiOptionsAccessor,
+        ILogger<GeminiClient> logger
+    ) {
+        private const string BASE_URL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent";
+        private readonly HttpClient _httpClient = httpClient;
+        private readonly string _apiKey = geminiOptionsAccessor.Value.ApiKey!;
+        private readonly ILogger<GeminiClient> _logger = logger;
+
+        public async Task<string> ChatAsync(IEnumerable<Content> messages, int maxTokens, CancellationToken cancellationToken) {
+            GeminiRequest geminiRequest = new(
+                Contents: messages.ToImmutableList(),
+                SafetySettings: null,
+                GenerationConfig: new(
+                    MaxOutputTokens: maxTokens
+                )
+            );
+            using HttpRequestMessage request = new(HttpMethod.Post, BASE_URL + $"?key={_apiKey}") {
+                Headers = {
+                    { "Accept", "application/json" }
+                },
+                Content = JsonContent.Create(
+                    inputValue: geminiRequest
+                )
+            };
+            using HttpResponseMessage response = await _httpClient.SendAsync(request, cancellationToken);
+            string responseContent = await response.Content.ReadAsStringAsync(cancellationToken);
+            response.EnsureSuccessStatusCode();
+
+            GeminiResponse? geminiResponse = JsonSerializer.Deserialize<GeminiResponse>(responseContent);
+            if (geminiResponse == null) return "";
+            if (geminiResponse.Candidates == null) return "";
+            if (geminiResponse.Candidates.Count == 0) return "";
+            Content? content = geminiResponse.Candidates[0].Content;
+            if (content == null) return "";
+            if (content.Parts == null) return "";
+            if (content.Parts.Count == 0) return "";
+            return content.Parts[0].Text ?? "";
+        }
+    }
+}
diff --git a/BotNet.Services/Gemini/GeminiOptions.cs b/BotNet.Services/Gemini/GeminiOptions.cs
new file mode 100644
index 0000000..ea3a902
--- /dev/null
+++ b/BotNet.Services/Gemini/GeminiOptions.cs
@@ -0,0 +1,5 @@
+namespace BotNet.Services.Gemini {
+    public class GeminiOptions {
+        public string? ApiKey { get; set; }
+    }
+}
diff --git a/BotNet.Services/Gemini/Models/Candidate.cs b/BotNet.Services/Gemini/Models/Candidate.cs
new file mode 100644
index 0000000..0ba0bba
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/Candidate.cs
@@ -0,0 +1,11 @@
+using System.Collections.Immutable;
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record Candidate(
+        [property: JsonPropertyName("content")] Content? Content,
+        [property: JsonPropertyName("finishReason")] string? FinishReason,
+        [property: JsonPropertyName("index")] int? Index,
+        [property: JsonPropertyName("safetyRatings")] ImmutableList<SafetyRating>? SafetyRatings
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/Content.cs b/BotNet.Services/Gemini/Models/Content.cs
new file mode 100644
index 0000000..bdf6c60
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/Content.cs
@@ -0,0 +1,16 @@
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public record Content(
+        [property: JsonPropertyName("role"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? Role,
+        [property: JsonPropertyName("parts")] List<Part>? Parts
+    ) {
+        public static Content FromText(string role, string text) => new(
+            Role: role,
+            Parts: [
+                new(Text: text)
+            ]
+        );
+    }
+}
diff --git a/BotNet.Services/Gemini/Models/GeminiRequest.cs b/BotNet.Services/Gemini/Models/GeminiRequest.cs
new file mode 100644
index 0000000..337fd91
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/GeminiRequest.cs
@@ -0,0 +1,10 @@
+using System.Collections.Immutable;
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record GeminiRequest(
+        [property: JsonPropertyName("contents")] ImmutableList<Content> Contents,
+        [property: JsonPropertyName("safetySettings"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] ImmutableList<SafetySettings>? SafetySettings,
+        [property: JsonPropertyName("generationConfig"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] GenerationConfig? GenerationConfig
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/GeminiResponse.cs b/BotNet.Services/Gemini/Models/GeminiResponse.cs
new file mode 100644
index 0000000..c6154ca
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/GeminiResponse.cs
@@ -0,0 +1,9 @@
+using System.Collections.Immutable;
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record GeminiResponse(
+        [property: JsonPropertyName("candidates")] ImmutableList<Candidate>? Candidates,
+        [property: JsonPropertyName("promptFeedback")] PromptFeedback? PromptFeedback
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/GenerationConfig.cs b/BotNet.Services/Gemini/Models/GenerationConfig.cs
new file mode 100644
index 0000000..436a708
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/GenerationConfig.cs
@@ -0,0 +1,12 @@
+using System.Collections.Immutable;
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record GenerationConfig(
+        [property: JsonPropertyName("stopSequences"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] ImmutableList<string>? StopSequences = null,
+        [property: JsonPropertyName("temperature"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] double? Temperature = null,
+        [property: JsonPropertyName("maxOutputTokens"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] int? MaxOutputTokens = null,
+        [property: JsonPropertyName("topP"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] double? TopP = null,
+        [property: JsonPropertyName("topK"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] int? TopK = null
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/InlineData.cs b/BotNet.Services/Gemini/Models/InlineData.cs
new file mode 100644
index 0000000..478aadb
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/InlineData.cs
@@ -0,0 +1,8 @@
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record InlineData(
+        [property: JsonPropertyName("mime_type")] string MimeType,
+        [property: JsonPropertyName("data")] string Data
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/Part.cs b/BotNet.Services/Gemini/Models/Part.cs
new file mode 100644
index 0000000..20bf224
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/Part.cs
@@ -0,0 +1,8 @@
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record Part(
+        [property: JsonPropertyName("text"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] string? Text = null,
+        [property: JsonPropertyName("inline_data"), JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] InlineData? InlineData = null
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/PromptFeedback.cs b/BotNet.Services/Gemini/Models/PromptFeedback.cs
new file mode 100644
index 0000000..e39f7f7
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/PromptFeedback.cs
@@ -0,0 +1,8 @@
+using System.Collections.Immutable;
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record PromptFeedback(
+        [property: JsonPropertyName("safetyRatings")] ImmutableList<SafetyRating> SafetyRatings
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/SafetyRating.cs b/BotNet.Services/Gemini/Models/SafetyRating.cs
new file mode 100644
index 0000000..3a38a9f
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/SafetyRating.cs
@@ -0,0 +1,8 @@
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record SafetyRating(
+        [property: JsonPropertyName("category")] string Category,
+        [property: JsonPropertyName("probability")] string Probability
+    );
+}
diff --git a/BotNet.Services/Gemini/Models/SafetySettings.cs b/BotNet.Services/Gemini/Models/SafetySettings.cs
new file mode 100644
index 0000000..178606c
--- /dev/null
+++ b/BotNet.Services/Gemini/Models/SafetySettings.cs
@@ -0,0 +1,8 @@
+using System.Text.Json.Serialization;
+
+namespace BotNet.Services.Gemini.Models {
+    public sealed record SafetySettings(
+        [property: JsonPropertyName("category")] string Category,
+        [property: JsonPropertyName("threshold")] string Threshold
+    );
+}
diff --git a/BotNet.Services/Gemini/ServiceCollectionExtensions.cs b/BotNet.Services/Gemini/ServiceCollectionExtensions.cs
new file mode 100644
index 0000000..205a717
--- /dev/null
+++ b/BotNet.Services/Gemini/ServiceCollectionExtensions.cs
@@ -0,0 +1,10 @@
+using Microsoft.Extensions.DependencyInjection;
+
+namespace BotNet.Services.Gemini {
+    public static class ServiceCollectionExtensions {
+        public static IServiceCollection AddGeminiClient(this IServiceCollection services) {
+            services.AddTransient<GeminiClient>();
+            return services;
+        }
+    }
+}
diff --git a/BotNet/Program.cs b/BotNet/Program.cs
index 233d33f..548f1cb 100644
--- a/BotNet/Program.cs
+++ b/BotNet/Program.cs
@@ -13,6 +13,7 @@
 using BotNet.Services.ColorCard;
 using BotNet.Services.Craiyon;
 using BotNet.Services.DynamicExpresso;
+using BotNet.Services.Gemini;
 using BotNet.Services.GoogleMap;
 using BotNet.Services.Hosting;
 using BotNet.Services.ImageConverter;
@@ -57,6 +58,7 @@
 builder.Services.Configure<GoogleMapOptions>(builder.Configuration.GetSection("GoogleMapOptions"));
 builder.Services.Configure<WeatherOptions>(builder.Configuration.GetSection("WeatherOptions"));
 builder.Services.Configure<CommandPrioritizationOptions>(builder.Configuration.GetSection("CommandPrioritizationOptions"));
+builder.Services.Configure<GeminiOptions>(builder.Configuration.GetSection("GeminiOptions"));
 builder.Services.AddHttpClient();
 builder.Services.AddFontService();
 builder.Services.AddColorCardRenderer();
@@ -84,6 +86,7 @@
 builder.Services.AddCommandHandlers();
 builder.Services.AddCommandPriorityCategorizer();
 builder.Services.AddBotProfileAccessor();
+builder.Services.AddGeminiClient();
 
 // MediatR
 builder.Services.AddMediatR(config => {

From b6229b353d5621347cc061ace67c0afefba64091 Mon Sep 17 00:00:00 2001
From: Ronny Gunawan <3048897+ronnygunawan@users.noreply.github.com>
Date: Thu, 1 Feb 2024 01:17:23 +0700
Subject: [PATCH 2/2] Implemented Gemini

---
 .../AI/Gemini/GeminiTextPromptHandler.cs      | 96 +++++--------------
 BotNet.Services/Gemini/Models/Content.cs      |  8 +-
 2 files changed, 32 insertions(+), 72 deletions(-)

diff --git a/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs b/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
index e0751c2..cec7729 100644
--- a/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
+++ b/BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
@@ -1,12 +1,8 @@
-using BotNet.CommandHandlers.Art;
-using BotNet.Commands;
+using BotNet.Commands;
 using BotNet.Commands.AI.Gemini;
-using BotNet.Commands.AI.OpenAI;
-using BotNet.Commands.AI.Stability;
 using BotNet.Commands.BotUpdate.Message;
 using BotNet.Commands.ChatAggregate;
 using BotNet.Commands.CommandPrioritization;
-using BotNet.Commands.SenderAggregate;
 using BotNet.Services.Gemini;
 using BotNet.Services.Gemini.Models;
 using BotNet.Services.MarkdownV2;
 using BotNet.Services.RateLimit;
@@ -20,7 +16,6 @@ namespace BotNet.CommandHandlers.AI.Gemini {
     public sealed class GeminiTextPromptHandler(
         ITelegramBotClient telegramBotClient,
         GeminiClient geminiClient,
-        ICommandQueue commandQueue,
         ITelegramMessageCache telegramMessageCache,
         CommandPriorityCategorizer commandPriorityCategorizer,
         ILogger<GeminiTextPromptHandler> logger
@@ -29,7 +24,6 @@ ILogger<GeminiTextPromptHandler> logger
 
         private readonly ITelegramBotClient _telegramBotClient = telegramBotClient;
         private readonly GeminiClient _geminiClient = geminiClient;
-        private readonly ICommandQueue _commandQueue = commandQueue;
         private readonly ITelegramMessageCache _telegramMessageCache = telegramMessageCache;
         private readonly CommandPriorityCategorizer _commandPriorityCategorizer = commandPriorityCategorizer;
         private readonly ILogger<GeminiTextPromptHandler> _logger = logger;
@@ -62,17 +56,33 @@ public Task Handle(GeminiTextPrompt textPrompt, CancellationToken cancellationTo
             // Fire and forget
             Task.Run(async () => {
-                List<Content> messages = [
-                    Content.FromText("model", "The following is a conversation with an AI assistant. The assistant is helpful, creative, direct, concise, and always get to the point. When user asks for an image to be generated, the AI assistant should respond with \"ImageGeneration:\" followed by comma separated list of features to be expected from the generated image.")
-                ];
+                List<Content> messages = [];
 
-                messages.AddRange(
-                    from message in textPrompt.Thread.Take(10).Reverse()
-                    select Content.FromText(
+                // Merge adjacent messages from same role
+                foreach (MessageBase message in textPrompt.Thread.Reverse()) {
+                    Content content = Content.FromText(
                         role: message.Sender.GeminiRole,
                         text: message.Text
-                    )
-                );
+                    );
+
+                    if (messages.Count > 0
+                        && messages[^1].Role == message.Sender.GeminiRole) {
+                        messages[^1].Add(content);
+                    } else {
+                        messages.Add(content);
+                    }
+                }
+
+                // Trim thread longer than 10 messages
+                while (messages.Count > 10) {
+                    messages.RemoveAt(0);
+                }
+
+                // Thread must start with user message
+                while (messages.Count > 0
+                    && messages[0].Role != "user") {
+                    messages.RemoveAt(0);
+                }
 
                 messages.Add(
                     Content.FromText("user", textPrompt.Prompt)
                 );
@@ -91,62 +101,6 @@ select Content.FromText(
                     cancellationToken: cancellationToken
                 );
 
-                // Handle image generation intent
-                if (response.StartsWith("ImageGeneration:")) {
-                    if (textPrompt.Command.Sender is not VIPSender) {
-                        try {
-                            ArtCommandHandler.IMAGE_GENERATION_RATE_LIMITER.ValidateActionRate(textPrompt.Command.Chat.Id, textPrompt.Command.Sender.Id);
-                        } catch (RateLimitExceededException exc) {
-                            await _telegramBotClient.SendTextMessageAsync(
-                                chatId: textPrompt.Command.Chat.Id,
-                                text: $"Anda belum mendapat giliran. Coba lagi {exc.Cooldown}.",
-                                parseMode: ParseMode.Html,
-                                replyToMessageId: textPrompt.Command.MessageId,
-                                cancellationToken: cancellationToken
-                            );
-                            return;
-                        }
-                    }
-
-                    string imageGenerationPrompt = response.Substring(response.IndexOf(':') + 1).Trim();
-                    switch (textPrompt.Command) {
-                        case { Sender: VIPSender }:
-                            await _commandQueue.DispatchAsync(
-                                command: new OpenAIImageGenerationPrompt(
-                                    callSign: "Gemini",
-                                    prompt: imageGenerationPrompt,
-                                    promptMessageId: textPrompt.Command.MessageId,
-                                    responseMessageId: new(responseMessage.MessageId),
-                                    chat: textPrompt.Command.Chat,
-                                    sender: textPrompt.Command.Sender
-                                )
-                            );
-                            break;
-                        case { Chat: HomeGroupChat }:
-                            await _commandQueue.DispatchAsync(
-                                command: new StabilityTextToImagePrompt(
-                                    callSign: "Gemini",
-                                    prompt: imageGenerationPrompt,
-                                    promptMessageId: textPrompt.Command.MessageId,
-                                    responseMessageId: new(responseMessage.MessageId),
-                                    chat: textPrompt.Command.Chat,
-                                    sender: textPrompt.Command.Sender
-                                )
-                            );
-                            break;
-                        default:
-                            await _telegramBotClient.EditMessageTextAsync(
-                                chatId: textPrompt.Command.Chat.Id,
-                                messageId: responseMessage.MessageId,
-                                text: MarkdownV2Sanitizer.Sanitize("Image generation tidak bisa dipakai di sini."),
-                                parseMode: ParseMode.MarkdownV2,
-                                cancellationToken: cancellationToken
-                            );
-                            break;
-                    }
-                    return;
-                }
-
                 // Finalize message
                 try {
                     responseMessage = await telegramBotClient.EditMessageTextAsync(
diff --git a/BotNet.Services/Gemini/Models/Content.cs b/BotNet.Services/Gemini/Models/Content.cs
index bdf6c60..36197b6 100644
--- a/BotNet.Services/Gemini/Models/Content.cs
+++ b/BotNet.Services/Gemini/Models/Content.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
 using System.Text.Json.Serialization;
 
 namespace BotNet.Services.Gemini.Models {
@@ -12,5 +13,10 @@ public record Content(
                 new(Text: text)
             ]
         );
+
+        public void Add(Content content) {
+            if (content.Role != Role) throw new InvalidOperationException();
+            Parts!.AddRange(content.Parts!);
+        }
     }
 }
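
Notes on the Gemini integration (reviewer sketches; none of the code below belongs to either patch).

PATCH 1's GeminiClient can be exercised on its own, outside the Telegram handler. A minimal sketch, assuming only the types added above plus Options.Create and NullLogger<T> from the stock Microsoft.Extensions packages; the GEMINI_API_KEY environment variable is just this sketch's way of supplying a key:

using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using BotNet.Services.Gemini;
using BotNet.Services.Gemini.Models;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;

internal static class GeminiSmokeTest {
    public static async Task Main() {
        // Assumed for this sketch only: the key comes from an environment variable.
        GeminiClient client = new(
            httpClient: new HttpClient(),
            geminiOptionsAccessor: Options.Create(new GeminiOptions {
                ApiKey = Environment.GetEnvironmentVariable("GEMINI_API_KEY")
            }),
            logger: NullLogger<GeminiClient>.Instance
        );

        // Roles follow SenderBase.GeminiRole: "user" for humans, "model" for the bot.
        string reply = await client.ChatAsync(
            messages: [
                Content.FromText("user", "Hello, what can you do?")
            ],
            maxTokens: 512,
            cancellationToken: CancellationToken.None
        );

        Console.WriteLine(reply);
    }
}

This is the same call GeminiTextPromptHandler makes after assembling the thread, with maxTokens fixed at 512.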
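
The records under BotNet.Services/Gemini/Models map onto the generateContent wire format through their JsonPropertyName attributes. A sketch that serializes the same request shape ChatAsync builds and prints it; GeminiClient itself goes through JsonContent.Create, so the explicit JsonSerializer call here is only for illustration:

using System;
using System.Collections.Immutable;
using System.Text.Json;
using BotNet.Services.Gemini.Models;

internal static class GeminiWireFormatDemo {
    public static void Main() {
        GeminiRequest request = new(
            Contents: ImmutableList.Create(
                Content.FromText("user", "Hello")
            ),
            SafetySettings: null,
            GenerationConfig: new(
                MaxOutputTokens: 512
            )
        );

        // Prints roughly:
        // {"contents":[{"role":"user","parts":[{"text":"Hello"}]}],
        //  "generationConfig":{"maxOutputTokens":512}}
        // Null-valued optional properties are omitted by the JsonIgnore conditions.
        Console.WriteLine(JsonSerializer.Serialize(request));
    }
}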
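
Program.cs binds the new options with builder.Services.Configure<GeminiOptions>(builder.Configuration.GetSection("GeminiOptions")), so deployments need a GeminiOptions:ApiKey configuration value. A sketch of the equivalent wiring against an in-memory configuration source, which can be handy in tests; the dummy key and the ServiceCollection scaffolding are assumptions of the sketch, and it relies on AddHttpClient() making a default HttpClient available for constructor injection, as BotNet's Program.cs does for its other HTTP-based services:

using System.Collections.Generic;
using BotNet.Services.Gemini;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

internal static class GeminiOptionsBindingDemo {
    public static GeminiClient BuildClient() {
        // In production the same section comes from appsettings.json or user secrets.
        IConfiguration configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?> {
                ["GeminiOptions:ApiKey"] = "dummy-key"
            })
            .Build();

        ServiceCollection services = new();
        services.AddLogging();
        services.AddHttpClient();
        services.Configure<GeminiOptions>(configuration.GetSection("GeminiOptions"));
        services.AddGeminiClient();

        return services.BuildServiceProvider().GetRequiredService<GeminiClient>();
    }
}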
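
PATCH 2 replaces the seeded "model" instruction with a merge pass: adjacent thread messages with the same GeminiRole are folded into one Content via the new Content.Add, and leading non-user entries are dropped so the contents list starts with a "user" turn, which is what the gemini-pro chat endpoint expects. A small sketch of that merge behaviour in isolation:

using System;
using BotNet.Services.Gemini.Models;

internal static class ContentMergeDemo {
    public static void Main() {
        Content first = Content.FromText("user", "First message");
        Content second = Content.FromText("user", "Second message");

        // Same role, so the parts are folded into one content entry,
        // mirroring the merge loop in GeminiTextPromptHandler.
        first.Add(second);

        Console.WriteLine(first.Parts!.Count);    // 2
        Console.WriteLine(first.Parts![1].Text);  // Second message

        // Mixing roles is rejected by Content.Add.
        Content model = Content.FromText("model", "Reply");
        try {
            first.Add(model);
        } catch (InvalidOperationException) {
            Console.WriteLine("roles must match");
        }
    }
}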
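
On the response side, ChatAsync only reads candidates[0].content.parts[0].text and falls back to an empty string at every missing level. A sketch of that extraction against a hand-written payload; the JSON literal is illustrative, not captured from the live API:

using System;
using System.Text.Json;
using BotNet.Services.Gemini.Models;

internal static class GeminiResponseParsingDemo {
    public static void Main() {
        // Illustrative payload shaped like the fields the records map.
        const string json = """
        {
            "candidates": [
                {
                    "content": { "role": "model", "parts": [ { "text": "Hi there" } ] },
                    "finishReason": "STOP",
                    "index": 0
                }
            ]
        }
        """;

        GeminiResponse? response = JsonSerializer.Deserialize<GeminiResponse>(json);

        // Same extraction path ChatAsync uses, collapsed with null-conditional operators.
        string reply = response?.Candidates?[0].Content?.Parts?[0].Text ?? "";
        Console.WriteLine(reply); // Hi there
    }
}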