Skip to content

Commit

Permalink
Merge branch 'main' into query
Browse files Browse the repository at this point in the history
  • Loading branch information
ronnygunawan authored Feb 17, 2024
2 parents 4fa3945 + 7864a30 commit 84f83bb
Show file tree
Hide file tree
Showing 36 changed files with 655 additions and 37 deletions.
149 changes: 149 additions & 0 deletions BotNet.CommandHandlers/AI/Gemini/GeminiTextPromptHandler.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
using BotNet.Commands;
using BotNet.Commands.AI.Gemini;
using BotNet.Commands.BotUpdate.Message;
using BotNet.Commands.ChatAggregate;
using BotNet.Commands.CommandPrioritization;
using BotNet.Commands.SenderAggregate;
using BotNet.Services.Gemini;
using BotNet.Services.Gemini.Models;
using BotNet.Services.RateLimit;
using BotNet.Services.TelegramClient;
using Microsoft.Extensions.Logging;
using Telegram.Bot;
using Telegram.Bot.Types;
using Telegram.Bot.Types.Enums;
using Telegram.Bot.Types.ReplyMarkups;

namespace BotNet.CommandHandlers.AI.Gemini {
	/// <summary>
	/// Handles <see cref="GeminiTextPrompt"/> commands: rate-limits the caller,
	/// builds a Gemini chat thread from the cached Telegram conversation,
	/// requests a completion, and sends the result back to the chat.
	/// </summary>
	public sealed class GeminiTextPromptHandler(
		ITelegramBotClient telegramBotClient,
		GeminiClient geminiClient,
		ITelegramMessageCache telegramMessageCache,
		CommandPriorityCategorizer commandPriorityCategorizer,
		ICommandQueue commandQueue,
		ILogger<GeminiTextPromptHandler> logger
	) : ICommandHandler<GeminiTextPrompt> {
		// Regular chats: 5 calls per user per chat per 5 minutes.
		internal static readonly RateLimiter CHAT_RATE_LIMITER = RateLimiter.PerUserPerChat(5, TimeSpan.FromMinutes(5));
		// Home group / VIP senders get a shorter cooldown window.
		internal static readonly RateLimiter VIP_CHAT_RATE_LIMITER = RateLimiter.PerUserPerChat(5, TimeSpan.FromMinutes(2));

		private readonly ITelegramBotClient _telegramBotClient = telegramBotClient;
		private readonly GeminiClient _geminiClient = geminiClient;
		private readonly ITelegramMessageCache _telegramMessageCache = telegramMessageCache;
		private readonly CommandPriorityCategorizer _commandPriorityCategorizer = commandPriorityCategorizer;
		private readonly ICommandQueue _commandQueue = commandQueue;
		private readonly ILogger<GeminiTextPromptHandler> _logger = logger;

		/// <summary>
		/// Validates the rate limit synchronously, then processes the prompt on a
		/// background task (fire and forget). Returns a completed task immediately
		/// unless the rate limit is exceeded, in which case the rejection message
		/// send is returned instead.
		/// </summary>
		public Task Handle(GeminiTextPrompt textPrompt, CancellationToken cancellationToken) {
			try {
				if (textPrompt.Command.Chat is HomeGroupChat
					|| textPrompt.Command.Sender is VIPSender) {
					VIP_CHAT_RATE_LIMITER.ValidateActionRate(
						chatId: textPrompt.Command.Chat.Id,
						userId: textPrompt.Command.Sender.Id
					);
				} else {
					CHAT_RATE_LIMITER.ValidateActionRate(
						chatId: textPrompt.Command.Chat.Id,
						userId: textPrompt.Command.Sender.Id
					);
				}
			} catch (RateLimitExceededException exc) {
				return _telegramBotClient.SendTextMessageAsync(
					chatId: textPrompt.Command.Chat.Id,
					text: $"<code>Anda terlalu banyak memanggil AI. Coba lagi {exc.Cooldown}.</code>",
					parseMode: ParseMode.Html,
					replyToMessageId: textPrompt.Command.MessageId,
					cancellationToken: cancellationToken
				);
			}

			// Fire and forget
			Task.Run(async () => {
				try {
					List<Content> messages = [
						Content.FromText("user", "Act as an AI assistant. The assistant is helpful, creative, direct, concise, and always get to the point."),
						Content.FromText("model", "Sure.")
					];

					// Merge adjacent messages from same role
					foreach (MessageBase message in textPrompt.Thread.Reverse()) {
						Content content = Content.FromText(
							role: message.Sender.GeminiRole,
							text: message.Text
						);

						if (messages.Count > 0
							&& messages[^1].Role == message.Sender.GeminiRole) {
							messages[^1].Add(content);
						} else {
							messages.Add(content);
						}
					}

					// Trim thread longer than 10 messages
					while (messages.Count > 10) {
						messages.RemoveAt(0);
					}

					// Gemini requires the thread to start with a user message
					while (messages.Count > 0
						&& messages[0].Role != "user") {
						messages.RemoveAt(0);
					}

					// Merge user message with replied to message if thread is initiated by replying to another user
					if (messages.Count > 0
						&& messages[^1].Role == "user") {
						messages[^1].Add(Content.FromText("user", textPrompt.Prompt));
					} else {
						messages.Add(Content.FromText("user", textPrompt.Prompt));
					}

					string response = await _geminiClient.ChatAsync(
						messages: messages,
						maxTokens: 512,
						cancellationToken: cancellationToken
					);

					// Send response
					Message responseMessage;
					try {
						responseMessage = await _telegramBotClient.SendTextMessageAsync(
							chatId: textPrompt.Command.Chat.Id,
							text: response,
							parseModes: [ParseMode.MarkdownV2, ParseMode.Markdown, ParseMode.Html],
							replyToMessageId: textPrompt.Command.MessageId,
							replyMarkup: new InlineKeyboardMarkup(
								InlineKeyboardButton.WithUrl(
									text: "Generated by Google Gemini Pro",
									url: "https://deepmind.google/technologies/gemini/"
								)
							),
							cancellationToken: cancellationToken
						);
					} catch (Exception exc) {
						// Sending failed (e.g. unparseable markup); tell the user and give up.
						_logger.LogError(exc, "Could not send Gemini response");
						await _telegramBotClient.SendTextMessageAsync(
							chatId: textPrompt.Command.Chat.Id,
							text: "😵",
							parseMode: ParseMode.Html,
							replyToMessageId: textPrompt.Command.MessageId,
							cancellationToken: cancellationToken
						);
						return;
					}

					// Track thread so follow-up replies continue the conversation
					_telegramMessageCache.Add(
						message: AIResponseMessage.FromMessage(
							message: responseMessage,
							replyToMessage: textPrompt.Command,
							callSign: "Gemini",
							commandPriorityCategorizer: _commandPriorityCategorizer
						)
					);
				} catch (OperationCanceledException) {
					// Terminate gracefully
				} catch (Exception exc) {
					// Without this catch, any failure in ChatAsync or the cache add
					// would surface as an unobserved task exception and be lost.
					_logger.LogError(exc, "Could not handle Gemini prompt");
				}
			});

			return Task.CompletedTask;
		}
	}
}
26 changes: 22 additions & 4 deletions BotNet.CommandHandlers/AI/OpenAI/AskCommandHandler.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,12 @@
using BotNet.Services.OpenAI;
using BotNet.Services.OpenAI.Models;
using BotNet.Services.RateLimit;
using BotNet.Services.TelegramClient;
using Microsoft.Extensions.Logging;
using Telegram.Bot;
using Telegram.Bot.Types;
using Telegram.Bot.Types.Enums;
using Telegram.Bot.Types.ReplyMarkups;

namespace BotNet.CommandHandlers.AI.OpenAI {
public sealed class AskCommandHandler(
Expand Down Expand Up @@ -81,21 +83,37 @@ select ChatMessage.FromText(
responseMessage = await telegramBotClient.EditMessageTextAsync(
chatId: askCommand.Command.Chat.Id,
messageId: responseMessage.MessageId,
text: MarkdownV2Sanitizer.Sanitize(response),
parseMode: ParseMode.MarkdownV2,
text: response,
parseModes: [ParseMode.MarkdownV2, ParseMode.Markdown, ParseMode.Html],
replyMarkup: new InlineKeyboardMarkup(
InlineKeyboardButton.WithUrl(
text: askCommand switch {
({ Command: { Sender: VIPSender } or { Chat: HomeGroupChat } }) => "Generated by OpenAI GPT-4",
_ => "Generated by OpenAI GPT-3.5 Turbo"
},
url: "https://openai.com/gpt-4"
)
),
cancellationToken: cancellationToken
);
} catch (Exception exc) {
_logger.LogError(exc, null);
throw;
await telegramBotClient.EditMessageTextAsync(
chatId: askCommand.Command.Chat.Id,
messageId: responseMessage.MessageId,
text: "😵",
parseMode: ParseMode.Html,
cancellationToken: cancellationToken
);
return;
}

// Track thread
_telegramMessageCache.Add(
message: AIResponseMessage.FromMessage(
message: responseMessage,
replyToMessage: askCommand.Command,
callSign: "AI",
callSign: "GPT",
commandPriorityCategorizer: _commandPriorityCategorizer
)
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
using Telegram.Bot;
using Telegram.Bot.Types;
using Telegram.Bot.Types.Enums;
using Telegram.Bot.Types.ReplyMarkups;

namespace BotNet.CommandHandlers.AI.OpenAI {
public sealed class OpenAIImageGenerationPromptHandler(
Expand Down Expand Up @@ -59,6 +60,12 @@ await _telegramBotClient.DeleteMessageAsync(
Message responseMessage = await _telegramBotClient.SendPhotoAsync(
chatId: command.Chat.Id,
photo: new InputFileUrl(generatedImageUrl),
replyMarkup: new InlineKeyboardMarkup(
InlineKeyboardButton.WithUrl(
text: "Generated by OpenAI DALL-E 3",
url: "https://openai.com/dall-e-3"
)
),
replyToMessageId: command.PromptMessageId,
cancellationToken: cancellationToken
);
Expand All @@ -70,6 +77,8 @@ await _telegramBotClient.DeleteMessageAsync(
} catch (OperationCanceledException) {
// Terminate gracefully
// TODO: tie up loose ends
} catch (Exception exc) {
_logger.LogError(exc, "Could not handle command");
}
});

Expand Down
19 changes: 17 additions & 2 deletions BotNet.CommandHandlers/AI/OpenAI/OpenAIImagePromptHandler.cs
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,13 @@
using BotNet.Services.OpenAI;
using BotNet.Services.OpenAI.Models;
using BotNet.Services.RateLimit;
using BotNet.Services.TelegramClient;
using Microsoft.Extensions.Logging;
using SkiaSharp;
using Telegram.Bot;
using Telegram.Bot.Types;
using Telegram.Bot.Types.Enums;
using Telegram.Bot.Types.ReplyMarkups;

namespace BotNet.CommandHandlers.AI.OpenAI {
public sealed class OpenAIImagePromptHandler(
Expand Down Expand Up @@ -180,12 +182,25 @@ await _telegramBotClient.EditMessageTextAsync(
chatId: imagePrompt.Command.Chat.Id,
messageId: responseMessage.MessageId,
text: MarkdownV2Sanitizer.Sanitize(response),
parseMode: ParseMode.MarkdownV2,
parseModes: [ParseMode.MarkdownV2, ParseMode.Markdown, ParseMode.Html],
replyMarkup: new InlineKeyboardMarkup(
InlineKeyboardButton.WithUrl(
text: "Generated by OpenAI GPT-4",
url: "https://openai.com/gpt-4"
)
),
cancellationToken: cancellationToken
);
} catch (Exception exc) {
_logger.LogError(exc, null);
throw;
await telegramBotClient.EditMessageTextAsync(
chatId: imagePrompt.Command.Chat.Id,
messageId: responseMessage.MessageId,
text: "😵",
parseMode: ParseMode.Html,
cancellationToken: cancellationToken
);
return;
}

// Track thread
Expand Down
24 changes: 21 additions & 3 deletions BotNet.CommandHandlers/AI/OpenAI/OpenAITextPromptHandler.cs
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,12 @@
using BotNet.Services.OpenAI;
using BotNet.Services.OpenAI.Models;
using BotNet.Services.RateLimit;
using BotNet.Services.TelegramClient;
using Microsoft.Extensions.Logging;
using Telegram.Bot;
using Telegram.Bot.Types;
using Telegram.Bot.Types.Enums;
using Telegram.Bot.Types.ReplyMarkups;

namespace BotNet.CommandHandlers.AI.OpenAI {
public sealed class OpenAITextPromptHandler(
Expand Down Expand Up @@ -145,13 +147,29 @@ await _telegramBotClient.EditMessageTextAsync(
responseMessage = await telegramBotClient.EditMessageTextAsync(
chatId: textPrompt.Command.Chat.Id,
messageId: responseMessage.MessageId,
text: MarkdownV2Sanitizer.Sanitize(response),
parseMode: ParseMode.MarkdownV2,
text: response,
parseModes: [ParseMode.MarkdownV2, ParseMode.Markdown, ParseMode.Html],
replyMarkup: new InlineKeyboardMarkup(
InlineKeyboardButton.WithUrl(
text: textPrompt switch {
({ Command: { Sender: VIPSender } or { Chat: HomeGroupChat } }) => "Generated by OpenAI GPT-4",
_ => "Generated by OpenAI GPT-3.5 Turbo"
},
url: "https://openai.com/gpt-4"
)
),
cancellationToken: cancellationToken
);
} catch (Exception exc) {
_logger.LogError(exc, null);
throw;
await telegramBotClient.EditMessageTextAsync(
chatId: textPrompt.Command.Chat.Id,
messageId: responseMessage.MessageId,
text: "😵",
parseMode: ParseMode.Html,
cancellationToken: cancellationToken
);
return;
}

// Track thread
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
using Telegram.Bot;
using Telegram.Bot.Types;
using Telegram.Bot.Types.Enums;
using Telegram.Bot.Types.ReplyMarkups;

namespace BotNet.CommandHandlers.AI.Stability {
public sealed class StabilityTextToImagePromptHandler(
Expand Down Expand Up @@ -66,6 +67,12 @@ await _telegramBotClient.DeleteMessageAsync(
Message responseMessage = await _telegramBotClient.SendPhotoAsync(
chatId: command.Chat.Id,
photo: new InputFileStream(generatedImageStream, "art.png"),
replyMarkup: new InlineKeyboardMarkup(
InlineKeyboardButton.WithUrl(
text: "Generated by stability.ai SDXL",
url: "https://stability.ai/stable-image"
)
),
replyToMessageId: command.PromptMessageId,
cancellationToken: cancellationToken
);
Expand Down
4 changes: 2 additions & 2 deletions BotNet.CommandHandlers/Art/ArtCommandHandler.cs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ public Task Handle(ArtCommand command, CancellationToken cancellationToken) {

await _commandQueue.DispatchAsync(
new OpenAIImageGenerationPrompt(
callSign: "AI",
callSign: "GPT",
prompt: command.Prompt,
promptMessageId: command.PromptMessageId,
responseMessageId: new(busyMessage.MessageId),
Expand All @@ -69,7 +69,7 @@ await _commandQueue.DispatchAsync(

await _commandQueue.DispatchAsync(
new StabilityTextToImagePrompt(
callSign: "AI",
callSign: "GPT",
prompt: command.Prompt,
promptMessageId: command.PromptMessageId,
responseMessageId: new(busyMessage.MessageId),
Expand Down
Loading

0 comments on commit 84f83bb

Please sign in to comment.