Compare commits


40 Commits
master ... dev

Author SHA1 Message Date
90d5dd973c Added demo bot for testing. Supports image input, but does not have dialog memory or tools usage yet. 2025-08-27 21:14:17 +05:00
f195d77e67 Made Discord bot service for getting llm response out of Discord message 2025-08-27 21:13:04 +05:00
92f536d407 Fixed local import 2025-08-25 20:47:01 +05:00
a322fbae84 Renamed adapter class to MessengerClientMessageAdapter for clarity 2025-08-25 19:09:44 +05:00
bd6a55cd85 Created DiscordMessageAdapter for creating ClientMessage instance instead of a builder's class method 2025-08-25 12:32:35 +05:00
4fd5e06eaa Purged unused imports 2025-08-24 20:01:21 +05:00
1f7aa0a5ee Merge remote-tracking branch 'origin/dev' into dev 2025-08-24 19:59:31 +05:00
3b6fc28d4c Added ClientMessage -> OpenAIMessage object adapter 2025-08-24 19:59:21 +05:00
c024d85db5 Added ClientMessage -> OpenAIMessage object adapter 2025-08-24 19:59:07 +05:00
a5bfccb19a Added OpenAI response -> OpenAIMessage object adapter 2025-08-24 19:43:02 +05:00
7dc1d485b1 Finished some basic OpenAI interaction. Next are adapters. 2025-08-24 18:26:08 +05:00
8255d54e56 Added empty attachments list for test message 2025-08-24 18:25:16 +05:00
37beca0dd6 Modified typing for InputImage 2025-08-24 18:24:27 +05:00
087566811a Added __init__ to Message Builder interface 2025-08-17 15:24:01 +05:00
095bd828cf Added client message builder 2025-08-17 15:20:32 +05:00
0387dafef8 Removed openai test run code 2025-08-15 12:58:37 +05:00
0e2becc212 Commented out non-functional code 2025-08-15 12:37:57 +05:00
b69f169892 Merge remote-tracking branch 'origin/dev' into dev 2025-08-15 12:34:55 +05:00
40b3b900ac Wrong apostrophe character in comments 2025-08-15 12:34:41 +05:00
73bf08843f Update reginaldCog/messenger_clients/discord_data_models.py 2025-08-15 09:33:05 +02:00
d2edf0e60d Delete reginaldCog/messenger_clients/_common.py 2025-08-15 09:32:25 +02:00
bd4b05d2d4 Delete reginaldCog/llm_clients/discord_client_interfaces.py 2025-08-15 09:32:03 +02:00
eb7733be69 Removed non-finished code 2025-08-15 12:28:40 +05:00
131999816b File structure refactor 2025-08-15 12:28:39 +05:00
ff70196756 File structure refactor 2025-08-15 11:09:40 +05:00
180c167a43 File structure refactor 2025-08-15 11:01:21 +05:00
75af912bf5 File_structure refactor 2025-08-15 10:41:35 +05:00
a64a07511d Commented out send_long_message method and auto PEP-8 reginald.py 2025-07-18 14:31:51 +05:00
9209cc568d Made Prompt dataclass immutable and got rid of the add_message method. openai_client\models.py's test run will remain broken for now 2025-07-18 14:28:23 +05:00
7056315d5f Added llm_interface.py for connecting Discord bot with LLMs (only Open AI at the moment) 2025-07-18 14:26:35 +05:00
b77dc31ea5 Added Response dataclass for better openai's response deserialization. 2025-07-14 20:28:00 +05:00
ad59a695af Experimented with more convenient (although, less explicit) way of passing data into abstract dataclasses. 2025-07-13 19:47:37 +05:00
89b98fc5af Experimented with more convenient (although, less explicit) way of passing data into abstract dataclasses. 2025-07-13 19:45:09 +05:00
97252a8062 Added models.py in openai_client containing abstract dataclasses for use with openai
Moved all dataclasses to .openai_client.models.py
Made prompt into its own dataclass
Removed to_dict methods because dataclasses/asdict seems to be sufficient
2025-07-11 16:59:20 +05:00
0534ec6b15 Added models.py containing abstract dataclasses for Discord 2025-07-10 21:22:52 +05:00
11f39bb673 Added package structure for future use 2025-07-10 20:52:21 +05:00
247e4417f5 First attempt at making openai stuff pretty. As it uses replies instead of completions, update of openai library is required. 2025-07-05 18:16:48 +05:00
bfe22e40ee First attempt at making openai stuff pretty. As it uses replies instead of completions, update of openai library is required. 2025-07-05 18:14:49 +05:00
59902085b3 First attempt at making openai stuff pretty. As it uses replies instead of completions, update of openai library is required. 2025-07-05 17:48:36 +05:00
3d59e45e7e First attempt at making openai stuff pretty. As it uses replies instead of completions, update of openai library is required. 2025-07-05 17:46:13 +05:00
10 changed files with 515 additions and 33 deletions

38
reginaldCog/demo_bot.py Normal file

@@ -0,0 +1,38 @@
import os
import discord
from discord.ext import commands
from reginaldCog.messenger_clients.services import MessageService
TOKEN = os.getenv('SCREAMING_OPOSSUM')  # Discord bot token, read from the SCREAMING_OPOSSUM environment variable
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix='!', intents=intents)
@bot.event
async def on_ready():
print(f'Logged in as {bot.user} (ID: {bot.user.id})')
print('------')
try:
synced = await bot.tree.sync()
print(f'Synced {len(synced)} command(s).')
except Exception as e:
print(f'Failed to sync commands: {e}')
@bot.event
async def on_message(message: discord.Message):
if message.author == bot.user:
return
async with message.channel.typing():
message_service = MessageService(message)
response = await message_service.get_llm_response()
await message.channel.send(response)
print(response)
if __name__ == '__main__':
if TOKEN is None:
raise RuntimeError('Discord token is not set')
bot.run(TOKEN)

reginaldCog/llm_clients/llm_client.py Normal file

@@ -0,0 +1,192 @@
from abc import ABC, abstractmethod
from enum import Enum
from openai import OpenAI
from discord import Message
from reginaldCog.messenger_clients.messenger_client import ClientMessage, DiscordMessageAdapter
class ILLMContent(ABC):
pass
class OpenAIContent(ILLMContent):
def __init__(self):
self._content_items: list[dict[str, str]] = []
@property
def content_items(self) -> list[dict]:
return self._content_items
@content_items.setter
def content_items(self, value: list[dict[str, str]]):
self._content_items = value
class ILLMContentBuilder(ABC):
pass
class OpenAIContentBuilder(ILLMContentBuilder):
def __init__(self, content: OpenAIContent):
self.content = content
def add_output_text(self, text: str):
item = {"type": "output_text", "text": text}
self.content.content_items.append(item)
return self
def add_input_text(self, text: str):
item = {"type": "input_text", "text": text}
self.content.content_items.append(item)
return self
def add_input_image(self, image_url: str):
item = {"type": "input_image", "image_url": image_url}
self.content.content_items.append(item)
return self
def add_from_dict(self, item: dict):
self.content.content_items.append(item)
return self
class ILLMMessage(ABC):
pass
class OpenAIMessage(ILLMMessage):
def __init__(self):
self.content = OpenAIContent()
self.role = ""
@property
def to_dict(self):
return {"role": self.role, "content": self.content.content_items}
class ILLMMessageBuilder(ABC):
pass
class OpenAIMessageBuilder(ILLMMessageBuilder):
def __init__(self, message: OpenAIMessage):
self.message = message
def set_role(self, role: str):
self.message.role = role
return self
def set_content(self, content: OpenAIContent):
self.message.content = content
return self
class ILLMPrompt(ABC):
pass
class OpenAIPrompt(ILLMPrompt):
def __init__(self):
self.messages = []
@property
def to_list(self):
return [i_message.to_dict for i_message in self.messages]
class ILLMPromptBuilder(ABC):
@abstractmethod
def __init__(self, prompt: ILLMPrompt):
pass
@abstractmethod
def add_message(self, message: ILLMMessage):
pass
class OpenAIPromptBuilder(ILLMPromptBuilder):
def __init__(self, prompt: OpenAIPrompt):
self.prompt = prompt
def add_message(self, message: OpenAIMessage):
self.prompt.messages.append(message)
class ILLMClient(ABC):
@abstractmethod
def get_response(self, prompt: ILLMPrompt):
pass
class OpenAIClient(ILLMClient):
content_class = OpenAIContent
content_builder_class = OpenAIContentBuilder
message_class = OpenAIMessage
message_builder_class = OpenAIMessageBuilder
prompt_class = OpenAIPrompt
prompt_builder_class = OpenAIPromptBuilder
def __init__(self):
self.model = 'gpt-4.1-mini'
self.client = OpenAI()
def get_response(self, prompt: OpenAIPrompt):
response_input = {"model": self.model, "input": prompt.to_list}
return self.client.responses.create(**response_input)
class IMessageAdapter(ABC):
@abstractmethod
def to_message(self) -> ILLMMessage:
pass
class OpenAIResponseAdapter(IMessageAdapter):
def __init__(self, response):
self.response = response
self.response_output = response.output[0]
def to_message(self) -> OpenAIMessage:
content = OpenAIContent()
content_builder = OpenAIContentBuilder(content)
message = OpenAIMessage()
message_builder = OpenAIMessageBuilder(message)
message_builder.set_role(self.response_output.role)\
.set_content(content)
for i_content_item in self.response_output.content:
item = i_content_item.to_dict()
content_builder.add_from_dict(item)
return message
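# LMMClientType below maps each supported backend to its concrete client class,
# so adapters can reach the matching content/message/prompt builder classes via `.value`.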
class LMMClientType(Enum):
OPENAI = OpenAIClient
class MessengerClientMessageAdapter(IMessageAdapter):
def __init__(self, message: ClientMessage, llm_client: LMMClientType):
self.message = message
self.llm_client = llm_client
def to_message(self) -> ILLMMessage:
content = self.llm_client.value.content_class()
content_builder = self.llm_client.value.content_builder_class(content)
message = self.llm_client.value.message_class()
message_builder = self.llm_client.value.message_builder_class(message)
message_builder.set_role("user")\
.set_content(content)
if self.message.content:  # add a text item only when the message actually contains text
content_builder.add_input_text(self.message.content)
for i_image_url in self.message.image_urls:
content_builder.add_input_image(i_image_url)
return message
if __name__ == "__main__":
pass
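Taken together, the builders and adapters above compose into a simple request flow. The following is an editor's sketch rather than code from the changeset; it assumes OPENAI_API_KEY is set in the environment (the bare OpenAI() constructor reads it) and that the hard-coded gpt-4.1-mini default is available.

# Editor's sketch: compose a prompt with the builders, send it, and adapt the reply back.
content = OpenAIContent()
OpenAIContentBuilder(content).add_input_text("Hello, who are you?")

message = OpenAIMessage()
OpenAIMessageBuilder(message).set_role("user").set_content(content)

prompt = OpenAIPrompt()
OpenAIPromptBuilder(prompt).add_message(message)

client = OpenAIClient()
response = client.get_response(prompt)                # raw openai Responses API object
reply = OpenAIResponseAdapter(response).to_message()  # adapted back into an OpenAIMessage
print(reply.to_dict)                                  # {'role': 'assistant', 'content': [...]}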

openai_client/models.py Normal file

@@ -0,0 +1,71 @@
from dataclasses import dataclass, field
from abc import ABC
# region Content classes
@dataclass(frozen=True)
class Content(ABC):
type: str = field(init=False, default='')
@dataclass(frozen=True)
class InputText(Content):
type: str = field(init=False, default='input_text')
text: str
@dataclass(frozen=True)
class OutputText(Content):
type: str = field(init=False, default='output_text')
text: str
@dataclass(frozen=True)
class InputImage(Content):
type: str = field(init=False, default='input_image')
image_url: str | None = field(default=None)
file_id: str | None = field(default=None)
def __post_init__(self):
if self.image_url is None and self.file_id is None:
raise ValueError('Either `image_url` or `file_id` must be provided.')
@dataclass(frozen=True)
class UrlCitation(Content):
type: str = field(init=False, default='url_citation')
# To be done
@dataclass(frozen=True)
class FunctionCall(Content):
type: str = field(init=False, default='function_call')
id: str
call_id: str
name: str
arguments: dict
@dataclass(frozen=True)
class FunctionCallOutput(Content):
type: str = field(init=False, default='function_call_output')
call_id: str
output: str
# endregion Content classes
@dataclass(frozen=True)
class Message:
role: str
content: list[Content]
@dataclass(frozen=True)
class Prompt:
model: str
input: list[Message]
@dataclass(frozen=True)
class Response:
output: list[Message]
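Commit 97252a8062 notes that the explicit to_dict methods were dropped because dataclasses.asdict is sufficient. A minimal serialization sketch under that assumption (illustrative only, not part of the changeset):

# Editor's sketch: the frozen dataclasses serialize recursively with dataclasses.asdict.
from dataclasses import asdict

prompt = Prompt(
    model='gpt-4.1-mini',
    input=[Message(role='user',
                   content=[InputText(text='Describe this picture'),
                            InputImage(image_url='https://example.com/cat.png')])],
)
payload = asdict(prompt)
# payload['input'][0]['content'][0] -> {'type': 'input_text', 'text': 'Describe this picture'}
# Note: asdict keeps InputImage.file_id as None; None fields would need filtering before an API call.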

reginaldCog/messenger_clients/discord_data_models.py Normal file

@@ -0,0 +1,65 @@
from dataclasses import dataclass, field
from datetime import datetime
# region dataclasses
@dataclass(frozen=True)
class Attachment:
content_type: str
filename: str
id: int
size: int
url: str
ephemeral: bool = False
@dataclass(frozen=True)
class Channel:
created_at: datetime
id: int
name: str
def __str__(self) -> str:
return self.name
@dataclass(frozen=True)
class Role:
id: int
name: str
def __str__(self) -> str:
return self.name
@dataclass(frozen=True)
class Member:
bot: bool
created_at: datetime
display_name: str # For regular users this is just their global name or their username, but if they have a guild specific nickname then that is returned instead.
global_name: str # The user's global nickname, taking precedence over the username in display.
id: int
joined_at: datetime
mention: str
name: str # The user's username.
nick: str # The guild specific nickname of the user. Takes precedence over the global name.
roles: list[Role] = field(default_factory=list)
def __str__(self) -> str:
return self.name
@dataclass(frozen=True)
class Message:
author: Member
channel: Channel
created_at: datetime
id: int
attachments: list[Attachment] = field(default_factory=list)
channel_mentions: list[Channel] = field(default_factory=list)
content: str = ''
mentions: list[Member] = field(default_factory=list)
if __name__ == '__main__':
pass
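A short construction sketch for these immutable snapshot objects (illustrative only; every value below is made up):

# Editor's sketch: building the frozen dataclasses directly with example values.
from datetime import datetime, timezone

now = datetime.now(timezone.utc)
role = Role(id=1, name='moderator')
channel = Channel(created_at=now, id=42, name='general')
author = Member(bot=False, created_at=now, display_name='Ally', global_name='Alice',
                id=7, joined_at=now, mention='<@7>', name='alice', nick='Ally',
                roles=[role])
snapshot = Message(author=author, channel=channel, created_at=now, id=1001,
                   content='hello there')
print(snapshot.author, snapshot.channel)  # 'alice general' via the __str__ overrides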

reginaldCog/messenger_clients/messenger_client.py Normal file

@@ -0,0 +1,77 @@
from abc import ABC, abstractmethod
from enum import Enum
from discord import Message, Attachment
class IClientMessage(ABC):
@abstractmethod
def __init__(self):
pass
class ClientMessage(IClientMessage):
def __init__(self):
self.author_name: str = ''
self.content: str = ''
self.image_urls: list = []
class IMessageBuilder(ABC):
@abstractmethod
def __init__(self):
pass
@abstractmethod
def set_content(self, value: str):
pass
@abstractmethod
def set_author_name(self, value: str):
pass
@abstractmethod
def set_image_urls(self, value: list[str]):
pass
class DiscordMessageBuilder(IMessageBuilder):
def __init__(self, message: IClientMessage):
self.message = message
def set_author_name(self, value: str):
self.message.author_name = value
return self
def set_content(self, value: str):
self.message.content = value
return self
def set_image_urls(self, value: list[str]):
self.message.image_urls = value
return self
class IMessageAdapter(ABC):
@abstractmethod
def create_message(self, message: object) -> IClientMessage:
pass
@staticmethod
def validate_image_urls(urls_list: list[Attachment]) -> list[str]:
supported_image_formats = ('image/jpeg', 'image/png', 'image/webp', 'image/gif')
return [i_attachment.url for i_attachment in urls_list if i_attachment.content_type in supported_image_formats]
class DiscordMessageAdapter(IMessageAdapter):
def create_message(self, message: Message) -> IClientMessage:
client_message = ClientMessage()
message_builder = DiscordMessageBuilder(client_message)
urls_list = self.validate_image_urls(message.attachments)
message_builder.set_content(message.content)\
.set_author_name(message.author.name)\
.set_image_urls(urls_list)
return client_message
if __name__ == "__main__":
pass
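A minimal usage sketch for the adapter (not part of the changeset), assuming discord_message is a discord.Message taken from an on_message handler:

# Editor's sketch: adapt an incoming discord.Message into the messenger-agnostic ClientMessage.
client_message = DiscordMessageAdapter().create_message(discord_message)

print(client_message.author_name)  # the Discord username
print(client_message.content)      # plain message text
print(client_message.image_urls)   # only jpeg/png/webp/gif attachment URLs survive validation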

reginaldCog/messenger_clients/services.py Normal file

@@ -0,0 +1,39 @@
import asyncio
from discord import Message
from reginaldCog.messenger_clients.messenger_client import ClientMessage, DiscordMessageAdapter as MessengerDiscordAdapter
from reginaldCog.llm_clients.llm_client import LMMClientType, MessengerClientMessageAdapter
class MessageService:
def __init__(self, message: Message, llm_client: LMMClientType = LMMClientType.OPENAI):
self.message = message
self.llm_client = llm_client
async def get_llm_response(self) -> str:
# Adapt discord.Message to ClientMessage domain object
client_message: ClientMessage = MessengerDiscordAdapter().create_message(self.message)
# Create prompt and prompt builder for this LLM client
prompt = self.llm_client.value.prompt_class()
prompt_builder = self.llm_client.value.prompt_builder_class(prompt)
# Adapt the messenger client message into LLM message and add it to prompt
llm_message = MessengerClientMessageAdapter(client_message, self.llm_client).to_message()
prompt_builder.add_message(llm_message)
# Call the LLM client; run in executor if method is blocking sync
llm_client_instance = self.llm_client.value()
loop = asyncio.get_running_loop()
# Assuming get_response is blocking - run in executor:
response = await loop.run_in_executor(None, llm_client_instance.get_response, prompt)
# Extract plain text from the response (assuming OpenAIResponseAdapter is present)
from reginaldCog.llm_clients.llm_client import OpenAIResponseAdapter
response_adapter = OpenAIResponseAdapter(response)
message_obj = response_adapter.to_message()
# Concatenate all textual outputs for sending to Discord
texts = [item.get("text", "") for item in message_obj.content.content_items if item.get("type") == "output_text"]
return "\n".join(texts) if texts else "Sorry, no response generated."

reginaldCog/reginald.py

@@ -16,7 +16,6 @@ from .weather import time_now, get_current_weather, get_weather_forecast
from .tools_description import TOOLS
from .debug_stuff import debug
CALLABLE_FUNCTIONS = {
# Dictionary with functions to call.
# You can use globals()[func_name](**args) instead, but that's too implicit.
@@ -80,7 +79,6 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
if not (await self.is_admin(message) or await self.has_access(message.author)):
return # Ignore message if user has no permissions
guild = message.guild
channel_id = str(message.channel.id)
user_id = str(message.author.id)
@@ -145,10 +143,10 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
"content": f"[{summary['timestamp']}] Topics: {', '.join(summary['topics'])}\n{summary['summary']}"
})
formatted_messages += [{"role": "user", "content": f"{entry['user']}: {entry['content']}"} for entry in memory]
formatted_messages += [{"role": "user", "content": f"{entry['user']}: {entry['content']}"} for entry in
memory]
formatted_messages.append({"role": "user", "content": f"{user_name}: {prompt}"})
##################################################
# #
## Generate AI Response, put into response_text ##
@@ -166,7 +164,8 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
memory.append({"user": "Reginald", "content": response_text})
if len(memory) > self.short_term_memory_limit:
- summary = await self.summarize_memory(message, memory[:int(self.short_term_memory_limit * self.summary_retention_ratio)])
+ summary = await self.summarize_memory(message, memory[:int(
+ self.short_term_memory_limit * self.summary_retention_ratio)])
mid_memory.setdefault(channel_id, []).append({
"timestamp": datetime.datetime.now().strftime("%Y-%m-%d %H:%M"),
"topics": self.extract_topics_from_summary(summary),
@@ -174,13 +173,13 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
})
if len(mid_memory[channel_id]) > self.summary_retention_limit:
mid_memory[channel_id].pop(0)
- memory = memory[-(self.short_term_memory_limit - int(self.short_term_memory_limit * self.summary_retention_ratio)):]
+ memory = memory[-(self.short_term_memory_limit - int(
+ self.short_term_memory_limit * self.summary_retention_ratio)):]
short_memory[channel_id] = memory
await self.send_split_message(message.channel, response_text)
def should_reginald_interject(self, message_content: str) -> bool:
"""Determines if Reginald should respond to a message based on keywords."""
direct_invocation = {
@@ -261,7 +260,8 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
await self.config.guild(ctx.guild).openai_api_key.set(api_key)
await ctx.send("OpenAI API key set successfully.")
@commands.command(name="reginald_set_listening_channel", help="Set the channel where Reginald listens for messages.")
@commands.command(name="reginald_set_listening_channel",
help="Set the channel where Reginald listens for messages.")
@commands.has_permissions(administrator=True)
async def set_listening_channel(self, ctx, channel: discord.TextChannel):
"""Sets the channel where Reginald will listen for passive responses."""
@@ -273,7 +273,8 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
await self.config.guild(ctx.guild).listening_channel.set(channel.id)
await ctx.send(f"✅ Reginald will now listen only in {channel.mention}.")
@commands.command(name="reginald_get_listening_channel", help="Check which channel Reginald is currently listening in.")
@commands.command(name="reginald_get_listening_channel",
help="Check which channel Reginald is currently listening in.")
@commands.has_permissions(administrator=True)
async def get_listening_channel(self, ctx):
"""Displays the current listening channel."""
@@ -288,17 +289,16 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
else:
await ctx.send("❌ No listening channel has been set.")
- async def send_long_message(self, ctx, message, prefix: str = ""):
- """Splits and sends a long message to avoid Discord's 2000-character limit."""
- chunk_size = 1900 # Leave some space for formatting
- if prefix:
- prefix_length = len(prefix)
- chunk_size -= prefix_length
- for i in range(0, len(message), chunk_size):
- chunk = message[i:i + chunk_size]
- await ctx.send(f"{prefix}{chunk}")
+ # async def send_long_message(self, ctx, message, prefix: str = ""):
+ # """Splits and sends a long message to avoid Discord's 2000-character limit."""
+ # chunk_size = 1900 # Leave some space for formatting
+ # if prefix:
+ # prefix_length = len(prefix)
+ # chunk_size -= prefix_length
+ #
+ # for i in range(0, len(message), chunk_size):
+ # chunk = message[i:i + chunk_size]
+ # await ctx.send(f"{prefix}{chunk}")
async def send_split_message(self, ctx, content: str, prefix: str = ""):
"""