Add a Discord bot service that produces an LLM response from an incoming Discord message
This commit is contained in:
parent
92f536d407
commit
f195d77e67
@ -1,6 +1,8 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from enum import Enum
|
||||||
from openai import OpenAI
|
from openai import OpenAI
|
||||||
from reginaldCog.messenger_clients.messenger_client import ClientMessage
|
from discord import Message
|
||||||
|
from reginaldCog.messenger_clients.messenger_client import ClientMessage, DiscordMessageAdapter
|
||||||
|
|
||||||
|
|
||||||
class ILLMContent(ABC):
|
class ILLMContent(ABC):
|
||||||
@ -117,6 +119,13 @@ class ILLMClient(ABC):
|
|||||||
|
|
||||||
|
|
||||||
class OpenAIClient(ILLMClient):
|
class OpenAIClient(ILLMClient):
|
||||||
|
content_class = OpenAIContent
|
||||||
|
content_builder_class = OpenAIContentBuilder
|
||||||
|
message_class = OpenAIMessage
|
||||||
|
message_builder_class = OpenAIMessageBuilder
|
||||||
|
prompt_class = OpenAIPrompt
|
||||||
|
prompt_builder_class = OpenAIPromptBuilder
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.model = 'gpt-4.1-mini'
|
self.model = 'gpt-4.1-mini'
|
||||||
self.client = OpenAI()
|
self.client = OpenAI()
|
||||||
@ -153,15 +162,20 @@ class OpenAIResponseAdapter(IMessageAdapter):
|
|||||||
return message
|
return message
|
||||||
|
|
||||||
|
|
||||||
|
class LMMClientType(Enum):
    """Registry of available LLM client implementations.

    Each member's ``value`` is the concrete ``ILLMClient`` subclass to use;
    callers read its ``*_class`` attributes (content, message, prompt and
    their builders) to stay backend-agnostic.

    NOTE(review): the name misspells "LLM" as "LMM"; kept for backward
    compatibility with existing importers — prefer the alias below.
    """

    OPENAI = OpenAIClient


# Corrected-spelling alias; new code should import this name.
LLMClientType = LMMClientType
|
||||||
|
|
||||||
|
|
||||||
class MessengerClientMessageAdapter(IMessageAdapter):
|
class MessengerClientMessageAdapter(IMessageAdapter):
|
||||||
def __init__(self, message: ClientMessage): # Dependency Inversion violation, figure out later
|
def __init__(self, message: ClientMessage, llm_client: LMMClientType):
|
||||||
self.message = message
|
self.message = message
|
||||||
|
self.llm_client = llm_client
|
||||||
|
|
||||||
def to_message(self) -> ILLMMessage:
|
def to_message(self) -> ILLMMessage:
|
||||||
content = OpenAIContent()
|
content = self.llm_client.value.content_class()
|
||||||
content_builder = OpenAIContentBuilder(content)
|
content_builder = self.llm_client.value.content_builder_class(content)
|
||||||
message = OpenAIMessage()
|
message = self.llm_client.value.message_class()
|
||||||
message_builder = OpenAIMessageBuilder(message)
|
message_builder = self.llm_client.value.message_builder_class(message)
|
||||||
|
|
||||||
message_builder.set_role("user")\
|
message_builder.set_role("user")\
|
||||||
.set_content(content)
|
.set_content(content)
|
||||||
@ -175,37 +189,4 @@ class MessengerClientMessageAdapter(IMessageAdapter):
|
|||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
dev_message_text = "You are a butler named Reginald. Give your answers in a refined manners of the English butler."
|
pass
|
||||||
user_message_text = "Tell me what's on this picture"
|
|
||||||
pic_url = "https://media.discordapp.net/attachments/913835221814763550/1408534120421654669/image.png?ex=68ad62df&is=68ac115f&hm=bdfcf878f20e154a575a528b9cb274a8c2162f84ce12fc3876856e821ef9cc63&=&format=webp&quality=lossless&width=1342&height=755"
|
|
||||||
|
|
||||||
# Create developer message content and message
|
|
||||||
dev_content = OpenAIContent()
|
|
||||||
dev_content_builder = OpenAIContentBuilder(dev_content).add_input_text(dev_message_text)
|
|
||||||
dev_message = OpenAIMessage()
|
|
||||||
OpenAIMessageBuilder(dev_message).set_role("developer").set_content(dev_content)
|
|
||||||
|
|
||||||
# Create user message content and message
|
|
||||||
user_content = OpenAIContent()
|
|
||||||
OpenAIContentBuilder(user_content).add_input_text(user_message_text).add_input_image(pic_url)
|
|
||||||
user_message = OpenAIMessage()
|
|
||||||
OpenAIMessageBuilder(user_message).set_role("user").set_content(user_content)
|
|
||||||
|
|
||||||
# Create prompt and add messages
|
|
||||||
test_prompt = OpenAIPrompt()
|
|
||||||
prompt_builder = OpenAIPromptBuilder(test_prompt)
|
|
||||||
prompt_builder.add_message(dev_message)
|
|
||||||
prompt_builder.add_message(user_message)
|
|
||||||
|
|
||||||
# Create client and get response
|
|
||||||
client = OpenAIClient()
|
|
||||||
test_response = client.get_response(test_prompt)
|
|
||||||
|
|
||||||
# Create response adapter and add response to prompt
|
|
||||||
response_adapter = OpenAIResponseAdapter(test_response)
|
|
||||||
response_message = response_adapter.to_message()
|
|
||||||
prompt_builder.add_message(response_message)
|
|
||||||
|
|
||||||
# Print messages
|
|
||||||
print(test_prompt.to_list)
|
|
||||||
|
|
||||||
|
|||||||
39
reginaldCog/messenger_clients/services.py
Normal file
39
reginaldCog/messenger_clients/services.py
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
import asyncio
|
||||||
|
from discord import Message
|
||||||
|
from reginaldCog.messenger_clients.messenger_client import ClientMessage, DiscordMessageAdapter as MessengerDiscordAdapter
|
||||||
|
from reginaldCog.llm_clients.llm_client import LMMClientType, MessengerClientMessageAdapter
|
||||||
|
|
||||||
|
|
||||||
|
class MessageService:
    """Turn an incoming Discord message into an LLM response string.

    Bridges the messenger-client domain (``ClientMessage``) and the
    LLM-client domain (prompt/message builders) for a pluggable backend
    selected via ``LMMClientType``.
    """

    def __init__(self, message: Message, llm_client: LMMClientType = LMMClientType.OPENAI):
        # Raw discord.Message this service will answer.
        self.message = message
        # Enum member whose .value is the concrete ILLMClient class to use.
        self.llm_client = llm_client

    def _build_prompt(self):
        """Adapt the wrapped Discord message into a prompt for the selected client."""
        client_cls = self.llm_client.value

        # Adapt discord.Message to the ClientMessage domain object.
        client_message: ClientMessage = MessengerDiscordAdapter().create_message(self.message)

        # Create the backend-specific prompt and its builder.
        prompt = client_cls.prompt_class()
        prompt_builder = client_cls.prompt_builder_class(prompt)

        # Adapt the messenger-client message into an LLM message and add it.
        llm_message = MessengerClientMessageAdapter(client_message, self.llm_client).to_message()
        prompt_builder.add_message(llm_message)
        return prompt

    @staticmethod
    def _extract_text(response) -> str:
        """Pull the plain-text pieces out of an LLM response.

        NOTE(review): hard-codes ``OpenAIResponseAdapter`` even though the
        client is pluggable via ``LMMClientType`` — add a
        ``response_adapter_class`` hook on ``ILLMClient`` to make this
        generic for future backends.
        """
        # Deferred import: the name is only needed on this path and the
        # module is already imported at the top of the file.
        from reginaldCog.llm_clients.llm_client import OpenAIResponseAdapter

        message_obj = OpenAIResponseAdapter(response).to_message()

        # Keep only the textual output items; assumes content_items are
        # dicts in the OpenAI Responses format — TODO confirm.
        texts = [
            item.get("text", "")
            for item in message_obj.content.content_items
            if item.get("type") == "output_text"
        ]
        return "\n".join(texts) if texts else "Sorry, no response generated."

    async def get_llm_response(self) -> str:
        """Return the LLM's reply text for the wrapped Discord message."""
        prompt = self._build_prompt()

        llm_client_instance = self.llm_client.value()

        # get_response is a blocking synchronous call; run it in the
        # default executor so the event loop stays responsive.
        loop = asyncio.get_running_loop()
        response = await loop.run_in_executor(None, llm_client_instance.get_response, prompt)

        return self._extract_text(response)
|
||||||
Loading…
x
Reference in New Issue
Block a user