diff --git a/reginaldCog/debug_stuff.py b/reginaldCog/debug_stuff.py
new file mode 100644
index 0000000..92ccdf0
--- /dev/null
+++ b/reginaldCog/debug_stuff.py
@@ -0,0 +1,28 @@
+import functools
+import inspect
+
+
+def debug(func):
+    """Decorate *func* so every call logs its arguments and return value.
+
+    Coroutine functions get an async wrapper, so the awaited result is
+    logged instead of the coroutine object.
+    """
+    if inspect.iscoroutinefunction(func):
+        @functools.wraps(func)
+        async def async_wrap(*args, **kwargs):
+            print(f"DEBUG: Calling {func.__name__} with args: {args}, kwargs: {kwargs}")
+            result = await func(*args, **kwargs)
+            print(f"DEBUG: {func.__name__} returned: {result}")
+            return result
+
+        return async_wrap
+
+    @functools.wraps(func)
+    def wrap(*args, **kwargs):
+        print(f"DEBUG: Calling {func.__name__} with args: {args}, kwargs: {kwargs}")
+        result = func(*args, **kwargs)
+        print(f"DEBUG: {func.__name__} returned: {result}")
+        return result
+
+    return wrap
diff --git a/reginaldCog/openai_completion.py b/reginaldCog/openai_completion.py
new file mode 100644
index 0000000..60aa384
--- /dev/null
+++ b/reginaldCog/openai_completion.py
@@ -0,0 +1,87 @@
+import random
+import json
+import openai
+from openai import OpenAIError
+from .weather import time_now, get_current_weather, get_weather_forecast
+from .tools_description import TOOLS
+from .debug_stuff import debug
+
+CALLABLE_FUNCTIONS = {
+    # Dictionary with functions to call.
+    # You can use globals()[func_name](**args) instead, but that's too implicit.
+    'time_now': time_now,
+    'get_current_weather': get_current_weather,
+    'get_weather_forecast': get_weather_forecast,
+}
+
+
+class Completion:
+    def __init__(self, model: str, api_key: str):
+        self.__model = model
+        self.__api_key = api_key
+        self.__messages = []
+
+    @debug
+    async def create_completion(self, messages: list):
+        self.__messages = messages
+        model = self.__model
+        try:
+            client = openai.AsyncClient(api_key=self.__api_key)
+            completion_kwargs = {
+                "model": model,
+                "messages": messages,
+                "max_tokens": 4096,
+                "temperature": 0.7,
+                "presence_penalty": 0.5,
+                "frequency_penalty": 0.5,
+                "tools": TOOLS,
+                "tool_choice": "auto",
+            }
+            response = await client.chat.completions.create(**completion_kwargs)
+            response_content = response.choices[0].message.content
+            tool_calls = response.choices[0].message.tool_calls
+            self.append_message(role="assistant", content=response_content, tool_calls=tool_calls)
+            if tool_calls:
+                for i_call in tool_calls:
+                    func_name = i_call.function.name
+                    func_args = json.loads(i_call.function.arguments)
+                    tool_call_id = i_call.id
+                    self.function_manager(func_name, func_args, tool_call_id)
+                # Await the follow-up completion: returning the bare coroutine
+                # would hand callers an un-awaited coroutine object.
+                return await self.create_completion(messages=self.__messages)
+            return response_content
+        except OpenAIError as e:
+            return self.get_error_message(error_message=str(e), error_type="OpenAIError")
+
+    def append_message(
+        self,
+        role: str,
+        content: str,
+        tool_calls: list = None,
+        tool_call_id: str = None,
+    ):
+        # Attach the optional keys only when they are set; the chat API
+        # rejects null tool fields on messages whose role does not use them.
+        message = {"role": role, "content": content}
+        if tool_calls is not None:
+            message["tool_calls"] = tool_calls
+        if tool_call_id is not None:
+            message["tool_call_id"] = tool_call_id
+        self.__messages.append(message)
+
+    @staticmethod
+    def get_error_message(error_message: str, error_type: str) -> str:
+        reginald_responses = [
+            f"Regrettably, I must inform you that I have encountered a bureaucratic obstruction:",
+            f"It would seem that a most unfortunate technical hiccup has befallen my faculties:",
+            f"Ah, it appears I have received an urgent memorandum stating:",
+            f"I regret to inform you that my usual eloquence is presently obstructed by an unforeseen complication:",
+        ]
+        random_response = random.choice(reginald_responses)
+        return f"{random_response}\n\n{error_type}: {error_message}"
+
+    def function_manager(self, func_name: str, func_kwargs: dict, tool_call_id: str):
+        result = CALLABLE_FUNCTIONS[func_name](**func_kwargs)
+        self.append_message(role="tool", content=result, tool_call_id=tool_call_id)
+
diff --git a/reginaldCog/reginald.py b/reginaldCog/reginald.py
index 904de8a..b821293 100644
--- a/reginaldCog/reginald.py
+++ b/reginaldCog/reginald.py
@@ -14,6 +14,7 @@ from .blacklist import BlacklistMixin
 from .memory import MemoryMixin
 from .weather import time_now, get_current_weather, get_weather_forecast
 from .tools_description import TOOLS
+from .debug_stuff import debug
 
 
 CALLABLE_FUNCTIONS = {
@@ -189,6 +190,7 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
 
         return any(message_lower.startswith(invocation) for invocation in direct_invocation)
 
+    @debug
     async def generate_response(self, api_key, messages):
         model = await self.config.openai_model()
         try:
diff --git a/reginaldCog/weather.py b/reginaldCog/weather.py
index 86e6b79..580716d 100644
--- a/reginaldCog/weather.py
+++ b/reginaldCog/weather.py
@@ -2,20 +2,24 @@ from datetime import datetime, timezone
 from os import environ
 import requests
 import json
+from .debug_stuff import debug
 
 #WEATHER_API_KEY = environ.get('WEATHER_API_KEY')
 URL = 'http://api.weatherapi.com/v1'
 
 
+@debug
 def time_now() -> str:
     return str(datetime.now(timezone.utc))
 
 
+@debug
 def get_current_weather(location: str) -> str:
     weather = Weather(location=location)
     return json.dumps(weather.realtime())
 
 
+@debug
 def get_weather_forecast(location: str, days: int = 14, dt: str = '2025-03-24') -> str:
     weather = Weather(location=location)
     return json.dumps(weather.forecast(days=days, dt=dt))
@@ -35,6 +39,7 @@ class Weather:
         response = requests.get(url=f'{URL}{method}', params=params)
         return response.json()
 
+    @debug
     def realtime(self):
         method = '/current.json'
         params = {
@@ -43,6 +48,7 @@ class Weather:
         }
         return self.make_request(method=method, params=params)
 
+    @debug
     def forecast(self, days: int = 14, dt: str = '2025-03-24'):
         method = '/forecast.json'
         params = {