KaniumCogs/reginaldCog/openai_completion.py

106 lines
4.0 KiB
Python
Raw Normal View History

2025-06-17 10:57:19 +05:00
import asyncio
2026-03-16 12:16:29 +01:00
import json
import random
2025-06-17 10:57:19 +05:00
from os import environ
2026-03-16 12:16:29 +01:00
import openai
from openai import OpenAIError
2026-03-16 12:16:29 +01:00
try:
from .tools_description import TOOLS
from .weather import get_current_weather, get_weather_forecast, time_now
except ImportError:
from tools_description import TOOLS
from weather import get_current_weather, get_weather_forecast, time_now
# Registry of tools the model may invoke: maps each tool name (as advertised
# to the API via TOOLS) to the local callable that implements it.
# Dispatched by Completion.function_manager.
CALLABLE_FUNCTIONS = {
    "time_now": time_now,
    "get_current_weather": get_current_weather,
    "get_weather_forecast": get_weather_forecast,
}
class Completion:
    """Stateful conversation wrapper around the OpenAI chat-completions API.

    Accumulates a message history and transparently resolves tool calls
    (weather/time helpers) until the model produces a plain-text reply.
    """

    # Upper bound on consecutive tool-call rounds so a misbehaving model
    # cannot drive us into an unbounded request loop.
    MAX_TOOL_ROUNDS = 8

    def __init__(self, model: str, api_key: str):
        self.__model = model
        self.__api_key = api_key
        # Chat history in the API's message-dict format.
        self.__messages = []

    async def create_completion(self) -> str:
        """Request a completion, executing any requested tools, and return
        the model's final text reply (or a themed error message on failure).

        Returns:
            The assistant's text content, or a formatted error string if the
            API raised ``OpenAIError`` or the tool-call budget was exhausted.
        """
        try:
            client = openai.AsyncOpenAI(api_key=self.__api_key)
            for _ in range(self.MAX_TOOL_ROUNDS):
                response = await client.chat.completions.create(
                    model=self.__model,
                    messages=self.__messages,
                    max_completion_tokens=2000,
                    temperature=0.7,
                    presence_penalty=0.5,
                    frequency_penalty=0.5,
                    tools=TOOLS,
                    tool_choice="auto",
                )
                response_message = response.choices[0].message
                response_content = response_message.content or ""
                tool_calls = response_message.tool_calls or []
                # Pass None (not []) when there are no tool calls: the API
                # rejects an assistant message carrying an empty tool_calls
                # list, which would poison the history for later requests.
                self.append_message(
                    role="assistant",
                    content=response_content,
                    tool_calls=tool_calls or None,
                )
                if not tool_calls:
                    return response_content
                for tool_call in tool_calls:
                    await self.function_manager(
                        func_name=tool_call.function.name,
                        func_kwargs=json.loads(tool_call.function.arguments or "{}"),
                        tool_call_id=tool_call.id,
                    )
            # Budget exhausted without a text reply; report rather than loop.
            return self.get_error_message(
                error_message="tool-call limit exceeded", error_type="RuntimeError"
            )
        except OpenAIError as error:
            return self.get_error_message(error_message=str(error), error_type="OpenAIError")

    def append_message(self, role: str, content: str, tool_calls: list = None, tool_call_id: str = None):
        """Append one chat message to the history.

        Optional tool metadata is attached only when actually present — an
        empty ``tool_calls`` list is rejected by the chat API, so falsy
        values (None or []) are omitted.
        """
        message = {"role": role, "content": content}
        if tool_calls:
            message["tool_calls"] = tool_calls
        if tool_call_id is not None:
            message["tool_call_id"] = tool_call_id
        self.__messages.append(message)

    @staticmethod
    def get_error_message(error_message: str, error_type: str) -> str:
        """Wrap an error in one of Reginald's in-character apologies."""
        reginald_responses = [
            "Regrettably, I must inform you that I have encountered a bureaucratic obstruction:",
            "It would seem that a most unfortunate technical hiccup has befallen my faculties:",
            "Ah, it appears I have received an urgent memorandum stating:",
            "I regret to inform you that my usual eloquence is presently obstructed by an unforeseen complication:",
        ]
        random_response = random.choice(reginald_responses)
        return f"{random_response}\n\n{error_type}: {error_message}"

    async def function_manager(self, func_name: str, func_kwargs: dict, tool_call_id: str):
        """Execute the requested tool and append its result as a tool message.

        Unknown tool names and tool exceptions are reported back to the model
        as JSON error payloads instead of crashing the completion loop.
        """
        function_to_call = CALLABLE_FUNCTIONS.get(func_name)
        if function_to_call is None:
            result = json.dumps({"error": f"Unknown tool requested: {func_name}"})
        else:
            try:
                # Tools are synchronous callables; run them off the event loop.
                result = await asyncio.to_thread(function_to_call, **func_kwargs)
            except Exception as error:
                result = json.dumps({"error": f"Tool {func_name} failed: {error}"})
        # Tool-message content must be a string for the chat API; tools may
        # return structured data — TODO confirm against weather.py.
        if not isinstance(result, str):
            result = json.dumps(result, default=str)
        self.append_message(role="tool", content=result, tool_call_id=tool_call_id)
2025-06-17 10:57:19 +05:00
2026-03-16 12:16:29 +01:00
if __name__ == "__main__":

    async def main() -> None:
        """Ad-hoc CLI smoke test: send one user message and print the reply."""
        user_text = input("Your input: ")
        session = Completion(model="gpt-4.1-mini", api_key=environ.get("OPENAI_API_KEY"))
        session.append_message(role="user", content=user_text)
        reply = await session.create_completion()
        print(reply)

    asyncio.run(main())