Merge pull request #29 from T-BENZIN/development
+ Added openai_completion.py for later use as an external script for working with ChatGPT
This commit is contained in:
commit
0483c2d8e6
15
reginaldCog/debug_stuff.py
Normal file
15
reginaldCog/debug_stuff.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
def debug(func):
    """Decorator that logs every call to *func*.

    Prints the function name and arguments on entry and the return value
    on exit, then returns the result unchanged so the wrapper is
    transparent to callers.
    """
    # Local import keeps this single-function module dependency-free at
    # import time for existing users of the file.
    from functools import wraps

    @wraps(func)  # preserve func.__name__/__doc__ (the original lost them)
    def wrap(*args, **kwargs):
        # Log the function name and arguments
        print(f"DEBUG: Calling {func.__name__} with args: {args}, kwargs: {kwargs}")

        # Call the original function
        result = func(*args, **kwargs)

        # Log the return value
        print(f"DEBUG: {func.__name__} returned: {result}")

        # Return the result
        return result

    return wrap
|
||||||
83
reginaldCog/openai_completion.py
Normal file
83
reginaldCog/openai_completion.py
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
import random
|
||||||
|
import json
|
||||||
|
import openai
|
||||||
|
from openai import OpenAIError
|
||||||
|
from .weather import time_now, get_current_weather, get_weather_forecast
|
||||||
|
from .tools_description import TOOLS
|
||||||
|
from .debug_stuff import debug
|
||||||
|
|
||||||
|
# Dispatch table: tool-call function names (as the OpenAI API reports
# them) mapped to the local callables that implement them. Used by
# Completion.function_manager to execute model-requested tool calls.
CALLABLE_FUNCTIONS = {
    # Dictionary with functions to call.
    # You can use globals()[func_name](**args) instead, but that's too implicit.
    'time_now': time_now,
    'get_current_weather': get_current_weather,
    'get_weather_forecast': get_weather_forecast,
}
|
||||||
|
|
||||||
|
|
||||||
|
class Completion:
    """Async wrapper around the OpenAI chat-completions API with tool
    (function) calling support.

    Tool calls requested by the model are executed locally through
    CALLABLE_FUNCTIONS, their results are appended to the conversation,
    and the completion is re-requested so the model can use them.
    """

    def __init__(self, model: str, api_key: str):
        # Name-mangled attributes: model/api key are fixed per instance;
        # __messages holds the running conversation state.
        self.__model = model
        self.__api_key = api_key
        self.__messages = []

    @debug
    async def create_completion(self, messages: list):
        """Request a chat completion for *messages*.

        Returns the assistant's text content. If the model requests tool
        calls, each is executed and appended to the conversation, then the
        completion is re-requested recursively. On an OpenAI client error,
        returns a formatted in-character error string instead of raising.
        """
        self.__messages = messages
        model = self.__model
        try:
            client = openai.AsyncClient(api_key=self.__api_key)
            completion_kwargs = {
                "model": model,
                "messages": messages,
                "max_tokens": 4096,
                "temperature": 0.7,
                "presence_penalty": 0.5,
                "frequency_penalty": 0.5,
                "tools": TOOLS,
                "tool_choice": "auto",
            }
            response = await client.chat.completions.create(**completion_kwargs)
            response_content = response.choices[0].message.content
            tool_calls = response.choices[0].message.tool_calls
            self.append_message(role="assistant", content=response_content, tool_calls=tool_calls)
            if tool_calls:
                for i_call in tool_calls:
                    func_name = i_call.function.name
                    # Arguments arrive as a JSON string; decode to kwargs.
                    func_args = json.loads(i_call.function.arguments)
                    tool_call_id = i_call.id
                    self.function_manager(func_name, func_args, tool_call_id)
                # BUG FIX: the recursive call was not awaited in the
                # original, so callers received a bare coroutine object
                # instead of the model's follow-up reply.
                return await self.create_completion(messages=self.__messages)
            return response_content
        except OpenAIError as e:
            return self.get_error_message(error_message=str(e), error_type="OpenAIError")

    def append_message(
        self,
        role: str,
        content: str,
        tool_calls: list = None,
        tool_call_id: str = None,
    ):
        """Append one message dict to the running conversation.

        NOTE(review): None-valued tool_calls/tool_call_id fields are sent
        to the API as-is — confirm the endpoint tolerates null fields.
        """
        self.__messages.append({
            "role": role,
            "content": content,
            "tool_calls": tool_calls,
            "tool_call_id": tool_call_id,
        })

    @staticmethod
    def get_error_message(error_message: str, error_type: str) -> str:
        """Wrap an error in one of several in-character preambles.

        Returns "<random preamble>\\n\\n<error_type>: <error_message>".
        """
        reginald_responses = [
            "Regrettably, I must inform you that I have encountered a bureaucratic obstruction:",
            "It would seem that a most unfortunate technical hiccup has befallen my faculties:",
            "Ah, it appears I have received an urgent memorandum stating:",
            "I regret to inform you that my usual eloquence is presently obstructed by an unforeseen complication:",
        ]
        random_response = random.choice(reginald_responses)
        return f"{random_response}\n\n{error_type}: {error_message}"

    def function_manager(self, func_name: str, func_kwargs: dict, tool_call_id: str):
        """Execute a model-requested tool and record its result.

        Looks *func_name* up in CALLABLE_FUNCTIONS, calls it with the
        decoded kwargs, and appends the result as a "tool" message tied
        to *tool_call_id*.
        """
        result = CALLABLE_FUNCTIONS[func_name](**func_kwargs)
        self.append_message(role="tool", content=result, tool_call_id=tool_call_id)
|
||||||
|
|
||||||
@ -14,6 +14,7 @@ from .blacklist import BlacklistMixin
|
|||||||
from .memory import MemoryMixin
|
from .memory import MemoryMixin
|
||||||
from .weather import time_now, get_current_weather, get_weather_forecast
|
from .weather import time_now, get_current_weather, get_weather_forecast
|
||||||
from .tools_description import TOOLS
|
from .tools_description import TOOLS
|
||||||
|
from .debug_stuff import debug
|
||||||
|
|
||||||
|
|
||||||
CALLABLE_FUNCTIONS = {
|
CALLABLE_FUNCTIONS = {
|
||||||
@ -189,6 +190,7 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
|
|||||||
|
|
||||||
return any(message_lower.startswith(invocation) for invocation in direct_invocation)
|
return any(message_lower.startswith(invocation) for invocation in direct_invocation)
|
||||||
|
|
||||||
|
@debug
|
||||||
async def generate_response(self, api_key, messages):
|
async def generate_response(self, api_key, messages):
|
||||||
model = await self.config.openai_model()
|
model = await self.config.openai_model()
|
||||||
try:
|
try:
|
||||||
|
|||||||
@ -2,20 +2,24 @@ from datetime import datetime, timezone
|
|||||||
from os import environ
|
from os import environ
|
||||||
import requests
|
import requests
|
||||||
import json
|
import json
|
||||||
|
from .debug_stuff import debug
|
||||||
|
|
||||||
#WEATHER_API_KEY = environ.get('WEATHER_API_KEY')
|
#WEATHER_API_KEY = environ.get('WEATHER_API_KEY')
|
||||||
URL = 'http://api.weatherapi.com/v1'
|
URL = 'http://api.weatherapi.com/v1'
|
||||||
|
|
||||||
|
|
||||||
|
@debug
def time_now() -> str:
    # Returns the current UTC time rendered via str(datetime), e.g.
    # "2025-03-24 12:00:00.000000+00:00". Exposed to the model as a tool.
    return str(datetime.now(timezone.utc))
|
||||||
|
|
||||||
|
|
||||||
|
@debug
def get_current_weather(location: str) -> str:
    # Fetches current conditions for *location* through the Weather helper
    # and returns the API payload serialized as a JSON string (tool result
    # messages must be strings).
    weather = Weather(location=location)
    return json.dumps(weather.realtime())
|
||||||
|
|
||||||
|
|
||||||
|
@debug
def get_weather_forecast(location: str, days: int = 14, dt: str = '2025-03-24') -> str:
    # Fetches a forecast for *location* and returns it as a JSON string.
    # days: forecast horizon, passed through to the weather API.
    # dt: target date. NOTE(review): the hard-coded default looks like a
    # stale placeholder — confirm whether it should be "today" instead.
    weather = Weather(location=location)
    return json.dumps(weather.forecast(days=days, dt=dt))
|
||||||
@ -35,6 +39,7 @@ class Weather:
|
|||||||
response = requests.get(url=f'{URL}{method}', params=params)
|
response = requests.get(url=f'{URL}{method}', params=params)
|
||||||
return response.json()
|
return response.json()
|
||||||
|
|
||||||
|
@debug
|
||||||
def realtime(self):
|
def realtime(self):
|
||||||
method = '/current.json'
|
method = '/current.json'
|
||||||
params = {
|
params = {
|
||||||
@ -43,6 +48,7 @@ class Weather:
|
|||||||
}
|
}
|
||||||
return self.make_request(method=method, params=params)
|
return self.make_request(method=method, params=params)
|
||||||
|
|
||||||
|
@debug
|
||||||
def forecast(self, days: int = 14, dt: str = '2025-03-24'):
|
def forecast(self, days: int = 14, dt: str = '2025-03-24'):
|
||||||
method = '/forecast.json'
|
method = '/forecast.json'
|
||||||
params = {
|
params = {
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user