Steve says this is a good idea

This commit is contained in:
unknown 2023-05-14 15:42:16 +02:00
parent a99f5c4fb9
commit 75cda96395

View File

@@ -18,7 +18,7 @@ class ReginaldCog(commands.Cog):
self.bot = bot self.bot = bot
self.config = Config.get_conf(self, identifier=71717171171717) self.config = Config.get_conf(self, identifier=71717171171717)
self.config.register_global( self.config.register_global(
openai_model="text-davinci-002" openai_model="gpt-3.5-turbo"
) )
self.config.register_guild( self.config.register_guild(
openai_api_key=None openai_api_key=None
@@ -80,27 +80,30 @@ class ReginaldCog(commands.Cog):
async def generate_response(self, api_key, prompt):
    """Query the OpenAI chat-completions API and return the reply text.

    Parameters:
        api_key: OpenAI API key, sent as the Bearer token.
        prompt: User text forwarded to the model as the user message.

    Returns:
        The assistant's reply with surrounding whitespace stripped.

    Raises:
        aiohttp.ClientResponseError: if the API returns an HTTP error status.
        KeyError: if the response payload lacks the expected fields.
    """
    model = await self.config.openai_model()
    # Chat-completions endpoint: the model goes in the JSON body, not in
    # the URL as with the legacy /v1/engines/{model}/completions endpoint.
    url = "https://api.openai.com/v1/chat/completions"
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
    }
    data = {
        "model": model,
        # Each chat message must be a {"role": ..., "content": ...} dict;
        # the previous {"system": ..., "content": prompt} shape is rejected
        # by the API, and it also dropped the prompt into the system entry.
        "messages": [
            {"role": "system", "content": "You are Reginald, the butler. You aim to help anyone however you can, and speak in a refined manner."},
            {"role": "user", "content": prompt},
        ],
        "max_tokens": 1000,
        "n": 1,
        "stop": None,
        "temperature": 0.5,
        "presence_penalty": 0.5,
        "frequency_penalty": 0.5,
        # NOTE: "best_of" was removed — it is a legacy completions-only
        # parameter and is not accepted by the chat-completions API.
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, headers=headers, json=data) as resp:
            # Surface API errors explicitly instead of failing later with
            # an opaque KeyError on the error-payload JSON.
            resp.raise_for_status()
            response = await resp.json()
    # "message" is a dict, not a list: choices[0]["message"]["content"].
    # The previous ['message'][0]['content'] would raise KeyError(0).
    return response['choices'][0]['message']['content'].strip()
@staticmethod @staticmethod
def split_response(response_text, max_chars): def split_response(response_text, max_chars):