More chatgpt
parent 0fe7c67b3c
commit c353f552fe
@@ -9,7 +9,6 @@ import aiohttp
 from io import BytesIO
 from PIL import Image
 import tempfile
-import asyncio
 from openai import OpenAIError
 from redbot.core import Config, commands
 
@@ -84,26 +83,27 @@ class ReginaldCog(commands.Cog):
 
     async def generate_response(self, api_key, prompt):
         model = await self.config.openai_model()
-        openai.api_key = api_key
-        max_tokens = 1000
-        temperature = 0.5
-        loop = asyncio.get_event_loop()
+        url = f"https://api.openai.com/v1/engines/{model}/completions"
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {api_key}",
+        }
+        data = {
+            "prompt": prompt,
+            "max_tokens": 1000,
+            "n": 1,
+            "stop": None,
+            "temperature": 0.5,
+            "presence_penalty": 0.5,
+            "frequency_penalty": 0.5,
+            "best_of": 3,
+        }
 
-        response = await loop.run_in_executor(
-            None,
-            openai.Completion.create,
-            model=model,
-            prompt=prompt,
-            max_tokens=max_tokens,
-            n=1,
-            stop=None,
-            temperature=temperature,
-            presence_penalty=0.5,
-            frequency_penalty=0.5,
-            best_of=3,
-        )
+        async with aiohttp.ClientSession() as session:
+            async with session.post(url, headers=headers, json=data) as resp:
+                response = await resp.json()
 
-        return response.choices[0].text.strip()
+        return response['choices'][0]['text'].strip()
 
     @staticmethod
     def split_response(response_text, max_chars):
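
For context, the request pattern this commit switches to can be exercised on its own. The sketch below mirrors the endpoint, headers, and payload from the diff above, but it is not part of the commit: the model name "text-davinci-003", the OPENAI_API_KEY environment variable, and the HTTP status check are illustrative assumptions added here.

# Minimal standalone sketch of the aiohttp-based call introduced in this commit.
# Model name, API-key handling, and error handling are assumptions, not from the diff.
import asyncio
import os

import aiohttp


async def generate_response(api_key: str, prompt: str, model: str = "text-davinci-003") -> str:
    # Same legacy engines completions endpoint and payload as in the diff above.
    url = f"https://api.openai.com/v1/engines/{model}/completions"
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {api_key}",
    }
    data = {
        "prompt": prompt,
        "max_tokens": 1000,
        "n": 1,
        "stop": None,
        "temperature": 0.5,
        "presence_penalty": 0.5,
        "frequency_penalty": 0.5,
        "best_of": 3,
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(url, headers=headers, json=data) as resp:
            # The cog assumes a successful response; here we surface non-200 replies.
            if resp.status != 200:
                body = await resp.text()
                raise RuntimeError(f"OpenAI API returned HTTP {resp.status}: {body}")
            payload = await resp.json()
    return payload["choices"][0]["text"].strip()


if __name__ == "__main__":
    answer = asyncio.run(generate_response(os.environ["OPENAI_API_KEY"], "Say hello."))
    print(answer)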