No max tokens, this is surely safe

AllfatherHatt 2025-03-16 14:10:49 +01:00
parent b843a4ef58
commit 1a34585d33


@@ -196,7 +196,7 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
         completion_args = {
             'model': model,
             'messages': messages,
-            'max_tokens': 2048,
+            #'max_tokens': 2048,
             'temperature': 0.7,
             'presence_penalty': 0.5,
             'frequency_penalty': 0.5,
@@ -237,6 +237,8 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
                 print("DEBUG: OpenAI response was empty or malformed:", response)
                 response_text = "⚠️ No response received from AI."
+            completion_args["messages"] = messages
             return response_text
         except OpenAIError as e:
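
For context, here is a minimal sketch of how a completion_args dict like the one in this diff might be sent to the OpenAI chat completions endpoint and turned into response_text. The client construction, the placeholder model name, and the call site are assumptions for illustration; only the argument dict and the empty-response fallback come from the diff itself.

# Sketch only: assumed call site for completion_args (openai>=1.0 client style).
from openai import OpenAI, OpenAIError

client = OpenAI()  # expects OPENAI_API_KEY in the environment

completion_args = {
    'model': 'gpt-4o-mini',                              # placeholder model name
    'messages': [{'role': 'user', 'content': 'ping'}],   # placeholder messages
    # 'max_tokens': 2048,                                # left out, as in this commit
    'temperature': 0.7,
    'presence_penalty': 0.5,
    'frequency_penalty': 0.5,
}

try:
    response = client.chat.completions.create(**completion_args)
    if response.choices and response.choices[0].message.content:
        response_text = response.choices[0].message.content
    else:
        print("DEBUG: OpenAI response was empty or malformed:", response)
        response_text = "⚠️ No response received from AI."
except OpenAIError as e:
    response_text = f"⚠️ OpenAI error: {e}"

print(response_text)

Note that with max_tokens commented out, the reply length is bounded only by the model's own output limit and context window, so longer (and costlier) completions become possible, which is the trade-off the commit message alludes to.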