Re-adding max tokens

AllfatherHatt 2025-03-16 14:14:56 +01:00
parent f22c77e2a1
commit dc5bce9ca4


@@ -196,7 +196,7 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
         completion_args = {
             'model': model,
             'messages': messages,
-            #'max_tokens': 2048,
+            'max_tokens': 4096,
             'temperature': 0.7,
             'presence_penalty': 0.5,
             'frequency_penalty': 0.5,
@@ -237,8 +237,6 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
                 print("DEBUG: OpenAI response was empty or malformed:", response)
                 response_text = "⚠️ No response received from AI."
-            completion_args["messages"] = messages
-
             return response_text
         except OpenAIError as e:
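
For context, a minimal sketch of how completion arguments like these (with 'max_tokens' re-added at 4096) might be passed to the OpenAI chat completions API via the openai 1.x Python client. This is not the repository's actual code: the function name ask_reginald, the client setup, and the error handling shown here are illustrative assumptions; only the keyword arguments mirror the diff.

# Hedged sketch, assuming the openai >= 1.x Python SDK.
from openai import OpenAI, OpenAIError

client = OpenAI()  # reads OPENAI_API_KEY from the environment

def ask_reginald(model: str, messages: list[dict]) -> str:
    # Same argument set as in the diff; max_tokens caps the reply length.
    completion_args = {
        'model': model,
        'messages': messages,
        'max_tokens': 4096,
        'temperature': 0.7,
        'presence_penalty': 0.5,
        'frequency_penalty': 0.5,
    }
    try:
        response = client.chat.completions.create(**completion_args)
        content = response.choices[0].message.content if response.choices else None
        if not content:
            print("DEBUG: OpenAI response was empty or malformed:", response)
            return "⚠️ No response received from AI."
        return content
    except OpenAIError as e:
        return f"⚠️ OpenAI error: {e}"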