From dc5bce9ca476a78476a32c26563ca2d64f0c1f52 Mon Sep 17 00:00:00 2001
From: AllfatherHatt
Date: Sun, 16 Mar 2025 14:14:56 +0100
Subject: [PATCH] Re-adding max tokens

---
 reginaldCog/reginald.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/reginaldCog/reginald.py b/reginaldCog/reginald.py
index 5906370..55aa82e 100644
--- a/reginaldCog/reginald.py
+++ b/reginaldCog/reginald.py
@@ -196,7 +196,7 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
         completion_args = {
             'model': model,
             'messages': messages,
-            #'max_tokens': 2048,
+            'max_tokens': 4096,
             'temperature': 0.7,
             'presence_penalty': 0.5,
             'frequency_penalty': 0.5,
@@ -237,8 +237,6 @@ class ReginaldCog(PermissionsMixin, BlacklistMixin, MemoryMixin, commands.Cog):
                 print("DEBUG: OpenAI response was empty or malformed:", response)
                 response_text = "⚠️ No response received from AI."
 
-            completion_args["messages"] = messages
-
             return response_text
 
         except OpenAIError as e:
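
For context, below is a minimal, self-contained sketch (not the cog's actual code path) of how a completion_args dict with the re-added max_tokens cap is typically passed to the OpenAI chat completions API using the openai 1.x client. The client setup, model name, and message content are placeholders assumed for illustration; reginald.py builds model and messages elsewhere.

# Sketch only: illustrates the effect of re-enabling 'max_tokens' in completion_args.
from openai import OpenAI, OpenAIError

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

completion_args = {
    'model': 'gpt-4o-mini',  # placeholder; the cog supplies its own model variable
    'messages': [{'role': 'user', 'content': 'Hello, Reginald!'}],  # placeholder messages
    'max_tokens': 4096,      # cap on tokens generated in the reply (the re-added setting)
    'temperature': 0.7,
    'presence_penalty': 0.5,
    'frequency_penalty': 0.5,
}

try:
    response = client.chat.completions.create(**completion_args)
    print(response.choices[0].message.content)
except OpenAIError as e:
    print(f"OpenAI request failed: {e}")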