From 75fccb30d941356599bcdf9ca807759c9a78b91c Mon Sep 17 00:00:00 2001
From: Christoph Eicke
Date: Wed, 5 Feb 2025 00:55:32 +0100
Subject: [PATCH] Update max_tokens parameter (#614)

---
 app/services/ai_backend/open_ai.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/services/ai_backend/open_ai.rb b/app/services/ai_backend/open_ai.rb
index 206d9ce4..39d2c57e 100644
--- a/app/services/ai_backend/open_ai.rb
+++ b/app/services/ai_backend/open_ai.rb
@@ -66,7 +66,7 @@ def set_client_config(config)
       model: @assistant.language_model.api_name,
       messages: system_message(config[:instructions]) + config[:messages],
       stream: config[:streaming] && @response_handler || nil,
-      max_tokens: 2000, # we should really set this dynamically, based on the model, to the max
+      max_completion_tokens: 2000, # we should really set this dynamically, based on the model, to the max
       stream_options: config[:streaming] && { include_usage: true } || nil,
       response_format: { type: "text" },
       tools: @assistant.language_model.supports_tools? && Toolbox.tools || nil,
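
Context for the rename: newer OpenAI chat models accept max_completion_tokens rather than the legacy max_tokens field, which is what this one-line change addresses. The inline comment also notes the limit should eventually be derived per model instead of being hard-coded at 2000. A minimal sketch of that idea follows; it is not part of this patch, the lookup table, helper name, and token limits are illustrative assumptions only.

# Hypothetical sketch (not in this patch): pick max_completion_tokens per model.
# The limits below are illustrative placeholders, not authoritative values.
MAX_COMPLETION_TOKENS = {
  "gpt-4o"      => 16_384,
  "gpt-4o-mini" => 16_384,
}.freeze

def max_completion_tokens_for(api_name)
  # Fall back to the current hard-coded default for unknown models.
  MAX_COMPLETION_TOKENS.fetch(api_name, 2000)
end

# Possible usage inside set_client_config (assumed, not applied here):
#   max_completion_tokens: max_completion_tokens_for(@assistant.language_model.api_name),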