Remove field reasoning_content from AssistantChatMessage #11481

Merged · 5 commits · Mar 19, 2025
core/src/providers/anthropic.rs (0 additions, 23 deletions)
@@ -87,8 +87,6 @@ struct AnthropicImageContent {
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
 #[serde(rename_all = "snake_case")]
 pub enum AnthropicContentType {
-    Thinking,
-    RedactedThinking,
     Text,
     Image,
     ToolUse,
@@ -102,12 +100,6 @@ struct AnthropicContent {
     #[serde(skip_serializing_if = "Option::is_none")]
     text: Option<String>,

-    #[serde(skip_serializing_if = "Option::is_none")]
-    thinking: Option<String>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    redacted_thinking: Option<String>,
-
     #[serde(skip_serializing_if = "Option::is_none", flatten)]
     tool_use: Option<AnthropicContentToolUse>,

Review thread on the removed `thinking` / `redacted_thinking` fields:

Contributor: not sure I follow that diff?

Contributor (Author): didn't need these 2 since we never need to parse back thinking tokens from ChatMessages

Contributor: right I thought they were already gone 👍

Contributor (Author): yy I forgot to commit on the previous PR 😬
@@ -309,8 +301,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
 Ok(AnthropicContent {
     r#type: AnthropicContentType::ToolUse,
     text: None,
-    thinking: None,
-    redacted_thinking: None,
     tool_use: Some(AnthropicContentToolUse {
         name: function_call.name.clone(),
         id: function_call.id.clone(),
@@ -329,8 +319,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
 let text = assistant_msg.content.as_ref().map(|text| AnthropicContent {
     r#type: AnthropicContentType::Text,
     text: Some(text.clone()),
-    thinking: None,
-    redacted_thinking: None,
     tool_result: None,
     tool_use: None,
     source: None,
@@ -358,8 +346,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
         content: Some(function_msg.content.clone()),
     }),
     text: None,
-    thinking: None,
-    redacted_thinking: None,
     source: None,
 };

@@ -376,8 +362,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
 MixedContent::TextContent(tc) => Ok(AnthropicContent {
     r#type: AnthropicContentType::Text,
     text: Some(tc.text.clone()),
-    thinking: None,
-    redacted_thinking: None,
     tool_result: None,
     tool_use: None,
     source: None,
@@ -388,8 +372,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
     r#type: AnthropicContentType::Image,
     source: Some(base64_data.clone()),
     text: None,
-    thinking: None,
-    redacted_thinking: None,
     tool_use: None,
     tool_result: None,
 })
@@ -409,8 +391,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
 content: vec![AnthropicContent {
     r#type: AnthropicContentType::Text,
     text: Some(t.clone()),
-    thinking: None,
-    redacted_thinking: None,
     tool_result: None,
     tool_use: None,
     source: None,
@@ -422,8 +402,6 @@ impl<'a> TryFrom<&'a ChatMessageConversionInput<'a>> for AnthropicChatMessage {
 content: vec![AnthropicContent {
     r#type: AnthropicContentType::Text,
     text: Some(system_msg.content.clone()),
-    thinking: None,
-    redacted_thinking: None,
     tool_result: None,
     tool_use: None,
     source: None,
@@ -590,7 +568,6 @@ impl TryFrom<ChatResponse> for AssistantChatMessage {
     role: ChatMessageRole::Assistant,
     name: None,
     content: text_content,
-    reasoning_content: None,
     function_call,
     function_calls,
 })
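The review thread above hinges on serde behavior worth spelling out: serde ignores unknown JSON fields by default when deserializing, so dropping `thinking`/`redacted_thinking` from the struct cannot break parsing of a stored payload that still carries them. A minimal sketch, using a simplified stand-in rather than the crate's real AnthropicContent:

use serde::Deserialize;

// Simplified stand-in for the post-PR AnthropicContent.
#[derive(Deserialize, Debug)]
struct Content {
    r#type: String,
    text: Option<String>,
}

fn main() {
    // An old payload that still carries a `thinking` field...
    let json = r#"{"type":"text","text":"hi","thinking":"..."}"#;
    // ...still parses: serde silently drops unknown fields unless the
    // struct opts into #[serde(deny_unknown_fields)].
    let c: Content = serde_json::from_str(json).unwrap();
    assert_eq!(c.text.as_deref(), Some("hi"));
}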
core/src/providers/chat_messages.rs (0 additions, 2 deletions)
@@ -76,8 +76,6 @@ pub struct AssistantChatMessage {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub content: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub reasoning_content: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub function_call: Option<ChatFunctionCall>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub function_calls: Option<Vec<ChatFunctionCall>>,
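On the serialization side, the removal is equally invisible on the wire: with skip_serializing_if = "Option::is_none", a None reasoning_content was never emitted in the first place. A minimal sketch with simplified stand-in types, not the crate's real struct:

use serde::Serialize;

#[derive(Serialize)]
struct AssistantChatMessage {
    #[serde(skip_serializing_if = "Option::is_none")]
    content: Option<String>,
    // Stand-in for Option<ChatFunctionCall> in the real struct.
    #[serde(skip_serializing_if = "Option::is_none")]
    function_call: Option<String>,
}

fn main() {
    let msg = AssistantChatMessage {
        content: Some("hello".to_string()),
        function_call: None,
    };
    // Prints {"content":"hello"}: None fields are omitted entirely, so
    // pre- and post-PR messages serialize identically whenever
    // reasoning_content was None.
    println!("{}", serde_json::to_string(&msg).unwrap());
}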
core/src/providers/deepseek.rs (2 additions, 2 deletions)
@@ -17,7 +17,7 @@ use parking_lot::RwLock;
 use serde_json::Value;
 use tokio::sync::mpsc::UnboundedSender;

-use super::helpers::strip_tools_fromn_chat_history;
+use super::helpers::strip_tools_from_chat_history;
 use super::openai_compatible_helpers::{
     openai_compatible_chat_completion, TransformSystemMessages,
 };
@@ -132,7 +132,7 @@ impl LLM for DeepseekLLM {
     self.api_key.clone().unwrap(),
     // Pre-process messages if model is deepseek-reasoner.
     match MODEL_IDS_WITH_TOOLS_SUPPORT.contains(&self.id.as_str()) {
-        false => Some(strip_tools_fromn_chat_history(messages)),
+        false => Some(strip_tools_from_chat_history(messages)),
         true => None,
     }
     .as_ref()
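The call sites in deepseek.rs, fireworks.rs, and togetherai.rs all share the same gating idiom: build a stripped copy of the history only when the model id is not in MODEL_IDS_WITH_TOOLS_SUPPORT, and borrow the original otherwise. The diff is truncated after .as_ref(), so the fallback below is an assumption; a sketch of the idiom with stand-in types:

// Stand-ins: the real list and ChatMessage type live in core.
const MODEL_IDS_WITH_TOOLS_SUPPORT: &[&str] = &["deepseek-chat"];
type ChatMessage = String;

// Stand-in for the real transformation in helpers.rs.
fn strip_tools_from_chat_history(messages: &Vec<ChatMessage>) -> Vec<ChatMessage> {
    messages.clone()
}

fn main() {
    let messages: Vec<ChatMessage> = vec!["hi".to_string()];
    let model_id = "deepseek-reasoner";

    // Bind the Option first so the reference from .as_ref() outlives the match.
    let stripped = match MODEL_IDS_WITH_TOOLS_SUPPORT.contains(&model_id) {
        false => Some(strip_tools_from_chat_history(&messages)),
        true => None,
    };
    // Assumed continuation of the truncated call site: fall back to the
    // original, unstripped history.
    let effective: &Vec<ChatMessage> = stripped.as_ref().unwrap_or(&messages);
    assert_eq!(effective.len(), 1);
}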
core/src/providers/fireworks.rs (2 additions, 2 deletions)
@@ -16,7 +16,7 @@ use serde_json::Value;
 use std::sync::Arc;
 use tokio::sync::mpsc::UnboundedSender;

-use super::helpers::strip_tools_fromn_chat_history;
+use super::helpers::strip_tools_from_chat_history;
 use super::openai_compatible_helpers::{
     openai_compatible_chat_completion, TransformSystemMessages,
 };
@@ -137,7 +137,7 @@ impl LLM for FireworksLLM {
     self.api_key.clone().unwrap(),
     // Pre-process messages if model is one of the supported models.
     match MODEL_IDS_WITH_TOOLS_SUPPORT.contains(&self.id.as_str()) {
-        false => Some(strip_tools_fromn_chat_history(messages)),
+        false => Some(strip_tools_from_chat_history(messages)),
         true => None,
     }
     .as_ref()
core/src/providers/helpers.rs (1 addition, 3 deletions)
@@ -6,7 +6,7 @@ use super::{
 // Useful for models that don't support tools.
 // For assistant messages, we remove function/tool calls (and we format them inside of the "content" field instead).
 // For function/tool result messages, we transform them into user messages.
-pub fn strip_tools_fromn_chat_history(messages: &Vec<ChatMessage>) -> Vec<ChatMessage> {
+pub fn strip_tools_from_chat_history(messages: &Vec<ChatMessage>) -> Vec<ChatMessage> {
     let mut new_messages = Vec::new();
     for message in messages {
         match message {
Expand Down Expand Up @@ -38,8 +38,6 @@ pub fn strip_tools_fromn_chat_history(messages: &Vec<ChatMessage>) -> Vec<ChatMe
content: Some(content),
name: message.name.clone(),
role: message.role.clone(),
// Unused for r1:
reasoning_content: None,
function_call: None,
function_calls: None,
}));
Expand Down
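For readers landing here from the rename: the doc comment above describes a two-part transformation. A rough sketch of it with simplified stand-in types, not the crate's actual implementation (whose ChatMessage enum is richer):

// Simplified stand-in for the real ChatMessage enum.
#[derive(Clone, Debug)]
enum ChatMessage {
    User { content: String },
    Assistant { content: Option<String>, function_calls: Vec<String> },
    Function { name: String, content: String },
}

fn strip_tools_from_chat_history(messages: &[ChatMessage]) -> Vec<ChatMessage> {
    messages
        .iter()
        .map(|m| match m {
            // Assistant messages: fold the tool calls into the content field.
            ChatMessage::Assistant { content, function_calls }
                if !function_calls.is_empty() =>
            {
                let calls = function_calls.join("\n");
                let content = match content {
                    Some(c) => format!("{}\n\nCalls:\n{}", c, calls),
                    None => format!("Calls:\n{}", calls),
                };
                ChatMessage::Assistant { content: Some(content), function_calls: vec![] }
            }
            // Function/tool results become plain user messages.
            ChatMessage::Function { name, content } => ChatMessage::User {
                content: format!("Result of `{}`:\n{}", name, content),
            },
            other => other.clone(),
        })
        .collect()
}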
core/src/providers/mistral.rs (0 additions, 1 deletion)
@@ -237,7 +237,6 @@ impl TryFrom<&MistralChatMessage> for AssistantChatMessage {

 Ok(AssistantChatMessage {
     content,
-    reasoning_content: None,
     role,
     name: None,
     function_call,
core/src/providers/openai_compatible_helpers.rs (0 additions, 32 deletions)
@@ -252,8 +252,6 @@ pub struct OpenAIChatMessage {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub content: Option<OpenAIChatMessageContent>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub reasoning_content: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub name: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub tool_calls: Option<Vec<OpenAIToolCall>>,
@@ -266,8 +264,6 @@ pub struct OpenAICompletionChatMessage {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub content: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub reasoning_content: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub name: Option<String>,
     pub role: OpenAIChatMessageRole,
     #[serde(skip_serializing_if = "Option::is_none")]
@@ -335,10 +331,6 @@ impl TryFrom<&OpenAICompletionChatMessage> for AssistantChatMessage {
         Some(c) => Some(c.clone()),
         None => None,
     };
-    let reasoning_content = match cm.reasoning_content.as_ref() {
-        Some(c) => Some(c.clone()),
-        None => None,
-    };

     let function_calls = if let Some(tool_calls) = cm.tool_calls.as_ref() {
         let cfc = tool_calls
Expand Down Expand Up @@ -368,7 +360,6 @@ impl TryFrom<&OpenAICompletionChatMessage> for AssistantChatMessage {

Ok(AssistantChatMessage {
content,
reasoning_content,
role,
name,
function_call,
@@ -438,7 +429,6 @@ impl TryFrom<&ChatMessage> for OpenAIChatMessage {
         Some(c) => Some(OpenAIChatMessageContent::try_from(c)?),
         None => None,
     },
-    reasoning_content: None,
     name: assistant_msg.name.clone(),
     role: OpenAIChatMessageRole::from(&assistant_msg.role),
     tool_calls: match assistant_msg.function_calls.as_ref() {
@@ -453,23 +443,20 @@ impl TryFrom<&ChatMessage> for OpenAIChatMessage {
     }),
     ChatMessage::Function(function_msg) => Ok(OpenAIChatMessage {
         content: Some(OpenAIChatMessageContent::try_from(&function_msg.content)?),
-        reasoning_content: None,
         name: None,
         role: OpenAIChatMessageRole::Tool,
         tool_calls: None,
         tool_call_id: Some(function_msg.function_call_id.clone()),
     }),
     ChatMessage::System(system_msg) => Ok(OpenAIChatMessage {
         content: Some(OpenAIChatMessageContent::try_from(&system_msg.content)?),
-        reasoning_content: None,
         name: None,
         role: OpenAIChatMessageRole::from(&system_msg.role),
         tool_calls: None,
         tool_call_id: None,
     }),
     ChatMessage::User(user_msg) => Ok(OpenAIChatMessage {
         content: Some(OpenAIChatMessageContent::try_from(&user_msg.content)?),
-        reasoning_content: None,
         name: user_msg.name.clone(),
         role: OpenAIChatMessageRole::from(&user_msg.role),
         tool_calls: None,
@@ -819,7 +806,6 @@ fn to_openai_messages(
             // Case 3: there's more than one content, the content isn't text or we don't want to squash them => keep structured format
             (_, _, _) => Some(OpenAIChatMessageContent::Structured(contents)),
         },
-        reasoning_content: None,
     }
 }
 })
@@ -843,7 +829,6 @@ fn to_openai_messages(
         .collect()
     }),
     content: m.content,
-    reasoning_content: None,
 }
 })
 // Remove system messages if requested.
@@ -1230,7 +1215,6 @@ async fn streamed_chat_completion(
 .map(|c| OpenAIChatChoice {
     message: OpenAICompletionChatMessage {
         content: Some("".to_string()),
-        reasoning_content: None,
         name: None,
         role: OpenAIChatMessageRole::System,
         tool_calls: None,
@@ -1288,14 +1272,6 @@ async fn streamed_chat_completion(
     },
 };

-match a.choices[j].delta.get("reasoning_content") {
-    None => (),
-    Some(reasoning_content) => {
-        c.choices[j].message.reasoning_content =
-            Some(reasoning_content.as_str().unwrap_or("").to_string());
-    }
-};
-
 if let Some(tool_calls) = a.choices[j]
     .delta
     .get("tool_calls")
@@ -1370,10 +1346,6 @@ async fn streamed_chat_completion(
         None => None,
         Some(c) => Some(c.trim().to_string()),
     };
-    m.message.reasoning_content = match m.message.reasoning_content.as_ref() {
-        None => None,
-        Some(c) => Some(c.trim().to_string()),
-    };
 }

 Ok((completion, request_id))
@@ -1525,10 +1497,6 @@ async fn chat_completion(
         None => None,
         Some(c) => Some(c.trim().to_string()),
     };
-    m.message.reasoning_content = match m.message.reasoning_content.as_ref() {
-        None => None,
-        Some(c) => Some(c.trim().to_string()),
-    };
 }

 Ok((completion, request_id))
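The largest removal above is in the streaming loop, where each chunk's delta used to be probed for reasoning_content. For context, this is the general shape of folding a streamed delta into an accumulated message; a sketch of the idiom only, not the crate's exact merge logic (the removed code assigned the field per chunk index rather than appending through a helper):

use serde_json::Value;

// Append a streamed `content` fragment to the message accumulated so far.
fn merge_content_delta(accumulated: &mut Option<String>, delta: &Value) {
    if let Some(fragment) = delta.get("content").and_then(|v| v.as_str()) {
        match accumulated {
            Some(c) => c.push_str(fragment),
            None => *accumulated = Some(fragment.to_string()),
        }
    }
}

fn main() {
    let mut content: Option<String> = None;
    for chunk in [r#"{"content":"Hel"}"#, r#"{"content":"lo"}"#] {
        let delta: Value = serde_json::from_str(chunk).unwrap();
        merge_content_delta(&mut content, &delta);
    }
    assert_eq!(content.as_deref(), Some("Hello"));
}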
core/src/providers/togetherai.rs (2 additions, 2 deletions)
@@ -16,7 +16,7 @@ use serde_json::Value;
 use std::sync::Arc;
 use tokio::sync::mpsc::UnboundedSender;

-use super::helpers::strip_tools_fromn_chat_history;
+use super::helpers::strip_tools_from_chat_history;
 use super::openai_compatible_helpers::{
     openai_compatible_chat_completion, TransformSystemMessages,
 };
@@ -138,7 +138,7 @@ impl LLM for TogetherAILLM {
     self.api_key.clone().unwrap(),
     // Pre-process messages if model is one of the supported models.
     match MODEL_IDS_WITH_TOOLS_SUPPORT.contains(&self.id.as_str()) {
-        false => Some(strip_tools_fromn_chat_history(messages)),
+        false => Some(strip_tools_from_chat_history(messages)),
         true => None,
     }
     .as_ref()