[GH-3723] Vertex AI Gemini logprobs support #3724

Open · wants to merge 3 commits into base: main
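This change adds request options (`responseLogprobs`, `logprobs`) and surfaces the returned log probabilities under a `"logprobs"` entry in the generation metadata as a `VertexAiGeminiApi.LogProbs` DTO. A minimal usage sketch, based on the builder methods and metadata key added in this diff; `chatModel` is assumed to be an already-configured `VertexAiGeminiChatModel`, and the prompt text is illustrative:

```java
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.vertexai.gemini.VertexAiGeminiChatOptions;
import org.springframework.ai.vertexai.gemini.api.VertexAiGeminiApi;

// Enable logprobs for the request: responseLogprobs returns the log probabilities of the
// chosen tokens; logprobs (1-20) additionally returns the top candidate tokens per step.
VertexAiGeminiChatOptions chatOptions = VertexAiGeminiChatOptions.builder()
	.responseLogprobs(true)
	.logprobs(3)
	.build();

var response = chatModel.call(new Prompt("Tell me a joke.", chatOptions));

// The output message metadata now carries a "logprobs" entry holding the LogProbs DTO.
var logprobs = (VertexAiGeminiApi.LogProbs) response.getResult()
	.getOutput()
	.getMetadata()
	.get("logprobs");

System.out.println("avg logprob: " + logprobs.avgLogprobs());
```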
@@ -84,6 +84,7 @@
import org.springframework.ai.retry.RetryUtils;
import org.springframework.ai.support.UsageCalculator;
import org.springframework.ai.tool.definition.ToolDefinition;
import org.springframework.ai.vertexai.gemini.api.VertexAiGeminiApi;
import org.springframework.ai.vertexai.gemini.common.VertexAiGeminiConstants;
import org.springframework.ai.vertexai.gemini.common.VertexAiGeminiSafetySetting;
import org.springframework.ai.vertexai.gemini.schema.VertexToolCallingManager;
@@ -580,8 +581,28 @@ protected List<Generation> responseCandidateToGeneration(Candidate candidate) {
int candidateIndex = candidate.getIndex();
FinishReason candidateFinishReason = candidate.getFinishReason();

// Convert from VertexAI protobuf to VertexAiGeminiApi DTOs
List<VertexAiGeminiApi.LogProbs.TopContent> topCandidates = candidate.getLogprobsResult()
.getTopCandidatesList()
.stream()
.filter(topCandidate -> !topCandidate.getCandidatesList().isEmpty())
.map(topCandidate -> new VertexAiGeminiApi.LogProbs.TopContent(topCandidate.getCandidatesList()
.stream()
.map(c -> new VertexAiGeminiApi.LogProbs.Content(c.getToken(), c.getLogProbability(), c.getTokenId()))
.toList()))
.toList();

List<VertexAiGeminiApi.LogProbs.Content> chosenCandidates = candidate.getLogprobsResult()
.getChosenCandidatesList()
.stream()
.map(c -> new VertexAiGeminiApi.LogProbs.Content(c.getToken(), c.getLogProbability(), c.getTokenId()))
.toList();

VertexAiGeminiApi.LogProbs logprobs = new VertexAiGeminiApi.LogProbs(candidate.getAvgLogprobs(), topCandidates,
chosenCandidates);

Map<String, Object> messageMetadata = Map.of("candidateIndex", candidateIndex, "finishReason",
candidateFinishReason);
candidateFinishReason, "logprobs", logprobs);

ChatGenerationMetadata chatGenerationMetadata = ChatGenerationMetadata.builder()
.finishReason(candidateFinishReason.name())
@@ -737,6 +758,10 @@ private GenerationConfig toGenerationConfig(VertexAiGeminiChatOptions options) {
if (options.getPresencePenalty() != null) {
generationConfigBuilder.setPresencePenalty(options.getPresencePenalty().floatValue());
}
if (options.getLogprobs() != null) {
generationConfigBuilder.setLogprobs(options.getLogprobs());
}
generationConfigBuilder.setResponseLogprobs(options.getResponseLogprobs());

return generationConfigBuilder.build();
}
@@ -64,6 +64,20 @@ public class VertexAiGeminiChatOptions implements ToolCallingChatOptions {
*/
private @JsonProperty("temperature") Double temperature;

/**
* Optional. Enable returning the log probabilities of the top candidate tokens at each generation step.
* The model's chosen token might not be the same as the top candidate token at each step.
* Specify the number of candidates to return by using an integer value in the range of 1-20.
* Should not be set unless responseLogprobs is set to true.
*/
private @JsonProperty("logprobs") Integer logprobs;

/**
* Optional. If true, returns the log probabilities of the tokens that were chosen by the model at each step.
* By default, this parameter is set to false.
*/
private @JsonProperty("responseLogprobs") boolean responseLogprobs;

/**
* Optional. If specified, nucleus sampling will be used.
*/
@@ -162,6 +176,8 @@ public static VertexAiGeminiChatOptions fromOptions(VertexAiGeminiChatOptions fr
options.setSafetySettings(fromOptions.getSafetySettings());
options.setInternalToolExecutionEnabled(fromOptions.getInternalToolExecutionEnabled());
options.setToolContext(fromOptions.getToolContext());
options.setLogprobs(fromOptions.getLogprobs());
options.setResponseLogprobs(fromOptions.getResponseLogprobs());
return options;
}

@@ -183,6 +199,10 @@ public void setTemperature(Double temperature) {
this.temperature = temperature;
}

public void setResponseLogprobs(boolean responseLogprobs) {
this.responseLogprobs = responseLogprobs;
}

@Override
public Double getTopP() {
return this.topP;
@@ -326,6 +346,18 @@ public void setToolContext(Map<String, Object> toolContext) {
this.toolContext = toolContext;
}

public Integer getLogprobs() {
return this.logprobs;
}

public void setLogprobs(Integer logprobs) {
this.logprobs = logprobs;
}

public boolean getResponseLogprobs() {
return this.responseLogprobs;
}

@Override
public boolean equals(Object o) {
if (this == o) {
@@ -346,15 +378,16 @@ public boolean equals(Object o) {
&& Objects.equals(this.toolNames, that.toolNames)
&& Objects.equals(this.safetySettings, that.safetySettings)
&& Objects.equals(this.internalToolExecutionEnabled, that.internalToolExecutionEnabled)
&& Objects.equals(this.toolContext, that.toolContext);
&& Objects.equals(this.toolContext, that.toolContext) && Objects.equals(this.logprobs, that.logprobs)
&& Objects.equals(this.responseLogprobs, that.responseLogprobs);
}

@Override
public int hashCode() {
return Objects.hash(this.stopSequences, this.temperature, this.topP, this.topK, this.candidateCount,
this.frequencyPenalty, this.presencePenalty, this.maxOutputTokens, this.model, this.responseMimeType,
this.toolCallbacks, this.toolNames, this.googleSearchRetrieval, this.safetySettings,
this.internalToolExecutionEnabled, this.toolContext);
this.internalToolExecutionEnabled, this.toolContext, this.logprobs, this.responseLogprobs);
}

@Override
@@ -365,7 +398,8 @@ public String toString() {
+ this.candidateCount + ", maxOutputTokens=" + this.maxOutputTokens + ", model='" + this.model + '\''
+ ", responseMimeType='" + this.responseMimeType + '\'' + ", toolCallbacks=" + this.toolCallbacks
+ ", toolNames=" + this.toolNames + ", googleSearchRetrieval=" + this.googleSearchRetrieval
+ ", safetySettings=" + this.safetySettings + '}';
+ ", safetySettings=" + this.safetySettings + ", logProbs=" + this.logprobs + ", responseLogprobs="
+ this.responseLogprobs + '}';
}

@Override
@@ -488,6 +522,16 @@ public Builder toolContext(Map<String, Object> toolContext) {
return this;
}

public Builder logprobs(Integer logprobs) {
this.options.setLogprobs(logprobs);
return this;
}

public Builder responseLogprobs(boolean responseLogprobs) {
this.options.setResponseLogprobs(responseLogprobs);
return this;
}

public VertexAiGeminiChatOptions build() {
return this.options;
}
@@ -0,0 +1,31 @@
/*
* Copyright 2023-2025 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ai.vertexai.gemini.api;
Contributor review comment: Please add a license header.

import java.util.List;

/**
 * Plain DTOs used to expose Vertex AI Gemini log-probability results in the generation metadata.
 */
public class VertexAiGeminiApi {

public record LogProbs(Double avgLogprobs, List<TopContent> topCandidates,
List<LogProbs.Content> chosenCandidates) {
public record Content(String token, Float logprob, Integer id) {
}

public record TopContent(List<Content> candidates) {
}
}

}
@@ -262,6 +262,8 @@ public void createRequestWithGenerationConfigOptions() {
.stopSequences(List.of("stop1", "stop2"))
.candidateCount(1)
.responseMimeType("application/json")
.responseLogprobs(true)
.logprobs(2)
.build())
.build();

@@ -280,6 +282,8 @@ public void createRequestWithGenerationConfigOptions() {
assertThat(request.model().getGenerationConfig().getStopSequences(0)).isEqualTo("stop1");
assertThat(request.model().getGenerationConfig().getStopSequences(1)).isEqualTo("stop2");
assertThat(request.model().getGenerationConfig().getResponseMimeType()).isEqualTo("application/json");
assertThat(request.model().getGenerationConfig().getLogprobs()).isEqualTo(2);
assertThat(request.model().getGenerationConfig().getResponseLogprobs()).isTrue();
}

}
@@ -47,6 +47,7 @@
import org.springframework.ai.model.tool.ToolCallingManager;
import org.springframework.ai.tool.annotation.Tool;
import org.springframework.ai.vertexai.gemini.VertexAiGeminiChatModel.ChatModel;
import org.springframework.ai.vertexai.gemini.api.VertexAiGeminiApi;
import org.springframework.ai.vertexai.gemini.common.VertexAiGeminiSafetySetting;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
@@ -226,6 +227,26 @@ void textStream() {
assertThat(generationTextFromStream).isNotEmpty();
}

@Test
void logprobs() {
VertexAiGeminiChatOptions chatOptions = VertexAiGeminiChatOptions.builder()
.logprobs(1)
.responseLogprobs(true)
.build();

var logprobs = (VertexAiGeminiApi.LogProbs) this.chatModel
.call(new Prompt("Explain Bulgaria? Answer in 10 paragraphs.", chatOptions))
.getResult()
.getOutput()
.getMetadata()
.get("logprobs");

assertThat(logprobs).isNotNull();
assertThat(logprobs.avgLogprobs()).isNotZero();
assertThat(logprobs.topCandidates()).isNotEmpty();
assertThat(logprobs.chosenCandidates()).isNotEmpty();
}

@Test
void beanStreamOutputConverterRecords() {

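For reference, a short sketch of walking the returned `VertexAiGeminiApi.LogProbs` DTO; the accessor names come from the records in this diff, and `logprobs` is assumed to be the value read from the `"logprobs"` metadata entry as in the integration test above:

```java
// Log probabilities of the tokens the model actually chose.
for (VertexAiGeminiApi.LogProbs.Content chosen : logprobs.chosenCandidates()) {
	System.out.printf("chosen %s (id=%d): %.4f%n", chosen.token(), chosen.id(), chosen.logprob());
}

// Top candidate tokens considered at each generation step (populated when the logprobs option is set).
for (VertexAiGeminiApi.LogProbs.TopContent step : logprobs.topCandidates()) {
	for (VertexAiGeminiApi.LogProbs.Content candidate : step.candidates()) {
		System.out.printf("  candidate %s: %.4f%n", candidate.token(), candidate.logprob());
	}
}
```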