Skip to content

Commit

Permalink
Added support for stream_options in chat completions
Browse files Browse the repository at this point in the history
  • Loading branch information
usrivastava92 committed May 17, 2024
1 parent e7de81c commit 217fb8b
Show file tree
Hide file tree
Showing 4 changed files with 64 additions and 1 deletion.
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
package com.theokanning.openai.completion.chat;
import com.theokanning.openai.Usage;
import lombok.Data;

import java.util.List;
Expand Down Expand Up @@ -32,4 +33,9 @@ public class ChatCompletionChunk {
* A list of all generated completions.
*/
List<ChatCompletionChoice> choices;
}

/**
* The API usage for this request
*/
Usage usage;
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,13 @@ public class ChatCompletionRequest {
*/
Boolean stream;

/**
* Options for streaming response. Only set this when you set stream: true
* <a href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options">OpenAI Docs</a>
*/
@JsonProperty("stream_options")
StreamOptions streamOptions;

/**
* Up to 4 sequences where the API will stop generating further tokens.
*/
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package com.theokanning.openai.completion.chat;

import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * <p>Options controlling the streaming response of a chat completion.
 * Only set this when {@code stream: true} is set on the request.</p>
 * see <a href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream_options">OpenAi documentation</a>
 */
@Data
@Builder
@NoArgsConstructor(force = true)
@AllArgsConstructor
public class StreamOptions {

    /**
     * If set, an additional chunk will be streamed before the data: [DONE] message.
     * The usage field on this chunk shows the token usage statistics for the entire request,
     * and the choices field will always be an empty array.
     * All other chunks will also include a usage field, but with a null value.
     */
    @JsonProperty("include_usage")
    Boolean includeUsage;

}
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,31 @@ void streamChatCompletion() {
assertNotNull(chunks.get(0).getChoices().get(0));
}

@Test
void streamChatCompletionWithStreamOptions() {
    // Single system message steering the assistant's persona.
    final List<ChatMessage> messages = new ArrayList<>();
    messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such."));

    // stream=true together with include_usage=true asks the API to append
    // a final usage-only chunk before the [DONE] sentinel.
    ChatCompletionRequest request = ChatCompletionRequest
            .builder()
            .model("gpt-3.5-turbo")
            .messages(messages)
            .n(1)
            .maxTokens(50)
            .logitBias(new HashMap<>())
            .stream(true)
            .streamOptions(new StreamOptions(true))
            .build();

    // Drain the whole stream into a list so we can inspect every chunk.
    List<ChatCompletionChunk> received = new ArrayList<>();
    service.streamChatCompletion(request).blockingForEach(received::add);

    assertTrue(received.size() > 0);
    assertNotNull(received.get(0).getChoices().get(0));
    // Every chunk except the last must carry a null usage field...
    received.subList(0, received.size() - 1).forEach(chunk -> assertNull(chunk.getUsage()));
    // ...and the final chunk carries the aggregate token usage.
    assertNotNull(received.get(received.size() - 1).getUsage());
}

@Test
void createChatCompletionWithFunctions() {
final List<ChatFunction> functions = Collections.singletonList(ChatFunction.builder()
Expand Down

0 comments on commit 217fb8b

Please sign in to comment.