From 217fb8bcb254e76590ab099b346e7c70389af2bc Mon Sep 17 00:00:00 2001 From: Utkarsh Srivastava Date: Fri, 17 May 2024 20:06:17 +0530 Subject: [PATCH] Added support for stream_options in chat completions Fixes: #502 --- .../completion/chat/ChatCompletionChunk.java | 8 +++++- .../chat/ChatCompletionRequest.java | 7 ++++++ .../openai/completion/chat/StreamOptions.java | 25 +++++++++++++++++++ .../openai/service/ChatCompletionTest.java | 25 +++++++++++++++++++ 4 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 api/src/main/java/com/theokanning/openai/completion/chat/StreamOptions.java diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java index 4bbab2b9..48454e76 100644 --- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java +++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionChunk.java @@ -1,4 +1,5 @@ package com.theokanning.openai.completion.chat; +import com.theokanning.openai.Usage; import lombok.Data; import java.util.List; @@ -32,4 +33,9 @@ public class ChatCompletionChunk { * A list of all generated completions. */ List choices; -} \ No newline at end of file + + /** + * The API usage for this request + */ + Usage usage; +} diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java index e4479ff3..9dcfeac5 100644 --- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java +++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java @@ -54,6 +54,13 @@ public class ChatCompletionRequest { */ Boolean stream; + /** + * Options for streaming response. Only set this when you set stream: true + * OpenAI Docs + */ + @JsonProperty("stream_options") + StreamOptions streamOptions; + /** * Up to 4 sequences where the API will stop generating further tokens. */ diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/StreamOptions.java b/api/src/main/java/com/theokanning/openai/completion/chat/StreamOptions.java new file mode 100644 index 00000000..a3af6257 --- /dev/null +++ b/api/src/main/java/com/theokanning/openai/completion/chat/StreamOptions.java @@ -0,0 +1,25 @@ +package com.theokanning.openai.completion.chat; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +/** + *
+ * Options for streaming response. Only set this when you set stream: true
+ * see OpenAi documentation
+ */
+@Data
+@NoArgsConstructor(force = true)
+@AllArgsConstructor
+public class StreamOptions {
+
+    /**
+     * If set, an additional chunk will be streamed before the data: [DONE] message.
+     * The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will always be an empty array.
+     * All other chunks will also include a usage field, but with a null value.
+     */
+    @JsonProperty("include_usage")
+    Boolean includeUsage;
+
+}
diff --git a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
index 25f0defb..3505a4ef 100644
--- a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
+++ b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
@@ -84,6 +84,31 @@ void streamChatCompletion() {
         assertNotNull(chunks.get(0).getChoices().get(0));
     }
 
+    @Test
+    void streamChatCompletionWithStreamOptions() {
+        final List<ChatMessage> messages = new ArrayList<>();
+        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a dog and will speak as such.");
+        messages.add(systemMessage);
+
+        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
+                .builder()
+                .model("gpt-3.5-turbo")
+                .messages(messages)
+                .n(1)
+                .maxTokens(50)
+                .logitBias(new HashMap<>())
+                .stream(true)
+                .streamOptions(new StreamOptions(true))
+                .build();
+
+        List<ChatCompletionChunk> chunks = new ArrayList<>();
+        service.streamChatCompletion(chatCompletionRequest).blockingForEach(chunks::add);
+        assertTrue(chunks.size() > 0);
+        assertNotNull(chunks.get(0).getChoices().get(0));
+        chunks.stream().limit(chunks.size() - 1).forEach(chunk -> assertNull(chunk.getUsage()));
+        assertNotNull(chunks.get(chunks.size() - 1).getUsage());
+    }
+
     @Test
     void createChatCompletionWithFunctions() {
         final List<ChatFunction> functions = Collections.singletonList(ChatFunction.builder()