Implement IsRunning() as health checks
Fixes #32
awaescher committed May 15, 2024
1 parent 077d38b commit cf3b871
Showing 3 changed files with 19 additions and 6 deletions.
10 changes: 4 additions & 6 deletions OllamaApiConsole/Program.cs
@@ -6,7 +6,7 @@
 AnsiConsole.Write(new Rule("OllamaSharp Api Console").LeftJustified());
 AnsiConsole.WriteLine();

-OllamaApiClient ollama;
+OllamaApiClient ollama = null;
 var connected = false;

 do
@@ -27,15 +27,14 @@
 var uri = new Uri(url);
 Console.WriteLine($"Connecting to {uri} ...");

-ollama = new OllamaApiClient(url);
-
 try
 {
+ollama = new OllamaApiClient(url);
+connected = await ollama.IsRunning();
+
 var models = await ollama.ListLocalModels();
 if (!models.Any())
 AnsiConsole.MarkupLineInterpolated($"[yellow]Your Ollama instance does not provide any models :([/]");
-
-connected = true;
 }
 catch (Exception ex)
 {
@@ -44,7 +43,6 @@
 }
 } while (!connected);

-
 string demo;

 do
6 changes: 6 additions & 0 deletions src/IOllamaApiClient.cs
@@ -120,5 +120,11 @@ public interface IOllamaApiClient
 /// Should be reused for further calls to this method to keep a chat going.
 /// </returns>
 Task<ConversationContext> StreamCompletion(GenerateCompletionRequest request, IResponseStreamer<GenerateCompletionResponseStream> streamer, CancellationToken cancellationToken = default);
+
+/// <summary>
+/// Sends a query to check whether the Ollama api is running or not
+/// </summary>
+/// <param name="cancellationToken">The token to cancel the operation with</param>
+Task<bool> IsRunning(CancellationToken cancellationToken = default);
 }
 }
9 changes: 9 additions & 0 deletions src/OllamaApiClient.cs
@@ -124,6 +124,14 @@ public async Task<IEnumerable<Message>> SendChat(ChatRequest chatRequest, IRespo
 return await ProcessStreamedChatResponseAsync(chatRequest, response, streamer, cancellationToken);
 }

+public async Task<bool> IsRunning(CancellationToken cancellationToken = default)
+{
+var response = await _client.GetAsync("", cancellationToken); // without route returns "Ollama is running"
+response.EnsureSuccessStatusCode();
+var stringContent = await response.Content.ReadAsStringAsync();
+return !string.IsNullOrWhiteSpace(stringContent);
+}
+
 private async Task<ConversationContext> GenerateCompletion(GenerateCompletionRequest generateRequest, IResponseStreamer<GenerateCompletionResponseStream> streamer, CancellationToken cancellationToken)
 {
 var request = new HttpRequestMessage(HttpMethod.Post, "api/generate")
@@ -145,6 +153,7 @@ private async Task<TResponse> GetAsync<TResponse>(string endpoint, CancellationT
 response.EnsureSuccessStatusCode();

 var responseBody = await response.Content.ReadAsStringAsync();
+
 return JsonSerializer.Deserialize<TResponse>(responseBody);
 }

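For reference, a minimal usage sketch (not part of the commit) showing how a consumer might call the new IsRunning() health check before talking to the API. It assumes the OllamaSharp namespace and the default local Ollama endpoint at http://localhost:11434; as in the Program.cs change above, an unreachable host surfaces as an exception from the underlying HTTP call rather than a false return value.

// Usage sketch only; names outside the commit (HealthCheckExample, the endpoint URL) are illustrative.
using System;
using System.Threading.Tasks;
using OllamaSharp;

public static class HealthCheckExample
{
    public static async Task Main()
    {
        var ollama = new OllamaApiClient("http://localhost:11434");

        try
        {
            // IsRunning() issues a GET against the API root, which answers "Ollama is running" when the server is up.
            var running = await ollama.IsRunning();
            Console.WriteLine(running
                ? "Ollama is reachable."
                : "Unexpected empty response from the API root.");
        }
        catch (Exception ex)
        {
            // Connection failures are thrown by the HttpClient call, so catch them here.
            Console.WriteLine($"Could not reach Ollama: {ex.Message}");
        }
    }
}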
