Skip to content

Commit

Permalink
Working on JSON output; left as-is for now — there is no easy answer to this one
Browse files Browse the repository at this point in the history
  • Loading branch information
alnutile committed Jul 14, 2024
1 parent 52f2c92 commit 15adc0b
Show file tree
Hide file tree
Showing 17 changed files with 297 additions and 90 deletions.
8 changes: 2 additions & 6 deletions Modules/LlmDriver/app/BaseClient.php
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ public function setFormatJson(): self

public function modifyPayload(array $payload): array
{
$payload = $this->addJsonFormat($payload);

return $payload;
}

Expand Down Expand Up @@ -62,12 +64,6 @@ protected function messagesToArray(array $messages): array

public function addJsonFormat(array $payload): array
{
if ($this->formatJson) {
$payload['response_format'] = [
'type' => 'json_object',
];
}

return $payload;
}

Expand Down
59 changes: 9 additions & 50 deletions Modules/LlmDriver/app/Functions/ReportingTool.php
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,18 @@

namespace LlmLaraHub\LlmDriver\Functions;

use App\Domains\Messages\RoleEnum;
use App\Domains\Prompts\ReportBuildingFindRequirementsPrompt;
use App\Domains\Prompts\ReportingSummaryPrompt;
use App\Domains\Reporting\ReportTypeEnum;
use App\Domains\Reporting\StatusEnum;
use App\Jobs\MakeReportSectionsJob;
use App\Jobs\ReportingToolSummarizeReportJob;
use App\Jobs\ReportMakeEntriesJob;
use App\Models\Message;
use App\Models\Report;
use Illuminate\Bus\Batch;
use Illuminate\Support\Collection;
use Illuminate\Support\Facades\Bus;
use Illuminate\Support\Facades\Log;
use LlmLaraHub\LlmDriver\LlmDriverFacade;
use LlmLaraHub\LlmDriver\Responses\CompletionResponse;
use LlmLaraHub\LlmDriver\Responses\FunctionResponse;
use LlmLaraHub\LlmDriver\ToolsHelper;

Expand All @@ -32,8 +29,6 @@ class ReportingTool extends FunctionContract

protected array $results = [];

protected array $promptHistory = [];

protected array $sectionJobs = [];

public function handle(
Expand Down Expand Up @@ -74,45 +69,26 @@ public function handle(
])->name(sprintf('Reporting Entities Report Id %s', $report->id))
->allowFailures()
->finally(function (Batch $batch) use ($report) {
$report->update([
'status_entries_generation' => StatusEnum::Complete,
]);
Bus::batch([
new ReportingToolSummarizeReportJob($report),
])->name(sprintf('Reporting Tool Summarize Report Id %s', $report->id))
->allowFailures()
->dispatch();
})
->dispatch();

})
->dispatch();

notify_ui($message->getChat(), 'Building Summary');

$response = $this->summarizeReport($report);

$report->update([
'status_sections_generation' => StatusEnum::Running,
]);

$assistantMessage = $message->getChat()->addInput(
message: $response->content,
role: RoleEnum::Assistant,
systemPrompt: $message->getChat()->getChatable()->systemPrompt(),
show_in_thread: true,
meta_data: $message->meta_data,
tools: $message->tools
);

$this->savePromptHistory($assistantMessage,
implode("\n", $this->promptHistory));

$report->message_id = $assistantMessage->id;
$report->save();

notify_ui($message->getChat(), 'Building Solutions list');
notify_ui_report($report, 'Building Solutions list');
notify_ui_complete($report->getChat());
notify_ui($report->getChat(), 'Running');

return FunctionResponse::from([
'content' => $response->content,
'prompt' => implode('\n', $this->promptHistory),
'content' => 'Building report and Sections and then summarizing',
'prompt' => '',
'requires_followup' => false,
'documentChunks' => collect([]),
'save_to_message' => false,
Expand Down Expand Up @@ -159,23 +135,6 @@ protected function buildUpSections(Collection $documents, Report $report, Messag
}
}

protected function summarizeReport(Report $report): CompletionResponse
{
$sectionContent = $report->refresh()->sections->pluck('content')->toArray();
$sectionContent = implode("\n", $sectionContent);

$prompt = ReportingSummaryPrompt::prompt($sectionContent);

$this->promptHistory = [$prompt];

/** @var CompletionResponse $response */
$response = LlmDriverFacade::driver(
$report->getChatable()->getDriver()
)->completion($prompt);

return $response;
}

/**
* @return PropertyDto[]
*/
Expand Down
15 changes: 10 additions & 5 deletions Modules/LlmDriver/app/Functions/ReportingToolMakeSections.php
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
use App\Models\Document;
use App\Models\Report;
use App\Models\Section;
use Illuminate\Support\Arr;
use Illuminate\Support\Facades\Log;
use LlmLaraHub\LlmDriver\LlmDriverFacade;

Expand All @@ -22,6 +23,9 @@ public function handle(

protected function poolPrompt(array $prompts, Report $report, Document $document): void
{
/**
* @NOTE if Format JSON not good enough will try this again
*/
$dto = FunctionDto::from([
'name' => 'reporting_json',
'description' => 'JSON Summary of the report',
Expand All @@ -44,12 +48,12 @@ protected function poolPrompt(array $prompts, Report $report, Document $document
]),
]);

Log::info('LlmDriver::ClaudeClient::poolPrompt', [
Log::info('LlmDriver::Reporting::poolPrompt', [
'driver' => $report->getDriver(),
'dto' => $dto,
'prompts' => $prompts,
]);

$results = LlmDriverFacade::driver($report->getDriver())
->setForceTool($dto)
->completionPool($prompts);

foreach ($results as $resultIndex => $result) {
Expand All @@ -69,9 +73,10 @@ protected function makeSectionFromContent(
notify_ui_report($report, 'Building Requirements list');

$contentDecoded = json_decode($content, true);
$contentDecoded = Arr::wrap($contentDecoded);
foreach ($contentDecoded as $sectionIndex => $sectionText) {
$title = data_get($sectionText, 'title', 'NOT TITLE GIVEN');
$contentBody = data_get($sectionText, 'content', 'NOT CONTENT GIVEN');
$title = data_get($sectionText, 'title', 'NO TITLE GIVEN');
$contentBody = data_get($sectionText, 'content', 'NO CONTENT GIVEN');
Section::updateOrCreate([
'document_id' => $document->id,
'report_id' => $report->id,
Expand Down
62 changes: 62 additions & 0 deletions Modules/LlmDriver/app/Functions/Reports/SummarizeReport.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
<?php

namespace LlmLaraHub\LlmDriver\Functions\Reports;

use App\Domains\Messages\RoleEnum;
use App\Domains\Prompts\ReportingSummaryPrompt;
use App\Models\Report;
use LlmLaraHub\LlmDriver\LlmDriverFacade;
use LlmLaraHub\LlmDriver\Responses\CompletionResponse;
use LlmLaraHub\LlmDriver\ToolsHelper;

class SummarizeReport
{
    use ToolsHelper;

    /**
     * Prompts used to build the summary; persisted alongside the
     * assistant message via savePromptHistory().
     *
     * @var array<int, string>
     */
    protected array $promptHistory = [];

    /**
     * Summarize all section content for the report, store the result as an
     * assistant message on the report's chat, link it back to the report,
     * and push progress/completion notifications to the UI.
     */
    public function handle(Report $report): void
    {
        // NOTE(review): assumes $report->message is set — confirm the
        // dispatching batch only queues this job after the message exists.
        $message = $report->message;

        notify_ui($message->getChat(), 'Building Summary');

        $response = $this->summarizeReport($report);

        $assistantMessage = $message->getChat()->addInput(
            message: $response->content,
            role: RoleEnum::Assistant,
            systemPrompt: $message->getChat()->getChatable()->systemPrompt(),
            show_in_thread: true,
            meta_data: $message->meta_data,
            tools: $message->tools
        );

        $this->savePromptHistory($assistantMessage,
            implode("\n", $this->promptHistory));

        // Link the generated summary message back to the report.
        $report->message_id = $assistantMessage->id;
        $report->save();

        notify_ui($message->getChat(), 'Building Solutions list');
        notify_ui_report($report, 'Building Solutions list');
        notify_ui_complete($report->getChat());
    }

    /**
     * Concatenate every section's content, run it through the reporting
     * summary prompt, and return the LLM completion.
     */
    protected function summarizeReport(Report $report): CompletionResponse
    {
        // refresh() so sections created by the just-finished batch are visible.
        $sectionContent = $report->refresh()->sections->pluck('content')->toArray();
        $sectionContent = implode("\n", $sectionContent);

        $prompt = ReportingSummaryPrompt::prompt($sectionContent);

        $this->promptHistory = [$prompt];

        /** @var CompletionResponse $response */
        $response = LlmDriverFacade::driver(
            $report->getChatable()->getDriver()
        )->completion($prompt);

        return $response;
    }
}
2 changes: 0 additions & 2 deletions Modules/LlmDriver/app/Helpers/CreateReferencesTrait.php
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@ protected function saveDocumentReference(
Message $model,
Collection $documentChunks
): void {
//put_fixture("document_chunks.json", $documentChunks->toArray());
//add each one to a batch job or do the work here.
foreach ($documentChunks as $documentChunk) {
$model->message_document_references()->create([
'document_chunk_id' => $documentChunk->id,
Expand Down
36 changes: 27 additions & 9 deletions Modules/LlmDriver/app/OllamaClient.php
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,13 @@ public function embedData(string $prompt): EmbeddingsResponseDto
]);
}

public function addJsonFormat(array $payload): array
{
//@NOTE Just too hard if it is an array of objects
//$payload['format'] = 'json';
return $payload;
}

/**
* This is to get functions out of the llm
* if none are returned your system
Expand Down Expand Up @@ -89,13 +96,15 @@ public function chat(array $messages): CompletionResponse

$messages = $this->remapMessages($messages);

put_fixture('messages_llama3.json', $messages);

$response = $this->getClient()->post('/chat', [
$payload = [
'model' => $this->getConfig('ollama')['models']['completion_model'],
'messages' => $messages,
'stream' => false,
]);
];

$payload = $this->modifyPayload($payload);

$response = $this->getClient()->post('/chat', $payload);

$results = $response->json()['message']['content'];

Expand Down Expand Up @@ -123,15 +132,24 @@ public function completionPool(array $prompts, int $temperature = 0): array
$baseUrl
) {
foreach ($prompts as $prompt) {
$payload = [
'model' => $model,
'prompt' => $prompt,
'stream' => false,
];

$payload = $this->modifyPayload($payload);

Log::info('Ollama Request', [
'prompt' => $prompt,
'payload' => $payload,
]);

$pool->withHeaders([
'content-type' => 'application/json',
])->timeout(300)
->baseUrl($baseUrl)
->post('/generate', [
'model' => $model,
'prompt' => $prompt,
'stream' => false,
]);
->post('/generate', $payload);
}
});

Expand Down
14 changes: 14 additions & 0 deletions Modules/LlmDriver/app/OpenAiClient.php
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ public function completionPool(array $prompts, int $temperature = 0): array
}

$responses = Http::pool(function (Pool $pool) use ($prompts, $token) {

foreach ($prompts as $prompt) {
$payload = [
'model' => $this->getConfig('openai')['models']['completion_model'],
Expand Down Expand Up @@ -272,6 +273,19 @@ public function modifyPayload(array $payload): array
return $payload;
}

public function addJsonFormat(array $payload): array
{
// @NOTE the results are not great if you want an array of objects

// if ($this->formatJson) {
// $payload['response_format'] = [
// 'type' => 'json_object',
// ];
// }

return $payload;
}

/**
* This is to get functions out of the llm
* if none are returned your system
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ public static function prompt(string $context, string $userPrompt, string $colle
### FORMAT ###
Output in JSON format as an Array of Objects with keys: title (string), content (string).
NO SURROUNDING TEXT JUST VALID JSON! START WITH [ and END WITH ]
NO SURROUNDING TEXT JUST VALID JSON! START WITH [ and END WITH ] even if only one item found.
### User Prompt ###
$userPrompt
Expand Down
40 changes: 40 additions & 0 deletions app/Jobs/ReportingToolSummarizeReportJob.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
<?php

namespace App\Jobs;

use App\Models\Report;
use Facades\LlmLaraHub\LlmDriver\Functions\Reports\SummarizeReport;
use Illuminate\Bus\Batchable;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Foundation\Bus\Dispatchable;
use Illuminate\Queue\InteractsWithQueue;
use Illuminate\Queue\SerializesModels;

class ReportingToolSummarizeReportJob implements ShouldQueue
{
    use Batchable;
    use Dispatchable, InteractsWithQueue, Queueable, SerializesModels;

    /**
     * Create a new job instance.
     *
     * @param  Report  $report  the report whose sections will be summarized
     */
    public function __construct(public Report $report)
    {
        //
    }

    /**
     * Execute the job.
     *
     * Skips the work when the owning batch has been cancelled, then delegates
     * the actual summarization to the SummarizeReport real-time facade.
     */
    public function handle(): void
    {
        // Batchable::batch() returns null when the job is dispatched outside
        // a batch; the nullsafe call prevents a fatal error in that case.
        if ($this->batch()?->cancelled()) {
            return;
        }

        SummarizeReport::handle($this->report);
    }
}
Loading

0 comments on commit 15adc0b

Please sign in to comment.