Skip to content

Commit

Permalink
add the verification process to the prompt
Browse files Browse the repository at this point in the history
  • Loading branch information
alnutile committed Apr 29, 2024
1 parent 5fd8ed0 commit 927b435
Show file tree
Hide file tree
Showing 12 changed files with 290 additions and 16 deletions.
8 changes: 8 additions & 0 deletions app/Domains/Agents/BaseAgent.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
<?php

namespace App\Domains\Agents;

/**
 * Base contract for agent classes.
 *
 * Concrete agents take a VerifyPromptInputDto (original prompt, context,
 * raw LLM response, extra verification instructions) and return a
 * VerifyPromptOutputDto carrying the verified/cleaned-up response.
 */
abstract class BaseAgent
{
    /**
     * Run the agent's verification step on the given input bundle.
     *
     * @param  VerifyPromptInputDto  $input  prompt/context/response to verify
     * @return VerifyPromptOutputDto the input fields plus the verified response
     */
    abstract public function verify(VerifyPromptInputDto $input): VerifyPromptOutputDto;
}
6 changes: 6 additions & 0 deletions app/Domains/Agents/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Agents

This directory holds classes for specific agent use cases.
I am calling them "agents", but in practice the line between agents and tools/functions is blurry — expect some overlap.


17 changes: 17 additions & 0 deletions app/Domains/Agents/VerifyPromptInputDto.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
<?php

namespace App\Domains\Agents;

use LlmLaraHub\LlmDriver\HasDrivers;
use Spatie\LaravelData\Data;

/**
 * Input payload for an agent's verify() step.
 *
 * Built via Spatie Data's ::from([...]) factory; see VerifyPromptOutputDto
 * for the matching output shape.
 */
class VerifyPromptInputDto extends Data
{
    public function __construct(
        public HasDrivers $chattable,  // chattable model that resolves the LLM driver
        public string $originalPrompt, // the user's original question
        public string $context,        // context handed to the LLM with the prompt
        public string $llmResponse,    // raw LLM response to be verified
        public string $verifyPrompt,   // extra instructions for the verification pass
    ) {
    }
}
18 changes: 18 additions & 0 deletions app/Domains/Agents/VerifyPromptOutputDto.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
<?php

namespace App\Domains\Agents;

use LlmLaraHub\LlmDriver\HasDrivers;
use Spatie\LaravelData\Data;

/**
 * Output payload of an agent's verify() step.
 *
 * Echoes back every field of VerifyPromptInputDto and adds `response`,
 * the verified/cleaned-up text that should be shown to the user.
 */
class VerifyPromptOutputDto extends Data
{
    public function __construct(
        public HasDrivers $chattable,  // chattable model that resolved the LLM driver
        public string $originalPrompt, // the user's original question
        public string $context,        // context handed to the LLM with the prompt
        public string $llmResponse,    // raw LLM response that was verified
        public string $verifyPrompt,   // extra instructions used for verification
        public string $response,       // final verified response for the chat window
    ) {
    }
}
78 changes: 78 additions & 0 deletions app/Domains/Agents/VerifyResponseAgent.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
<?php

namespace App\Domains\Agents;

use Illuminate\Support\Facades\Log;
use LlmLaraHub\LlmDriver\LlmDriverFacade;
use LlmLaraHub\LlmDriver\Responses\CompletionResponse;

/**
 * Agent that double-checks an LLM response against the RAG context it was
 * generated from, and rewrites it so questionable content is removed before
 * it reaches the user.
 */
class VerifyResponseAgent extends BaseAgent
{
    /**
     * Verify/clean up an LLM response.
     *
     * Builds a verification prompt out of the original prompt, the retrieved
     * context, the LLM's response, and any extra verification instructions,
     * then asks the chattable's LLM driver for a cleaned-up completion.
     *
     * @param  VerifyPromptInputDto  $input  prompt/context/response bundle to verify
     * @return VerifyPromptOutputDto the input fields plus the cleaned-up response
     */
    public function verify(VerifyPromptInputDto $input): VerifyPromptOutputDto
    {
        $originalPrompt = $input->originalPrompt;
        $context = $input->context;
        $llmResponse = $input->llmResponse;
        $verifyPrompt = $input->verifyPrompt;

        // NOTE(review): "date verification assistant" reads like a typo for
        // "data verification assistant" — confirm intent before changing it.
        $prompt = <<<EOT
As a date verification assistant please review the following and return
a response that cleans up the original "LLM RESPONSE" included below.
What is key for you to do is that this is a RAG system so if the original "LLM RESPONSE" response does not
line up with the data in the "CONTEXT" then remove any questionable text and
numbers. See VERIFY PROMPT for any additional information. The output here
will go directly to the user in a chat window so please reply accordingly.
Your Response will not include anything about the verification process you are just a proxy to the original LLM RESPONSE.
Your Response will be that just cleaned up for chat.
DO NOT include text like "Here is the cleaned-up response" the user should not even know your step happened :)
Your response will NOT be a list like below but just follow the formatting of the "LLM RESPONSE".
### Included are the following sections
- ORIGINAL PROMPT: The question from the user
- CONTEXT:
- LLM RESPONSE: The response from the LLM system using the original prompt and context
- VERIFY PROMPT: The prompt added to help clear up the required output.
### START ORIGINAL PROMPT
{$originalPrompt}
### END ORIGINAL PROMPT
### START CONTEXT
{$context}
### END CONTEXT
### START LLM RESPONSE
{$llmResponse}
### END LLM RESPONSE
### START VERIFY PROMPT
{$verifyPrompt}
### END VERIFY PROMPT
EOT;

        // Single structured log entry (the original logged twice: once bare,
        // once with the prompt — the bare entry added no information).
        Log::info('[LaraChain] VerifyResponseAgent::verify', [
            'prompt' => $prompt,
        ]);

        /** @var CompletionResponse $response */
        $response = LlmDriverFacade::driver(
            $input->chattable->getDriver()
        )->completion($prompt, $input->llmResponse);

        return VerifyPromptOutputDto::from([
            'chattable' => $input->chattable,
            'originalPrompt' => $input->originalPrompt,
            'context' => $input->context,
            'llmResponse' => $input->llmResponse,
            'verifyPrompt' => $input->verifyPrompt,
            'response' => $response->content,
        ]);
    }
}
36 changes: 35 additions & 1 deletion app/Http/Controllers/ChatController.php
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

namespace App\Http\Controllers;

use App\Domains\Agents\VerifyPromptInputDto;
use Facades\App\Domains\Agents\VerifyResponseAgent;
use App\Domains\Messages\RoleEnum;
use App\Events\ChatUpdatedEvent;
use App\Http\Resources\ChatResource;
Expand All @@ -14,6 +16,8 @@
use Illuminate\Support\Facades\Log;
use LlmLaraHub\LlmDriver\LlmDriverFacade;
use LlmLaraHub\LlmDriver\Requests\MessageInDto;
use App\Domains\Agents\VerifyPromptOutputDto;
use App\Events\ChatUiUpdateEvent;

class ChatController extends Controller
{
Expand Down Expand Up @@ -68,12 +72,42 @@ public function chat(Chat $chat)
if (data_get($validated, 'completion', false)) {
Log::info('[LaraChain] Running Simple Completion');
$prompt = $validated['input'];


ChatUiUpdateEvent::dispatch(
$chat->chatable,
$chat,
"We are running a completion back shortly"
);

$response = LlmDriverFacade::driver($chat->getDriver())->completion($prompt);
$response = $response->content;

$dto = VerifyPromptInputDto::from(
[
'chattable' => $chat,
'originalPrompt' => $prompt,
'context' => $prompt,
'llmResponse' => $response,
'verifyPrompt' => 'This is a completion so the users prompt was past directly to the llm with all the context. That is why ORIGINAL PROMPT is the same as CONTEXT. Keep the format as Markdown.',
]
);

ChatUiUpdateEvent::dispatch(
$chat->chatable,
$chat,
"We are verifying the completion back shortly"
);


/** @var VerifyPromptOutputDto $response */
$response = VerifyResponseAgent::verify($dto);

$chat->addInput(
message: $response,
message: $response->response,
role: RoleEnum::Assistant,
show_in_thread: true);

} elseif (LlmDriverFacade::driver($chat->getDriver())->hasFunctions()) {
Log::info('[LaraChain] Running Orchestrate');
$response = Orchestrate::handle($messagesArray, $chat);
Expand Down
9 changes: 6 additions & 3 deletions resources/js/Pages/Chat/ChatBaloon.vue
Original file line number Diff line number Diff line change
Expand Up @@ -39,11 +39,14 @@ const props = defineProps({
<div v-if="message.from_ai">
<TabGroup >
<TabList class="flex justify-start gap-4 items-center">
<Tab as="template" v-slot="{ selected }">
<Tab as="div" v-slot="{ selected }">
<div :class="{ 'underline text-gray-800': selected }" class="hover:cursor-pointer m4-2 text-gray-500">Message</div>
</Tab>
<Tab as="template" v-slot="{ selected }">
<div :class="{ 'underline text-gray-800': selected }" class="hover:cursor-pointer m4-2 text-gray-500">Sources</div>
<Tab as="div" v-slot="{ selected }" :disabled="message?.message_document_references.length === 0" class="disabled:opacity-45 disabled:cursor-not-allowed">
<div :class="{ 'underline text-gray-800': selected }" class="hover:cursor-pointer
text-gray-500 flex justify-start gap-2 items-center">
<span>Sources</span> <div class="text-xs text-white rounded-full bg-indigo-600 h-4 w-6 text-center">{{ message?.message_document_references.length}}</div>
</div>
</Tab>
</TabList>
<TabPanels v-auto-animate>
Expand Down
4 changes: 2 additions & 2 deletions resources/js/Pages/Chat/ChatInputThreaded.vue
Original file line number Diff line number Diff line change
Expand Up @@ -85,9 +85,9 @@ const setQuestion = (question) => {
class="relative p-4 flex-col max-container mx-auto w-full" v-auto-animate>

<div class="relative p-4 flex max-container mx-auto w-full" >
<input
<textarea
rows="2"
type="text"
@keydown.enter.prevent
autofocus="true"
class="caret caret-indigo-400 caret-opacity-50
disabled:opacity-40
Expand Down
54 changes: 54 additions & 0 deletions tests/Feature/Http/Controllers/ChatControllerTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,17 @@

namespace Tests\Feature\Http\Controllers;

use App\Domains\Agents\VerifyPromptOutputDto;
use App\Domains\Messages\RoleEnum;
use Facades\App\Domains\Agents\VerifyResponseAgent;
use App\Models\Chat;
use App\Models\Collection;
use App\Models\Message;
use App\Models\User;
use Facades\LlmLaraHub\LlmDriver\Orchestrate;
use Facades\LlmLaraHub\LlmDriver\SimpleSearchAndSummarizeOrchestrate;
use LlmLaraHub\LlmDriver\LlmDriverFacade;
use LlmLaraHub\LlmDriver\Responses\CompletionResponse;
use Tests\TestCase;

class ChatControllerTest extends TestCase
Expand All @@ -27,6 +32,55 @@ public function test_can_create_chat_and_redirect(): void
$this->assertDatabaseCount('chats', 1);
}


/**
 * A completion-mode chat message should be run through the verification
 * agent, and the verified text stored as the assistant message body.
 */
public function test_will_verify_on_completion(): void
{
    $user = User::factory()->create();
    $collection = Collection::factory()->create();
    $chat = Chat::factory()->create([
        'chatable_id' => $collection->id,
        'chatable_type' => Collection::class,
        'user_id' => $user->id,
    ]);

    // Completion mode must bypass the orchestrator entirely.
    Orchestrate::shouldReceive('handle')->never();

    $completionResponse = CompletionResponse::from([
        'content' => 'test',
    ]);
    LlmDriverFacade::shouldReceive('driver->completion')
        ->once()
        ->andReturn($completionResponse);

    // The agent returns the verified DTO; its `response` becomes the message body.
    $verifiedDto = VerifyPromptOutputDto::from([
        'chattable' => $chat,
        'originalPrompt' => 'test',
        'context' => 'test',
        'llmResponse' => 'test',
        'verifyPrompt' => 'This is a completion so the users prompt was past directly to the llm with all the context.',
        'response' => "verified yay!",
    ]);
    VerifyResponseAgent::shouldReceive('verify')->once()->andReturn($verifiedDto);

    $this->assertDatabaseCount('messages', 0);

    $this->actingAs($user)
        ->post(
            route('chats.messages.create', ['chat' => $chat->id]),
            [
                'system_prompt' => 'Foo',
                'input' => 'user input',
                'completion' => true,
            ]
        )
        ->assertOk();

    // One user message plus one assistant message.
    $this->assertDatabaseCount('messages', 2);

    $assistantMessage = Message::where('role', RoleEnum::Assistant)->first();
    $this->assertEquals("verified yay!", $assistantMessage->body);
}


public function test_a_function_based_chat()
{
$user = User::factory()->create();
Expand Down
44 changes: 44 additions & 0 deletions tests/Feature/VerifyResponseAgentTest.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
<?php

namespace Tests\Feature;

use App\Domains\Agents\VerifyPromptInputDto;
use App\Domains\Agents\VerifyPromptOutputDto;
use App\Domains\Agents\VerifyResponseAgent;
use App\Models\Chat;
use Illuminate\Foundation\Testing\RefreshDatabase;
use Illuminate\Foundation\Testing\WithFaker;
use LlmLaraHub\LlmDriver\LlmDriverFacade;
use LlmLaraHub\LlmDriver\Responses\CompletionResponse;
use Tests\TestCase;

class VerifyResponseAgentTest extends TestCase
{
    /**
     * verify() should send a prompt to the chattable's LLM driver exactly
     * once and wrap the completion content in a VerifyPromptOutputDto.
     */
    public function test_agent(): void
    {
        $chat = Chat::factory()->create();

        $completion = CompletionResponse::from([
            'content' => 'test',
        ]);

        LlmDriverFacade::shouldReceive('driver->completion')
            ->once()
            ->andReturn($completion);

        $input = VerifyPromptInputDto::from([
            'chattable' => $chat,
            'originalPrompt' => 'test',
            'context' => 'test',
            'llmResponse' => 'test',
            'verifyPrompt' => 'test',
        ]);

        // Distinct variable names: the original reused $response for both the
        // mocked completion and the agent's output, which obscured the test.
        $output = (new VerifyResponseAgent())->verify($input);

        $this->assertInstanceOf(VerifyPromptOutputDto::class, $output);
        // The DTO must carry the driver's completion content as its response.
        $this->assertEquals('test', $output->response);
    }
}
Loading

0 comments on commit 927b435

Please sign in to comment.