Pass context in separate message from user query to research chat actor
debanjum committed Oct 28, 2024
1 parent d75ce4a commit d184498
Showing 3 changed files with 14 additions and 12 deletions.
5 changes: 1 addition & 4 deletions src/khoj/processor/conversation/prompts.py
@@ -673,10 +673,7 @@
Return the next tool AI to use and the query to ask it. Your response should always be a valid JSON object. Do not say anything else.
Response format:
{{"scratchpad": "<your_scratchpad_to_reason_about_which_tool_to_use>", "tool": "<name_of_tool_ai>", "query": "<your_query_for_the_tool_ai>"}}
User: {query}
Khoj:
{{"scratchpad": "<your_scratchpad_to_reason_about_which_tool_to_use>", "tool": "<name_of_tool_ai>", "query": "<your_detailed_query_for_the_tool_ai>"}}
""".strip()
)

17 changes: 11 additions & 6 deletions src/khoj/routers/helpers.py
@@ -799,7 +799,7 @@ async def generate_excalidraw_diagram_from_description(

with timer("Chat actor: Generate excalidraw diagram", logger):
raw_response = await send_message_to_model_wrapper(
message=excalidraw_diagram_generation, user=user, tracer=tracer
query=excalidraw_diagram_generation, user=user, tracer=tracer
)
raw_response = raw_response.strip()
raw_response = remove_json_codeblock(raw_response)
@@ -879,11 +879,12 @@ async def generate_better_image_prompt(


async def send_message_to_model_wrapper(
message: str,
query: str,
system_message: str = "",
response_type: str = "text",
user: KhojUser = None,
query_images: List[str] = None,
context: str = "",
tracer: dict = {},
):
conversation_config: ChatModelOptions = await ConversationAdapters.aget_default_conversation_config(user)
@@ -914,7 +915,8 @@ async def send_message_to_model_wrapper(

loaded_model = state.offline_chat_processor_config.loaded_model
truncated_messages = generate_chatml_messages_with_context(
user_message=message,
user_message=query,
context_message=context,
system_message=system_message,
model_name=chat_model,
loaded_model=loaded_model,
@@ -939,7 +941,8 @@
api_key = openai_chat_config.api_key
api_base_url = openai_chat_config.api_base_url
truncated_messages = generate_chatml_messages_with_context(
user_message=message,
user_message=query,
context_message=context,
system_message=system_message,
model_name=chat_model,
max_prompt_size=max_tokens,
@@ -960,7 +963,8 @@
elif model_type == ChatModelOptions.ModelType.ANTHROPIC:
api_key = conversation_config.openai_config.api_key
truncated_messages = generate_chatml_messages_with_context(
user_message=message,
user_message=query,
context_message=context,
system_message=system_message,
model_name=chat_model,
max_prompt_size=max_tokens,
@@ -979,7 +983,8 @@
elif model_type == ChatModelOptions.ModelType.GOOGLE:
api_key = conversation_config.openai_config.api_key
truncated_messages = generate_chatml_messages_with_context(
user_message=message,
user_message=query,
context_message=context,
system_message=system_message,
model_name=chat_model,
max_prompt_size=max_tokens,
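For reference, a minimal sketch of how a caller might invoke the wrapper after this change, with the query and its supporting context passed as separate keyword arguments. The argument names mirror the new signature above; the call site, query text, and context string are illustrative and not taken from the repository.

# Hypothetical call site: query and context travel as separate arguments
# instead of being concatenated into a single prompt string.
response = await send_message_to_model_wrapper(
    query="Which tool should I use to answer the user's question?",  # the user's query, unchanged
    context="<planning instructions, tool descriptions, chat history>",  # supporting context, sent separately
    system_message="You are Khoj, a research planning assistant.",  # illustrative system prompt
    response_type="json_object",
    user=user,
    tracer=tracer,
)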
4 changes: 2 additions & 2 deletions src/khoj/routers/research.py
@@ -77,7 +77,6 @@ async def apick_next_tool(
username = prompts.user_name.format(name=user_name) if user_name else ""

function_planning_prompt = prompts.plan_function_execution.format(
query=query,
tools=tool_options_str,
chat_history=chat_history,
personality_context=personality_context,
@@ -91,7 +90,8 @@

with timer("Chat actor: Infer information sources to refer", logger):
response = await send_message_to_model_wrapper(
function_planning_prompt,
query=query,
context=function_planning_prompt,
response_type="json_object",
user=user,
query_images=query_images,
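The net effect in apick_next_tool: the planning prompt no longer embeds the user query via str.format; the wrapper now receives the query and the planning context as separate values, so they can be sent to the model as separate chat messages. Below is a minimal, self-contained sketch of that idea; it does not reproduce generate_chatml_messages_with_context, and the exact message layout is an assumption made only for illustration.

from typing import Dict, List

def build_messages(query: str, context: str, system_message: str = "") -> List[Dict[str, str]]:
    # Illustrative helper (not from the repo): keep supporting context in its own
    # message so it can be truncated or swapped without touching the user's query.
    messages: List[Dict[str, str]] = []
    if system_message:
        messages.append({"role": "system", "content": system_message})
    if context:
        messages.append({"role": "user", "content": f"Context:\n{context}"})
    messages.append({"role": "user", "content": query})
    return messages

# Example: planning instructions ride along as context; the raw query stays untouched.
msgs = build_messages(query="What's on my calendar tomorrow?", context="<tool descriptions, chat history>")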
