
Commit 42a86c8: Release v2.8.8

1 parent: 48443b1

8 files changed: +48 -25 lines

docker/Dockerfile.chat (1 addition, 1 deletion)

@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
 # Install Python packages (using latest versions)
 RUN pip install --no-cache-dir \
     praisonai_tools \
-    "praisonai>=2.8.7" \
+    "praisonai>=2.8.8" \
     "praisonai[chat]" \
     "embedchain[github,youtube]"

docker/Dockerfile.dev (1 addition, 1 deletion)

@@ -20,7 +20,7 @@ RUN mkdir -p /root/.praison
 # Install Python packages (using latest versions)
 RUN pip install --no-cache-dir \
     praisonai_tools \
-    "praisonai>=2.8.7" \
+    "praisonai>=2.8.8" \
     "praisonai[ui]" \
     "praisonai[chat]" \
     "praisonai[realtime]" \

docker/Dockerfile.ui (1 addition, 1 deletion)

@@ -16,7 +16,7 @@ RUN mkdir -p /root/.praison
 # Install Python packages (using latest versions)
 RUN pip install --no-cache-dir \
     praisonai_tools \
-    "praisonai>=2.8.7" \
+    "praisonai>=2.8.8" \
    "praisonai[ui]" \
    "praisonai[crewai]"

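All three images now require at least 2.8.8 rather than pinning an exact release. Below is a minimal sketch, not part of this commit, of how a build or CI step could assert that the resolved package satisfies the new floor; the script name and the naive version parsing are assumptions.

# check_praisonai_floor.py (illustrative only): confirm the installed
# praisonai meets the ">=2.8.8" floor pinned in the Dockerfiles above.
from importlib.metadata import version

MIN_VERSION = (2, 8, 8)  # mirrors "praisonai>=2.8.8"

def parse(v: str) -> tuple:
    # Naive dotted-number parse; a real check would use packaging.version.
    return tuple(int(part) for part in v.split(".")[:3])

installed = version("praisonai")
assert parse(installed) >= MIN_VERSION, f"praisonai {installed} is older than 2.8.8"
print(f"praisonai {installed} satisfies >=2.8.8")
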
src/praisonai/praisonai.rb (2 additions, 2 deletions)

@@ -3,8 +3,8 @@ class Praisonai < Formula

   desc "AI tools for various AI applications"
   homepage "https://github.com/MervinPraison/PraisonAI"
-  url "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.8.7.tar.gz"
-  sha256 `curl -sL https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.8.7.tar.gz | shasum -a 256`.split.first
+  url "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.8.8.tar.gz"
+  sha256 `curl -sL https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.8.8.tar.gz | shasum -a 256`.split.first
   license "MIT"

   depends_on "[email protected]"

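The formula computes the tarball checksum at evaluation time by piping the download through shasum. For clarity, here is a rough Python equivalent of that backtick expression (illustrative only, not project code), showing the value that ends up in sha256.

# Rough equivalent of `curl -sL <tarball> | shasum -a 256`.split.first
import hashlib
import urllib.request

URL = "https://github.com/MervinPraison/PraisonAI/archive/refs/tags/v2.8.8.tar.gz"

digest = hashlib.sha256()
with urllib.request.urlopen(URL) as resp:  # follows redirects, like curl -L
    for chunk in iter(lambda: resp.read(8192), b""):
        digest.update(chunk)

print(digest.hexdigest())  # the hex string the formula assigns to sha256
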
src/praisonai/praisonai/deploy.py (1 addition, 1 deletion)

@@ -57,7 +57,7 @@ def create_dockerfile(self):
        file.write("FROM python:3.11-slim\n")
        file.write("WORKDIR /app\n")
        file.write("COPY . .\n")
-       file.write("RUN pip install flask praisonai==2.8.7 gunicorn markdown\n")
+       file.write("RUN pip install flask praisonai==2.8.8 gunicorn markdown\n")
        file.write("EXPOSE 8080\n")
        file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')

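Unlike the Dockerfiles above, the generated deployment image pins an exact version (praisonai==2.8.8). The sketch below simply rebuilds the emitted Dockerfile text for inspection, reassembled from the write calls in the hunk; it is illustrative and not part of the project.

# Illustrative preview of the Dockerfile text create_dockerfile() writes
# after this bump (reconstructed from the diff above).
lines = [
    "FROM python:3.11-slim",
    "WORKDIR /app",
    "COPY . .",
    "RUN pip install flask praisonai==2.8.8 gunicorn markdown",
    "EXPOSE 8080",
    'CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]',
]
print("\n".join(lines))
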
src/praisonai/praisonai/ui/chat.py (9 additions, 1 deletion)

@@ -319,7 +319,6 @@ async def main(message: cl.Message):

     message_history.append({"role": "user", "content": user_message})
     msg = cl.Message(content="")
-    await msg.send()

     completion_params = {
         "model": model_name,
@@ -349,13 +348,18 @@ async def main(message: cl.Message):
     full_response = ""
     tool_calls = []
     current_tool_call = None
+    msg_sent = False

     async for part in response:
         if 'choices' in part and len(part['choices']) > 0:
             delta = part['choices'][0].get('delta', {})

             if 'content' in delta and delta['content'] is not None:
                 token = delta['content']
+                # Send message on first token to avoid delay
+                if not msg_sent:
+                    await msg.send()
+                    msg_sent = True
                 await msg.stream_token(token)
                 full_response += token

@@ -382,6 +386,10 @@ async def main(message: cl.Message):
     if current_tool_call:
         tool_calls.append(current_tool_call)

+    # Ensure message is sent even if no content (e.g., tool calls only)
+    if not msg_sent:
+        await msg.send()
+
     logger.debug(f"Full response: {full_response}")
     logger.debug(f"Tool calls: {tool_calls}")
     message_history.append({"role": "assistant", "content": full_response})

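The chat.py change defers cl.Message.send() until the first streamed token arrives, and falls back to sending after the loop when the model returns only tool calls. A condensed, self-contained sketch of that pattern follows; the StubMessage class and fake_stream generator are stand-ins so it runs anywhere, not chainlit's API.

# Minimal illustration of the "send on first token" pattern above.
import asyncio

class StubMessage:
    """Stand-in for chainlit's cl.Message; mimics only send/stream_token."""
    def __init__(self):
        self.content = ""
        self.sent = False

    async def send(self):
        self.sent = True
        print("[placeholder message created in UI]")

    async def stream_token(self, token: str):
        self.content += token
        print(token, end="", flush=True)

async def fake_stream():
    # Stands in for the litellm delta stream; yields content tokens.
    for token in ["Hello", ", ", "world", "!"]:
        yield token

async def main():
    msg = StubMessage()
    msg_sent = False
    async for token in fake_stream():
        if not msg_sent:  # send on first token, avoiding an empty bubble up front
            await msg.send()
            msg_sent = True
        await msg.stream_token(token)
    if not msg_sent:      # e.g. tool-calls-only responses
        await msg.send()

asyncio.run(main())
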
src/praisonai/praisonai/ui/code.py (32 additions, 17 deletions)

@@ -398,7 +398,6 @@ async def main(message: cl.Message):
     message_history.append({"role": "user", "content": user_message})

     msg = cl.Message(content="")
-    await msg.send()

     # Use PraisonAI Agents if available, otherwise fallback to litellm
     if PRAISONAI_AGENTS_AVAILABLE:
@@ -433,48 +432,55 @@ async def handle_with_praisonai_agents(message, user_message, model_name, claude

 For informational questions, explanations, or general conversations, respond normally without using Claude Code."""

-        # Create agent
+        # Create agent with streaming enabled
         agent = Agent(
             name="PraisonAI Assistant",
             instructions=instructions,
             llm=model_name,
-            tools=available_tools if available_tools else None
+            tools=available_tools if available_tools else None,
+            stream=True
         )

         # Execute agent with streaming
         full_response = ""
+        msg_sent = False

-        # Use agent's streaming capabilities if available
         try:
-            # For now, use synchronous execution and stream the result
-            # TODO: Implement proper streaming when PraisonAI agents support it
-            result = agent.start(user_message)
+            # Use async chat for proper streaming
+            result = await agent.achat(user_message)

-            # Stream the response character by character for better UX
+            # Get the response text
             if hasattr(result, 'raw'):
                 response_text = result.raw
             else:
                 response_text = str(result)

-            for char in response_text:
-                await msg.stream_token(char)
-                full_response += char
-                # Small delay to make streaming visible
-                await asyncio.sleep(0.01)
+            # Send message on first content
+            if not msg_sent:
+                await msg.send()
+                msg_sent = True
+
+            # Stream in word chunks for better UX (not char-by-char which is too slow)
+            words = response_text.split(' ')
+            for i, word in enumerate(words):
+                token = word + (' ' if i < len(words) - 1 else '')
+                await msg.stream_token(token)
+                full_response += token

         except Exception as e:
             error_response = f"Error executing agent: {str(e)}"
-            for char in error_response:
-                await msg.stream_token(char)
-                full_response += char
-                await asyncio.sleep(0.01)
+            if not msg_sent:
+                await msg.send()
+            await msg.stream_token(error_response)
+            full_response = error_response

         msg.content = full_response
         await msg.update()

     except Exception as e:
         error_msg = f"Failed to use PraisonAI Agents: {str(e)}"
         logger.error(error_msg)
+        await msg.send()
         await msg.stream_token(error_msg)
         msg.content = error_msg
         await msg.update()
@@ -515,6 +521,7 @@ async def handle_with_litellm(user_message, model_name, message_history, msg, im
     full_response = ""
     tool_calls = []
     current_tool_call = None
+    msg_sent = False

     async for part in response:
         logger.debug(f"LLM part: {part}")
@@ -523,6 +530,10 @@ async def handle_with_litellm(user_message, model_name, message_history, msg, im

             if 'content' in delta and delta['content'] is not None:
                 token = delta['content']
+                # Send message on first token
+                if not msg_sent:
+                    await msg.send()
+                    msg_sent = True
                 await msg.stream_token(token)
                 full_response += token

@@ -549,6 +560,10 @@ async def handle_with_litellm(user_message, model_name, message_history, msg, im
     if current_tool_call:
         tool_calls.append(current_tool_call)

+    # Ensure message is sent even if no content (tool calls only)
+    if not msg_sent:
+        await msg.send()
+
     logger.debug(f"Full response: {full_response}")
     logger.debug(f"Tool calls: {tool_calls}")
     message_history.append({"role": "assistant", "content": full_response})

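Besides switching to await agent.achat(...), code.py replaces the old character-by-character loop (with a 10 ms sleep per character) with word-sized chunks that preserve the original spacing. Here is a small sketch of just that chunking rule, separated from the agent and UI plumbing; the helper name is illustrative.

# Re-statement of the word-chunk streaming used in handle_with_praisonai_agents.
def word_chunks(text: str):
    """Yield word-sized tokens whose concatenation reproduces `text` exactly;
    split(' ') keeps empty strings, so runs of spaces survive the round trip."""
    words = text.split(' ')
    for i, word in enumerate(words):
        yield word + (' ' if i < len(words) - 1 else '')

response_text = "Streaming in word chunks feels faster than per-character output."
assert ''.join(word_chunks(response_text)) == response_text
for token in word_chunks(response_text):
    print(repr(token))
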
src/praisonai/praisonai/version.py (1 addition, 1 deletion)

@@ -1 +1 @@
-__version__ = "2.8.7"
+__version__ = "2.8.8"

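version.py carries the canonical release number, and the Dockerfiles, the Homebrew formula, and deploy.py are bumped to match it by hand in this commit. Below is a hedged sketch of a pre-tag consistency check one could run from the repository root; the script itself is not part of the commit, and the file list simply mirrors the diff above.

# Illustrative pre-release check: every file touched by the bump should
# mention the version declared in src/praisonai/praisonai/version.py.
import re
from pathlib import Path

VERSION_FILE = Path("src/praisonai/praisonai/version.py")
BUMPED_FILES = [
    Path("docker/Dockerfile.chat"),
    Path("docker/Dockerfile.dev"),
    Path("docker/Dockerfile.ui"),
    Path("src/praisonai/praisonai.rb"),
    Path("src/praisonai/praisonai/deploy.py"),
]

version = re.search(r'__version__\s*=\s*"([^"]+)"', VERSION_FILE.read_text()).group(1)
print(f"version.py declares {version}")

for path in BUMPED_FILES:
    status = "OK" if version in path.read_text() else "MISSING"
    print(f"{status}: {path}")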