
Commit

feat: show batch generation progress
llllvvuu committed Aug 29, 2024
1 parent 280b378 commit 2caa832
Showing 1 changed file with 3 additions and 0 deletions.
llms/mlx_lm/utils.py: 3 additions & 0 deletions
@@ -375,6 +375,8 @@ def generate(
                 break
         if is_batch:
             output_toks.append(tokens)
+            if verbose:
+                print(".", end="", flush=True)
         else:
             token = tokens.item()
             logprobs = logprobs.squeeze(0)
@@ -404,6 +406,7 @@
         if token_count <= 0:
             print("No tokens generated for this prompt")
         if is_batch:
+            print()
             for p, resp in zip(prompt, response):
                 print("=" * 10)
                 print("Prompt:", p)
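Note on the pattern: the added lines follow the usual flush-on-write progress-dot idiom. Each batched decoding step prints a single "." with end="" so the dots stay on one line, and flush=True forces each dot to appear immediately instead of waiting in the stdout buffer; the later bare print() terminates the dot line before the per-prompt results are printed. A minimal, self-contained sketch of the same idiom (the loop, step count, and sleep below are illustrative stand-ins, not mlx_lm code):

    import time

    verbose = True
    num_steps = 20  # hypothetical number of decoding steps

    for _ in range(num_steps):
        time.sleep(0.05)  # stand-in for one batched decoding step
        if verbose:
            # flush=True makes each dot show up right away, giving
            # visible progress during a long batch generation
            print(".", end="", flush=True)

    print()  # end the dot line before printing per-prompt results
    print("=" * 10)
    print("Prompt:", "example prompt")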
