Skip to content

Commit

Permalink
Merge pull request #15 from microsoft/hjiang/fix_context_empty
Browse files Browse the repository at this point in the history
Fix (LLMLingua): resolve the issue where the compressed context was coming up as empty

Co-authored-by: Qianhui Wu <[email protected]>
Co-authored-by: Xufang Luo <[email protected]>
  • Loading branch information
3 people authored Nov 15, 2023
2 parents d43ebf4 + 06f5a2c commit be7072a
Showing 1 changed file with 9 additions and 2 deletions.
11 changes: 9 additions & 2 deletions llmlingua/prompt_compressor.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,8 @@ def compress_prompt(
rank_method: str = "llmlingua",
concate_question: bool = True,
):
if not context:
context = [" "]
if isinstance(context, str):
context = [context]
assert not (
Expand Down Expand Up @@ -239,10 +241,15 @@ def compress_prompt(
else:
compressed_prompt = "\n\n".join(context)

res = []
if instruction:
compressed_prompt = instruction + "\n\n" + compressed_prompt
res.append(instruction)
if compressed_prompt.strip():
res.append(compressed_prompt)
if question and concate_question:
compressed_prompt = compressed_prompt + "\n\n" + question
res.append(question)

compressed_prompt = "\n\n".join(res)

compressed_tokens = len(encoding.encode(compressed_prompt))
saving = (origin_tokens - compressed_tokens) * 0.06 / 1000
Expand Down

0 comments on commit be7072a

Please sign in to comment.