
Commit c0a3cb7

fix lints
1 parent 56214ef commit c0a3cb7

File tree

1 file changed (+5, -6 lines)

crates/bpe-openai/src/lib.rs

Lines changed: 5 additions & 6 deletions
@@ -92,12 +92,11 @@ impl Tokenizer {
     /// Otherwise, it returns none. This function can be faster than [`Self::count`]` when the
     /// token limit is much smaller than the provided text. Applies pre-tokenization before counting.
     pub fn count_till_limit(&self, text: &str, token_limit: usize) -> Option<usize> {
-        self.split(text)
-            .try_fold(0, |consumed, piece| {
-                self.bpe
-                    .count_till_limit(piece.as_bytes(), token_limit - consumed)
-                    .map(|piece_count| consumed + piece_count)
-            })
+        self.split(text).try_fold(0, |consumed, piece| {
+            self.bpe
+                .count_till_limit(piece.as_bytes(), token_limit - consumed)
+                .map(|piece_count| consumed + piece_count)
+        })
     }
 
     /// Returns the tokens for the encoding of the given text. Applies pre-tokenization before
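
For context, a minimal usage sketch built only on the method signature visible in the diff above. The helper function name, the import path, and how a `Tokenizer` instance is obtained are illustrative assumptions, not part of this commit:

// Sketch: calling Tokenizer::count_till_limit as declared in the diff above.
// Assumption: `Tokenizer` is importable from the crate root of bpe-openai;
// obtaining an instance is left to the caller.
use bpe_openai::Tokenizer;

/// Returns true if `text` encodes to at most `limit` tokens.
/// `count_till_limit` yields `Some(count)` while the running count stays
/// within the limit and `None` once it is exceeded, so it can stop early
/// on long inputs instead of tokenizing the whole text.
fn fits_within_limit(tokenizer: &Tokenizer, text: &str, limit: usize) -> bool {
    tokenizer.count_till_limit(text, limit).is_some()
}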
