Commit fd2a1ab

fix: switch back to regex based implementation for stop words (TabbyM…
wsxiaoys authored Oct 6, 2023
1 parent 4c00ac0 commit fd2a1ab
Showing 1 changed file with 13 additions and 17 deletions.

crates/tabby-inference/src/decoding.rs (13 additions & 17 deletions)

@@ -1,11 +1,11 @@
 use std::sync::Arc;
 
 use dashmap::DashMap;
-use regex::RegexSet;
+use regex::Regex;
 use tokenizers::tokenizer::Tokenizer;
 
 pub struct DecodingFactory {
-    stop_regex_cache: DashMap<&'static Vec<&'static str>, RegexSet>,
+    stop_regex_cache: DashMap<&'static Vec<&'static str>, Regex>,
 }
 
 fn reverse<T>(s: T) -> String
@@ -33,7 +33,7 @@ impl DecodingFactory {
         IncrementalDecoding::new(tokenizer, self.get_re(stop_words), input_token_ids)
     }
 
-    fn get_re(&self, stop_words: &'static Vec<&'static str>) -> Option<RegexSet> {
+    fn get_re(&self, stop_words: &'static Vec<&'static str>) -> Option<Regex> {
         if stop_words.is_empty() {
             None
         } else {
@@ -48,19 +48,20 @@ impl DecodingFactory {
     }
 }
 
-fn create_stop_regex(stop_words: &[&str]) -> RegexSet {
+fn create_stop_regex(stop_words: &[&str]) -> Regex {
     // (?m) enables multi-line matching mode.
     // \A means absolute begins of string.
-    let tokens: Vec<String> = stop_words
+    let reversed_stop_words: Vec<_> = stop_words
         .iter()
-        .map(|x| r"(?m)\A".to_owned() + &reverse(*x))
+        .map(|x| regex::escape(&reverse(*x)))
         .collect();
-    RegexSet::new(tokens).expect("Failed to create regex set")
+    let regex_string = r"(?m)\A".to_owned() + "((" + &reversed_stop_words.join(")|(") + "))";
+    Regex::new(&regex_string).expect("Failed to create regex")
 }
 
 pub struct IncrementalDecoding {
     tokenizer: Arc<Tokenizer>,
-    stop_re: Option<RegexSet>,
+    stop_re: Option<Regex>,
 
     token_ids: Vec<u32>,
     prefix_offset: usize,
@@ -70,11 +71,7 @@ pub struct IncrementalDecoding {
 }
 
 impl IncrementalDecoding {
-    pub fn new(
-        tokenizer: Arc<Tokenizer>,
-        stop_re: Option<RegexSet>,
-        input_token_ids: &[u32],
-    ) -> Self {
+    pub fn new(tokenizer: Arc<Tokenizer>, stop_re: Option<Regex>, input_token_ids: &[u32]) -> Self {
         let text = tokenizer
             .decode(input_token_ids, /* skip_special_token = */ true)
             .expect("Cannot decode token from tokenizer.");
@@ -132,10 +129,9 @@ mod tests {
     use super::*;
 
     #[test]
-    fn test_it_should_not_match() {
-        let stop_words = vec!["\n\n", "\n\n "];
-        let re = create_stop_regex(&stop_words);
+    fn test_it_works() {
         let text = reverse("void write_u32(std::uint32_t val) const {\n write_raw(&val, sizeof(val));\n }\n\n ~llama_file() {\n if (fp) {\n std::fclose(fp);\n }\n }\n};\n\nvoid");
-        assert!(!re.is_match(&text))
+        assert!(!create_stop_regex(&["\n\n", "\n\n "]).is_match(&text));
+        assert!(create_stop_regex(&["\n\n", "\n\n ", "\nvoid"]).is_match(&text));
     }
 }
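
The trick behind this diff: the stop check runs against the reversed generated text (note the reverse(...) call in the test above), so a stop word that ends the output shows up as a prefix of the reversed string, and the \A-anchored alternation detects it with a single is_match call. Below is a minimal, self-contained sketch of that technique, assuming only the regex crate; reverse and create_stop_regex mirror the committed helpers, while the main driver and the sample stop word <|endoftext|> are illustrative, not part of the commit.

use regex::Regex;

fn reverse(s: &str) -> String {
    s.chars().rev().collect()
}

fn create_stop_regex(stop_words: &[&str]) -> Regex {
    // Reverse each stop word, escape any regex metacharacters it contains,
    // and anchor the whole alternation at the start of the haystack.
    let reversed: Vec<_> = stop_words
        .iter()
        .map(|x| regex::escape(&reverse(x)))
        .collect();
    let pattern = r"(?m)\A".to_owned() + "((" + &reversed.join(")|(") + "))";
    Regex::new(&pattern).expect("Failed to create regex")
}

fn main() {
    let re = create_stop_regex(&["\n\n", "<|endoftext|>"]);
    // "fn main() {}\n\n" ends with the stop word "\n\n", so its reversal
    // begins with the reversed stop word and the anchored regex matches.
    assert!(re.is_match(&reverse("fn main() {}\n\n")));
    // Without a trailing stop word there is no match.
    assert!(!re.is_match(&reverse("fn main() {}")));
}

One detail worth noting: the new code adds a regex::escape call that the replaced RegexSet version lacked, so a stop word such as <|endoftext|> now matches literally instead of having its | characters parsed as alternation operators.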
