feat: dedup against existing snippets for retrieval augmented code completion (#582)

wsxiaoys authored Oct 17, 2023
1 parent 99d1bf3 commit f9eb052

Showing 1 changed file with 9 additions and 3 deletions: crates/tabby/src/serve/completions/prompt.rs
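In short: once a retrieved snippet is accepted into the prompt, its tokens are folded into the token pool used by the similarity check, so subsequent hits are deduped against both the completion request text and every snippet added so far, not just the request text.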
@@ -106,7 +106,7 @@ fn build_prefix(language: &str, prefix: &str, snippets: &[Snippet]) -> String {
 
 fn collect_snippets(index_server: &IndexServer, language: &str, text: &str) -> Vec<Snippet> {
     let mut ret = Vec::new();
-    let tokens = tokenize_text(text);
+    let mut tokens = Box::new(tokenize_text(text));
 
     let sanitized_text = tokens.join(" ");
     let sanitized_text = sanitized_text.trim();
@@ -131,7 +131,7 @@ fn collect_snippets(index_server: &IndexServer, language: &str, text: &str) -> Vec<Snippet> {
     let mut count_characters = 0;
     for hit in serp.hits {
         let body = hit.doc.body;
-        let body_tokens = tokenize_text(&body);
+        let mut body_tokens = tokenize_text(&body);
 
         if count_characters + body.len() > MAX_SNIPPET_CHARS_IN_PROMPT {
             break;
@@ -151,6 +151,11 @@ fn collect_snippets(index_server: &IndexServer, language: &str, text: &str) -> Vec<Snippet> {
             continue;
         }
 
+        // Prepend body tokens and update tokens, so future similarity calculation will consider
+        // added snippets.
+        body_tokens.append(&mut tokens);
+        *tokens = body_tokens;
+
         count_characters += body.len();
         ret.push(Snippet {
             filepath: hit.doc.filepath,
@@ -166,10 +171,11 @@ lazy_static! {
     static ref TOKENIZER: Regex = Regex::new(r"[^\w]").unwrap();
 }
 
-fn tokenize_text(text: &str) -> Vec<&str> {
+fn tokenize_text(text: &str) -> Vec<String> {
     TOKENIZER
         .split(text)
         .filter(|s| *s != "AND" && *s != "OR" && *s != "NOT" && !s.is_empty())
+        .map(|x| x.to_owned())
         .collect()
 }
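The type changes above are what make the merge step compile: `tokenize_text` now returns owned `Vec<String>` tokens, because `body` is dropped at the end of each loop iteration, so `&str` tokens borrowed from it could not be kept in the longer-lived `tokens` pool.

For context, here is a minimal, self-contained sketch of the cumulative dedup this commit implements. The similarity check itself falls outside the diff, so `MAX_SIMILARITY_THRESHOLD` and `overlap_ratio` are hypothetical stand-ins rather than the actual names in prompt.rs, and the `Box` indirection is omitted since it is incidental to the idea:

use std::collections::HashSet;

// Hypothetical threshold; the real cutoff and similarity routine live in the
// part of prompt.rs this diff does not show.
const MAX_SIMILARITY_THRESHOLD: f32 = 0.9;

/// Fraction of `body_tokens` already present in the accumulated token pool.
fn overlap_ratio(tokens: &[String], body_tokens: &[String]) -> f32 {
    if body_tokens.is_empty() {
        return 1.0;
    }
    let pool: HashSet<&str> = tokens.iter().map(String::as_str).collect();
    let shared = body_tokens
        .iter()
        .filter(|t| pool.contains(t.as_str()))
        .count();
    shared as f32 / body_tokens.len() as f32
}

fn main() {
    // Tokens from the completion request text, as in collect_snippets.
    let mut tokens: Vec<String> =
        ["fn", "build", "prefix"].iter().map(|s| s.to_string()).collect();

    for body in ["fn build prefix", "fn collect snippets"] {
        let mut body_tokens: Vec<String> =
            body.split(' ').map(|s| s.to_string()).collect();

        // A hit that is a near-duplicate of what the prompt already contains
        // is skipped, mirroring the `continue` in the real loop.
        if overlap_ratio(&tokens, &body_tokens) > MAX_SIMILARITY_THRESHOLD {
            continue;
        }

        // The committed merge step: fold the accepted snippet's tokens into
        // the pool so the next hit is deduped against everything kept so far.
        body_tokens.append(&mut tokens);
        tokens = body_tokens;
    }

    println!("{} tokens in pool", tokens.len()); // prints "6 tokens in pool"
}

For reference, the regex tokenizer splits on non-word characters and filters out AND, OR, and NOT (presumably to keep the sanitized text safe for the index server's query parser), so `tokenize_text("let x = 1;")` yields `["let", "x", "1"]`.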
