
# Refine synonym query. (#3855)
### What problem does this PR solve?

Refines how synonyms are folded into the full-text query: synonym expansion is now capped by a 32-keyword budget, synonym clauses are down-weighted relative to the original question terms, and the Elasticsearch dynamic templates for `*_fea`/`*_feas` fields are corrected to use the `rank_feature`/`rank_features` types.
### Type of change

- [x] Performance Improvement
KevinHuSh authored Dec 4, 2024
1 parent 1b58960 commit 1b817a5
Showing 2 changed files with 25 additions and 10 deletions.
### conf/mapping.json (9 additions, 1 deletion)
```diff
@@ -140,13 +140,21 @@
       }
     },
     {
-      "string": {
+      "rank_feature": {
         "match": "*_fea",
         "mapping": {
           "type": "rank_feature"
         }
       }
     },
+    {
+      "rank_features": {
+        "match": "*_feas",
+        "mapping": {
+          "type": "rank_features"
+        }
+      }
+    },
     {
       "dense_vector": {
         "match": "*_512_vec",
```
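For context, these `match` patterns are Elasticsearch dynamic templates: any field named `*_fea` is mapped as a single `rank_feature`, and any `*_feas` field as a `rank_features` bag of named features. The rename from `string` to `rank_feature` only makes the template's name match the type it maps; the new `*_feas` template is what adds `rank_features` support. A minimal sketch of the behavior, assuming a local Elasticsearch 8.x and the official Python client; the index name, field names, and feature values below are made up:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Dynamic templates equivalent to the mapping above: the field name decides
# whether a value becomes a rank_feature or a rank_features map.
es.indices.create(
    index="demo_chunks",
    mappings={
        "dynamic_templates": [
            {"rank_feature": {"match": "*_fea", "mapping": {"type": "rank_feature"}}},
            {"rank_features": {"match": "*_feas", "mapping": {"type": "rank_features"}}},
        ]
    },
)

# "pagerank_fea" matches *_fea; "tag_feas" matches *_feas.
es.index(index="demo_chunks", document={
    "content": "how to refine a synonym query",
    "pagerank_fea": 7.5,
    "tag_feas": {"nlp": 12, "search": 3},
})

# rank_feature queries then fold the stored feature values into scoring.
resp = es.search(index="demo_chunks", query={
    "bool": {
        "must": {"match": {"content": "synonym query"}},
        "should": [
            {"rank_feature": {"field": "pagerank_fea"}},
            {"rank_feature": {"field": "tag_feas.nlp"}},
        ],
    },
})
```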
### rag/nlp/query.py (16 additions, 9 deletions)
```diff
@@ -120,7 +120,7 @@ def need_fine_grained_tokenize(tk):
             keywords.append(tt)
             twts = self.tw.weights([tt])
             syns = self.syn.lookup(tt)
-            if syns: keywords.extend(syns)
+            if syns and len(keywords) < 32: keywords.extend(syns)
             logging.debug(json.dumps(twts, ensure_ascii=False))
             tms = []
             for tk, w in sorted(twts, key=lambda x: x[1] * -1):
```
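The new `len(keywords) < 32` guard is part of a broader change: the old hard stop at 12 keywords (removed in the next hunk) becomes a 32-entry budget checked before every extension. A toy sketch of the rule, with a made-up synonym table standing in for `self.syn.lookup`:

```python
# Toy sketch of the new expansion budget; the repo's lookup is backed by a
# real synonym dictionary, this table is invented for illustration.
MAX_KEYWORDS = 32

SYNONYMS = {"query": ["search", "lookup"], "refine": ["improve"]}

def expand(tokens):
    keywords = []
    for tk in tokens:
        keywords.append(tk)
        syns = SYNONYMS.get(tk, [])
        # Synonyms are appended only while the budget allows, so they can
        # no longer crowd out later original tokens.
        if syns and len(keywords) < MAX_KEYWORDS:
            keywords.extend(syns)
        if len(keywords) >= MAX_KEYWORDS:
            break
    return keywords

print(expand(["refine", "synonym", "query"]))
# ['refine', 'improve', 'synonym', 'query', 'search', 'lookup']
```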
```diff
@@ -140,17 +140,24 @@ def need_fine_grained_tokenize(tk):
                 sm = [FulltextQueryer.subSpecialChar(m) for m in sm if len(m) > 1]
                 sm = [m for m in sm if len(m) > 1]
 
-                keywords.append(re.sub(r"[ \\\"']+", "", tk))
-                keywords.extend(sm)
-                if len(keywords) >= 12:
-                    break
+                if len(keywords) < 32:
+                    keywords.append(re.sub(r"[ \\\"']+", "", tk))
+                    keywords.extend(sm)
 
                 tk_syns = self.syn.lookup(tk)
+                tk_syns = [FulltextQueryer.subSpecialChar(s) for s in tk_syns]
+                if len(keywords) < 32: keywords.extend([s for s in tk_syns if s])
+                tk_syns = [rag_tokenizer.fine_grained_tokenize(s) for s in tk_syns if s]
+                tk_syns = [f"\"{s}\"" if s.find(" ")>0 else s for s in tk_syns]
+
+                if len(keywords) >= 32:
+                    break
 
                 tk = FulltextQueryer.subSpecialChar(tk)
                 if tk.find(" ") > 0:
                     tk = '"%s"' % tk
                 if tk_syns:
-                    tk = f"({tk} %s)" % " ".join(tk_syns)
+                    tk = f"({tk} OR (%s)^0.2)" % " ".join(tk_syns)
                 if sm:
                     tk = f'{tk} OR "%s" OR ("%s"~2)^0.5' % (" ".join(sm), " ".join(sm))
                 if tk.strip():
```
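The other change in this hunk is how a term and its synonyms are composed into the query string: synonyms now sit in their own `OR` group with a 0.2 boost instead of being mixed in at full weight, so a synonym match can no longer outrank the literal token. A self-contained sketch of the resulting shape; the tokens and synonyms are made up, and `compose_term_query` is a hypothetical stand-in for the inline logic above:

```python
def compose_term_query(tk: str, tk_syns: list[str], sm: list[str]) -> str:
    """Mirror the refined composition: the original token, synonyms
    down-weighted to 0.2, fine-grained sub-tokens as a sloppy phrase."""
    if tk.find(" ") > 0:
        tk = '"%s"' % tk
    if tk_syns:
        # Synonyms live in their own OR group with a 0.2 boost.
        tk = f"({tk} OR (%s)^0.2)" % " ".join(tk_syns)
    if sm:
        sub = " ".join(sm)
        tk = f'{tk} OR "{sub}" OR ("{sub}"~2)^0.5'
    return tk

print(compose_term_query("machine learning", ['"statistical learning"'], ["machine", "learning"]))
# ("machine learning" OR ("statistical learning")^0.2) OR "machine learning" OR ("machine learning"~2)^0.5
```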
```diff
@@ -159,14 +166,14 @@ def need_fine_grained_tokenize(tk):
             tms = " ".join([f"({t})^{w}" for t, w in tms])
 
             if len(twts) > 1:
-                tms += ' ("%s"~4)^1.5' % (" ".join([t for t, _ in twts]))
+                tms += ' ("%s"~2)^1.5' % rag_tokenizer.tokenize(tt)
             if re.match(r"[0-9a-z ]+$", tt):
                 tms = f'("{tt}" OR "%s")' % rag_tokenizer.tokenize(tt)
 
             syns = " OR ".join(
                 [
-                    '"%s"^0.7'
-                    % FulltextQueryer.subSpecialChar(rag_tokenizer.tokenize(s))
+                    '"%s"'
+                    % rag_tokenizer.tokenize(FulltextQueryer.subSpecialChar(s))
                     for s in syns
                 ]
             )
```
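The last hunk also flips the order of escaping and tokenizing for whole-question synonyms and drops their 0.7 down-weight: special characters are now neutralized before the tokenizer sees them. A rough sketch with simplified stand-ins (the real `FulltextQueryer.subSpecialChar` and `rag_tokenizer.tokenize` are more involved, and the phrases here are made up):

```python
import re

def sub_special_char(s: str) -> str:
    # Stand-in for FulltextQueryer.subSpecialChar: neutralize Lucene
    # metacharacters before they reach the query string.
    return re.sub(r'[&|!^~(){}\[\]"<>+\-/\\*?:]', " ", s).strip()

def tokenize(s: str) -> str:
    # Stand-in for rag_tokenizer.tokenize.
    return " ".join(s.lower().split())

syns = ["synonym-query (refined)"]

# Before: tokenize first, then escape, with a 0.7 down-weight.
old = " OR ".join('"%s"^0.7' % sub_special_char(tokenize(s)) for s in syns)
# After: escape first so the tokenizer never sees metacharacters,
# and leave the phrase at full weight.
new = " OR ".join('"%s"' % tokenize(sub_special_char(s)) for s in syns)

print(old)  # "synonym query  refined"^0.7  (escaping after tokenizing leaves gaps)
print(new)  # "synonym query refined"
```

Escaping first keeps Lucene metacharacters out of the tokenizer's input, so the emitted phrase is clean instead of carrying leftover punctuation or stray whitespace.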
