enable 3-char words for fine-grained tokenization (#2210)
### What problem does this PR solve?

Lower the minimum token length for fine-grained tokenization from 4 to 3 characters, so 3-character words are also split by the fine-grained tokenizer.

### Type of change

- [x] Performance Improvement
This commit is contained in:
parent 21179a9be9
commit 6d232f1bdb

1 changed file with 1 addition and 1 deletion
```diff
@@ -83,7 +83,7 @@ class EsQueryer:
         ), tks

         def need_fine_grained_tokenize(tk):
-            if len(tk) < 4:
+            if len(tk) < 3:
                 return False
             if re.match(r"[0-9a-z\.\+#_\*-]+$", tk):
                 return False
```
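For context, below is a minimal standalone sketch of the predicate as it reads after this change. It is an illustration under assumptions, not the repository's exact code: in the source, `need_fine_grained_tokenize` is a helper nested inside `EsQueryer`, and the hunk above only shows its two early returns, so the trailing `return True` and the example tokens here are assumed.

```python
import re


def need_fine_grained_tokenize(tk: str) -> bool:
    """Decide whether a token should be passed to the fine-grained tokenizer.

    After this change, tokens of 3 or more characters qualify (previously 4+),
    while purely alphanumeric/technical tokens (e.g. "gpt-4", "c++") are skipped.
    """
    if len(tk) < 3:
        return False
    if re.match(r"[0-9a-z\.\+#_\*-]+$", tk):
        return False
    return True  # assumed default for tokens that pass both checks


# Illustrative usage: a 3-character CJK token now passes the length check,
# while a 2-character token and a technical token are still filtered out.
print(need_fine_grained_tokenize("清华大"))  # True
print(need_fine_grained_tokenize("ai"))      # False (too short)
print(need_fine_grained_tokenize("gpt-4"))   # False (matches the technical-token pattern)
```

The net effect of lowering the threshold from 4 to 3 is that common 3-character terms reach the fine-grained tokenizer instead of only being matched as whole tokens.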