Elasticsearch nest不能找到过滤器
本文关键字:过滤器 不能 nest Elasticsearch | 更新日期: 2023-09-27 18:06:45
所以我试图配置我的索引有一定的映射和过滤器,但每当我尝试创建索引我得到以下错误:
"[amgindex] failed to create index]; nested: IllegalArgumentException[Custom Analyzer [amgsearch] failed to find filter under name [synonym]"
这是我用来创建索引
的代码public void newIndex() {
var amgBasic = new CustomAnalyzer {
Tokenizer = "edgeNGram",
Filter = new string[] { "lowercase", "worddelimiter", "stemmerEng", "stemmerNl", "stopper", "snowball" } //
};
var amgBasicText = new CustomAnalyzer {
Tokenizer = "standard",
Filter = new string[] { "lowercase", "worddelimiter" }
};
var amgSearch = new CustomAnalyzer {
Tokenizer = "whitespace",
Filter = new string[] { "lowercase", "synonym" }
};
var synonmyfilter = new SynonymTokenFilter() {
Format = "Solr",
SynonymsPath = "analysis/synonym.txt"
};
try {
var result = client.CreateIndex("amgindex", i => i
.Analysis(descriptor => descriptor
.Analyzers(bases => bases
//.Add("amgBasic", amgBasic)
//.Add("amgBasicText", amgBasicText)
.Add("amgsearch", amgSearch)
)
.TokenFilters(c => c.Add("stemmereng", new StemmerTokenFilter() { Language = "english" }))
.TokenFilters(c => c.Add("stemmernl", new StemmerTokenFilter() { Language = "english" }))
.TokenFilters(c => c.Add("stopper", new StopTokenFilter() { Stopwords = new List<string>() { "_english_", "_dutch_" } }))
.TokenFilters(c => c.Add("snowball", new SnowballTokenFilter() { Language = "english" }))
.TokenFilters(c => c.Add("worddelimiter ", new WordDelimiterTokenFilter() { }))
.TokenFilters(c => c.Add("synonym ", synonmyfilter))
)
.AddMapping<general_document>(m => m
.Properties(o => o
.String(p => p.Name(x => x.object_name).IndexAnalyzer("amgSearch"))
.String(p => p.Name(x => x.title).IndexAnalyzer("amgSearch"))
.String(p => p.Name(x => x.Text).IndexAnalyzer("amgSearch"))
)
)
);
Log.Info("Index created? " + result.Acknowledged);
} catch (Exception ex) {
Log.Error("[index-creation] " + ex.Message);
throw;
}
}
每次我使用我自己的过滤器时,错误弹出。知道为什么会这样吗?
在花了很多时间之后,我找到了它!:)
在2个过滤器的名称中有一个空格,这就是导致这个问题的原因;
▼
.TokenFilters(c => c.Add("worddelimiter ", new WordDelimiterTokenFilter() { }))
▼
.TokenFilters(c => c.Add("synonym ", synonmyfilter))