Elasticsearch: Analyzers, Tokenizers, Extension Dictionaries, and Stop Words
# tokenizer
put /orders
{
  "settings": {},
  "mappings": {
    "properties": {
      "title": {
        "type": "text",
        "analyzer": "standard"
      }
    }
  }
}
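Whether the standard analyzer was actually applied to the title field can be verified by reading the mapping back (a quick sanity check):

get /orders/_mapping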
put /orders/_doc/1
{
  "title": "分大, this is a good MAN"
}
get _cat/indices?v
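Before searching, it helps to see what the standard analyzer produces for the title above; with the _analyze API, the individual Chinese characters and the lowercased English words should come back as separate tokens:

post /_analyze
{
  "analyzer": "standard",
  "text": "分大, this is a good MAN"
}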
Because the standard analyzer lowercases English tokens, a term query (which is not analyzed) has to use the lowercased token:

get /orders/_search
{
  "query": {
    "term": {
      "title": {
        "value": "man"
      }
    }
  }
}
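For comparison, a match query analyzes the search text with the field's analyzer, so even the original uppercase "MAN" matches (a minimal sketch, assuming the document above is indexed):

get /orders/_search
{
  "query": {
    "match": {
      "title": "MAN"
    }
  }
}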
The ik_max_word analyzer comes from the IK analysis plugin (elasticsearch-analysis-ik) and performs fine-grained Chinese word segmentation:

post /_analyze
{
  "analyzer": "ik_max_word",
  "text": "百知教育存在一名非常优秀的老师,他的名字叫陈艳男"
}
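The IK plugin also provides an ik_smart analyzer, which produces a coarser segmentation than ik_max_word; running the same sentence through it makes the difference visible:

post /_analyze
{
  "analyzer": "ik_smart",
  "text": "百知教育存在一名非常优秀的老师,他的名字叫陈艳男"
}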
Extension dictionaries and stop-word dictionaries for IK are configured in the plugin's IKAnalyzer.cfg.xml; Elasticsearch has to be restarted before changes to the local dictionary files take effect.

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">
<properties>
  <comment>IK Analyzer extension configuration</comment>
  <!-- Configure your own extension dictionaries here; multiple files are separated with ';' -->
  <entry key="ext_dict">extra_main.dic;ext_dict.dic</entry>
  <!-- Configure your own extension stop-word dictionaries here -->
  <entry key="ext_stopwords">ext_stopwords.dic</entry>
  <!-- Configure a remote extension dictionary here -->
  <!-- <entry key="remote_ext_dict">words_location</entry> -->
  <!-- Configure a remote extension stop-word dictionary here -->
  <!-- <entry key="remote_ext_stopwords">words_location</entry> -->
</properties>
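Once the dictionary files named above are placed next to this config file (the names extra_main.dic, ext_dict.dic, and ext_stopwords.dic are just examples) and Elasticsearch is restarted, the effect can be verified with another _analyze call: a word added to the extension dictionary should come back as a single token, while words listed in the stop-word file should disappear from the output. A sketch, assuming 陈艳男 was added to ext_dict.dic:

post /_analyze
{
  "analyzer": "ik_max_word",
  "text": "他的名字叫陈艳男"
}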