// Demo: html_strip char filter removes HTML tags from the input before
// tokenization; the keyword tokenizer then emits the remaining text
// ("hello world") as a single token.
POST _analyze
{
"tokenizer": "keyword",
"char_filter": ["html_strip"],
"text": "<b>hello world</b>"
}
// Demo: use a "mapping" char filter to perform character replacement —
// every "-" in the input is rewritten to "_" before the standard
// tokenizer splits the text.
POST _analyze
{
"tokenizer": "standard",
"char_filter": [{
"type":"mapping",
"mappings":["- => _"]
}],
"text": "123-456,I-test! test-990 650-555-1234"
}
// Demo: use a "mapping" char filter to replace emoticons with words
// (":)" -> "happy", ":(" -> "sad") before standard tokenization.
// Note: the sample text deliberately passes an array of two strings;
// _analyze processes each entry.
POST _analyze
{
"tokenizer": "standard",
"char_filter": [
{
"type":"mapping",
"mappings":[":) => happy",":( => sad"]
}
],
"text": ["I am felling :)","Felling :( today"]
}
// Regular-expression replacement (pattern_replace char filter)
GET _anal