Error while creating an index in Elasticsearch


I am new to Elasticsearch. I am trying to create an index with the mapping below, which I found online, using Kibana as my client, and it throws the error shown after the request:

PUT /local_test
    {
      "settings": {
        "index.mapping.total_fields.limit": 1000,
        "index.mapping.depth.limit": 20,
        "index.mapping.nested_fields.limit": 50,
        "number_of_shards": 5,
        "number_of_replicas": 1,
        "analysis": {
          "analyzer": {
            "edge_ngram_analyzer": {
              "type": "custom",
              "tokenizer": "edge_ngram_tokenizer",
              "filter": [
                "lowercase",
                "en_stopwords"
              ]
            },
            "standard_custom": {
              "type": "custom",
              "char_filter": [
                "punctuation_remap"
              ],
              "tokenizer": "standard",
              "filter": [
                "lowercase",
                "en_stopwords"
              ]
            },
            "lowercase_keyword": {
              "type": "custom",
              "tokenizer": "keyword",
              "filter": [
                "lowercase"
              ]
            }
          },
          "tokenizer": {
            "edge_ngram_tokenizer": {
              "type": "edge_ngram",
              "min_gram": 2,
              "max_gram": 50,
              "token_chars": [
                "letter",
                "digit"
              ]
            }
          },
          "filter": {
            "en_stopwords": {
              "type": "stop",
              "stopwords": "_english_"
            }
          },
          "char_filter": {
            "punctuation_remap": {
              "type": "mapping",
              "mappings": [
                ". => -",
                ": => -",
                "' => -"
              ]
            }
          }
        }
      },
      "mappings": {
        "local_test": {
          "_all": {
            "enabled": false
          },
          "properties": {
            "id": {
              "type": "keyword"
            },
            "user_id": {
              "type": "keyword"
            },
            "created_at": {
              "type": "date",
              "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
            },
            "docvalue": {
              "type": "object",
              "dynamic": false,
              "enabled": true,
              "properties": {
                "key": {
                  "type": "text",
                  "analyzer": "lowercase_keyword"
                },
                "value": {
                  "type": "text",
                  "analyzer": "lowercase_keyword"
                }
              }
            },
            "recurring": {
              "type": "boolean"
            },
            "amount": {
              "type": "long"
            }
          }
        }
      }
    }

"type": "mapper_parsing_exception", "reason": "Root mapping definition has unsupported parameters: [local_test : {_all={enabled=false}, properties={amount={type=long}, user_id={type=keyword}, recurring={type=boolean}, created_at={format=yyyy-MM-dd HH:mm:ss||epoch_millis, type=date}, id={type=keyword}, docvalue={dynamic=false, type=object, enabled=true, properties={value={analyzer=lowercase_keyword, type=text}, key={analyzer=lowercase_keyword, type=text}}}}}]"

Tags: elasticsearch, search, lucene
1 Answer

There are two issues in your request. I assume you are on the latest major version, i.e. 7.x (if you are not sure which version your cluster runs, there is a quick check after the list below).

  1. Remove _all, which has been removed in the latest versions. See this official blog on this change.
  2. Remove the type local_test, since mapping types have also been removed in the latest versions. For more information, see the removal of types.
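
If you are unsure which version your cluster is running, the root endpoint reports it. This is just a quick sketch from Kibana Dev Tools; the response shape is abridged:

# The root endpoint returns cluster metadata, including the version
GET /

# The response contains "version": { "number": "7.x.y", ... };
# the advice above applies when the number starts with 7.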

So the request below works fine:

(Use your own index name in the PUT path.)

{
    "settings": {
        "index.mapping.total_fields.limit": 1000,
        "index.mapping.depth.limit": 20,
        "index.mapping.nested_fields.limit": 50,
        "number_of_shards": 5,
        "number_of_replicas": 1,
        "analysis": {
            "analyzer": {
                "edge_ngram_analyzer": {
                    "type": "custom",
                    "tokenizer": "edge_ngram_tokenizer",
                    "filter": [
                        "lowercase",
                        "en_stopwords"
                    ]
                },
                "standard_custom": {
                    "type": "custom",
                    "char_filter": [
                        "punctuation_remap"
                    ],
                    "tokenizer": "standard",
                    "filter": [
                        "lowercase",
                        "en_stopwords"
                    ]
                },
                "lowercase_keyword": {
                    "type": "custom",
                    "tokenizer": "keyword",
                    "filter": [
                        "lowercase"
                    ]
                }
            },
            "tokenizer": {
                "edge_ngram_tokenizer": {
                    "type": "edge_ngram",
                    "min_gram": 2,
                    "max_gram": 50,
                    "token_chars": [
                        "letter",
                        "digit"
                    ]
                }
            },
            "filter": {
                "en_stopwords": {
                    "type": "stop",
                    "stopwords": "_english_"
                }
            },
            "char_filter": {
                "punctuation_remap": {
                    "type": "mapping",
                    "mappings": [
                        ". => -",
                        ": => -",
                        "' => -"
                    ]
                }
            }
        }
    },
    "mappings": {
        "properties": {
            "id": {
                "type": "keyword"
            },
            "user_id": {
                "type": "keyword"
            },
            "created_at": {
                "type": "date",
                "format": "yyyy-MM-dd HH:mm:ss||epoch_millis"
            },
            "docvalue": {
                "type": "object",
                "dynamic": false,
                "enabled": true,
                "properties": {
                    "key": {
                        "type": "text",
                        "analyzer": "lowercase_keyword"
                    },
                    "value": {
                        "type": "text",
                        "analyzer": "lowercase_keyword"
                    }
                }
            },
            "recurring": {
                "type": "boolean"
            },
            "amount": {
                "type": "long"
            }
        }
    }
}

Output

{
    "acknowledged": true,
    "shards_acknowledged": true,
    "index": "playlistmy"
}
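
To sanity-check the new index, you can run one of the custom analyzers against a sample string and index a test document. This is only a sketch: it assumes you named the index local_test (adjust to whatever name you used in the PUT), and the field values are made up for illustration. Note that created_at must match the yyyy-MM-dd HH:mm:ss||epoch_millis format declared in the mapping.

# Inspect the tokens produced by the custom edge_ngram_analyzer
POST /local_test/_analyze
{
    "analyzer": "edge_ngram_analyzer",
    "text": "recurring payment"
}

# Index a sample document that conforms to the mapping
PUT /local_test/_doc/1
{
    "id": "1",
    "user_id": "user-42",
    "created_at": "2020-01-15 10:30:00",
    "docvalue": {
        "key": "category",
        "value": "groceries"
    },
    "recurring": true,
    "amount": 2500
}

The _analyze response lists the edge n-gram tokens, which is a quick way to confirm that the custom analysis chain was registered, and the document PUT confirms that the field mappings and date format accept real data.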