Reputation: 1
I am trying to initialize settings and mappings via shell script in docker.
#!/bin/sh
until $(curl --output /dev/null --silent --head --fail http://elasticsearch:9200); do
printf '.'
sleep 5
done
for i in {30..0}; do
if curl elasticsearch:9200; then
curl -XPUT 'elasticsearch:9200/x_product/' -H 'Content-Type: application/json' -d '{
"settings": {
"number_of_shards": 1,
"analysis": {
"filter": {
"ngram_filter": {
"type": "nGram",
"min_gram": 2,
"max_gram": 3
}
},
"analyzer": {
"ngram_analyzer": {
"type": "custom",
"tokenizer": "my_tokenizer",
"filter": ["lowercase", "ngram_filter"]
}
}
}
},
"mappings" : {
"product": {
"name": {
"type": "string",
"include_in_all": true,
"term_vector": "yes",
"index_analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"description_value": {
"type": "string",
"include_in_all": true,
"term_vector": "yes",
"index_analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"barcode_value": {
"type": "string",
"include_in_all": true,
"term_vector": "yes",
"index_analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"searchword_content": {
"type": "string",
"include_in_all": true,
"term_vector": "yes",
"index_analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"discount": {
"type": "integer"
},
"datetime": {
"type": "date",
"format": "epoch_millis"
}
}
}
}';
break;
fi
sleep 2
done
However this syntax give me the error:
{"error":{"root_cause":[{"type":"parse_exception","reason":"Failed to parse content to map"}],"type":"parse_exception","reason":"Failed to parse content to map","caused_by":{"type":"json_parse_exception","reason":"Unexpected character ('}' (code 125)): was expecting double-quote to start field name\n at [Source: org.elasticsearch.transport.netty4.ByteBufStreamInput@770de4a9; line: 19, column: 22]"}},"status":400}
What was wrong with my syntax?
Updated: 1. Update my syntax change "lowercase to "lowercase" also type to "type", however got another error 2. Remove extra comma at analyzer
Upvotes: 0
Views: 299
Reputation: 1
Beside the syntax problem there is also problem in json fields for elastic search, so I change some fields and value, and also separate the settings and mappings to make it easier to read.
curl -XPUT 'elasticsearch:9200/x_product/_settings' -H 'Content-Type: application/json' -d '{
"analysis": {
"filter": {
"ngram_filter": {
"type": "nGram",
"min_gram": 2,
"max_gram": 3
}
},
"analyzer": {
"ngram_analyzer": {
"type": "custom",
"tokenizer": "standard",
"filter": ["lowercase", "ngram_filter"]
}
}
}
}';
curl -XPUT 'elasticsearch:9200/x_product/_mapping/product' -H 'Content-Type: application/json' -d '{
"properties": {
"name": {
"type": "text",
"term_vector": "yes",
"analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"description_value": {
"type": "text",
"term_vector": "yes",
"analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"barcode_value": {
"type": "text",
"term_vector": "yes",
"analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"searchword_content": {
"type": "text",
"term_vector": "yes",
"analyzer": "ngram_analyzer",
"search_analyzer": "standard"
},
"discount": {
"type": "integer"
},
"datetime": {
"type": "date",
"format": "epoch_millis"
}
}
}';
Upvotes: 0