// This file is autogenerated, DO NOT EDIT
// analysis/tokenfilters/keep-words-tokenfilter.asciidoc:118
[source, python]
----
resp = client.indices.create(
    index="keep_words_example",
    settings={
        "analysis": {
            "analyzer": {
                "standard_keep_word_array": {
                    "tokenizer": "standard",
                    "filter": [
                        "keep_word_array"
                    ]
                },
                "standard_keep_word_file": {
                    "tokenizer": "standard",
                    "filter": [
                        "keep_word_file"
                    ]
                }
            },
            "filter": {
                "keep_word_array": {
                    "type": "keep",
                    "keep_words": [
                        "one",
                        "two",
                        "three"
                    ]
                },
                "keep_word_file": {
                    "type": "keep",
                    "keep_words_path": "analysis/example_word_list.txt"
                }
            }
        }
    },
)
print(resp)
----