1 "Basic test": 2 - do: 3 indices.analyze: 4 body: 5 text: Foo Bar 6 - length: { tokens: 2 } 7 - match: { tokens.0.token: foo } 8 - match: { tokens.1.token: bar } 9 10 --- 11 "Index and field": 12 - do: 13 indices.create: 14 index: test 15 body: 16 mappings: 17 test: 18 properties: 19 text: 20 type: text 21 analyzer: standard 22 23 - do: 24 indices.analyze: 25 index: test 26 body: 27 field: text 28 text: Foo Bar! 29 - length: { tokens: 2 } 30 - match: { tokens.0.token: foo } 31 - match: { tokens.1.token: bar } 32 33 --- 34 "Array text": 35 - do: 36 indices.analyze: 37 body: 38 text: ["Foo Bar", "Baz"] 39 tokenizer: standard 40 - length: { tokens: 3 } 41 - match: { tokens.0.token: Foo } 42 - match: { tokens.1.token: Bar } 43 - match: { tokens.2.token: Baz } 44 45 --- 46 "Detail response with Analyzer": 47 - do: 48 indices.analyze: 49 body: 50 text: This is troubled 51 analyzer: standard 52 explain: true 53 - length: { detail.analyzer.tokens: 3 } 54 - match: { detail.analyzer.name: standard } 55 - match: { detail.analyzer.tokens.0.token: this } 56 - match: { detail.analyzer.tokens.1.token: is } 57 - match: { detail.analyzer.tokens.2.token: troubled } 58 59 --- 60 "Custom filter in request": 61 - skip: 62 version: " - 5.99.99" 63 reason: token filter name changed in 6.0, so this needs to be skipped on mixed clusters 64 - do: 65 indices.analyze: 66 body: 67 text: foo bar buzz 68 tokenizer: standard 69 explain: true 70 filter: 71 - type: stop 72 stopwords: ["foo", "buzz"] 73 - length: { detail.tokenizer.tokens: 3 } 74 - length: { detail.tokenfilters.0.tokens: 1 } 75 - match: { detail.tokenizer.name: standard } 76 - match: { detail.tokenizer.tokens.0.token: foo } 77 - match: { detail.tokenizer.tokens.1.token: bar } 78 - match: { detail.tokenizer.tokens.2.token: buzz } 79 - match: { detail.tokenfilters.0.name: "_anonymous_tokenfilter" } 80 - match: { detail.tokenfilters.0.tokens.0.token: bar }