# Tokenize the text with the `standard` tokenizer only (no filters).
# Expected tokens: [this, is, a, test, 13544478956] — standard splits on
# whitespace/punctuation and keeps numeric tokens intact.
# NOTE: Kibana Console requires the body on the lines after the method/path.
GET _analyze
{
  "tokenizer": "standard",
  "text": "this is a test 13544478956"
}
# Tokenize with `standard`, then apply an inline `length` token filter that
# keeps only tokens of 1–3 characters.
# Expected tokens: [is, a] — "this" and "test" (4 chars) are dropped.
# NOTE: Kibana Console requires the body on the lines after the method/path.
GET _analyze
{
  "tokenizer": "standard",
  "filter": [
    { "type": "length", "min": 1, "max": 3 }
  ],
  "text": "this is a test"
}
# Tokenize with `standard` (empty char_filter list is a no-op), then remove
# English stopwords via the built-in `stop` filter.
# Expected tokens: [Eating, apple, day, keeps, doctor, away] —
# "an" and "a" are removed by the default `_english_` stopword set.
# NOTE: Kibana Console requires the body on the lines after the method/path.
POST _analyze
{
  "char_filter": [],
  "tokenizer": "standard",
  "filter": [ "stop" ],
  "text": "Eating an apple a day keeps doctor away"
}