Elasticlunr.Tokenizer (Elasticlunr v0.1.1)

Summary

Functions


tokenize(str, separator \\ ~r/[\s\-]+/)


Specs

tokenize(binary() | number(), Regex.t()) :: [Elasticlunr.Token.t()]
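
A minimal usage sketch based on the signature and spec above. The default separator ~r/[\s\-]+/ splits on runs of whitespace and hyphens; the exact fields of the returned Elasticlunr.Token structs are not shown here and are an assumption.

    # Tokenize with the default separator (whitespace and hyphens).
    tokens = Elasticlunr.Tokenizer.tokenize("hello-world example")
    # Expected to produce one Elasticlunr.Token per word: "hello", "world", "example".

    # Numbers are also accepted, per the spec (binary() | number()).
    Elasticlunr.Tokenizer.tokenize(42)

    # Tokenize with a custom separator, passed as a Regex.t(), e.g. splitting on commas.
    Elasticlunr.Tokenizer.tokenize("red,green,blue", ~r/,+/)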