diff --git a/lib/tokenizer.js b/lib/tokenizer.js
index 13dd26ae..78ff571a 100644
--- a/lib/tokenizer.js
+++ b/lib/tokenizer.js
@@ -28,9 +28,17 @@ elasticlunr.tokenizer = function (str) {
       return true;
     });
 
-    return arr.map(function (t) {
+    arr = arr.map(function (t) {
       return elasticlunr.utils.toString(t).toLowerCase();
     });
+
+    var out = [];
+    arr.forEach(function(item) {
+      var tokens = item.split(elasticlunr.tokenizer.seperator);
+      out = out.concat(tokens);
+    }, this);
+
+    return out;
   }
 
   return str.toString().trim().toLowerCase().split(elasticlunr.tokenizer.seperator);
diff --git a/test/tokenizer_test.js b/test/tokenizer_test.js
index 7b4a76b1..bb8b7ac2 100644
--- a/test/tokenizer_test.js
+++ b/test/tokenizer_test.js
@@ -117,3 +117,27 @@ test("test get seperator function", function () {
   elasticlunr.tokenizer.setSeperator(sep2);
   deepEqual(elasticlunr.tokenizer.getSeperator(), sep2);
 });
+
+test("tokenize array", function () {
+  var str = ['hello world', 'glad to see you'];
+  var tokens = elasticlunr.tokenizer(str);
+  deepEqual(tokens, ['hello', 'world', 'glad', 'to', 'see', 'you']);
+});
+
+test("tokenize array 2", function () {
+  var str = ['helloworld', 'glad to see you'];
+  var tokens = elasticlunr.tokenizer(str);
+  deepEqual(tokens, ['helloworld', 'glad', 'to', 'see', 'you']);
+});
+
+test("tokenize array with null and undefined elements", function () {
+  var str = ['helloworld', null, undefined, 'glad to see you'];
+  var tokens = elasticlunr.tokenizer(str);
+  deepEqual(tokens, ['helloworld', 'glad', 'to', 'see', 'you']);
+});
+
+test("tokenize array with in-token separator", function () {
+  var str = ['helloworld', 'glad to see you', 'hyper-parameters'];
+  var tokens = elasticlunr.tokenizer(str);
+  deepEqual(tokens, ['helloworld', 'glad', 'to', 'see', 'you', 'hyper', 'parameters']);
+});