mirror of
https://github.com/nextapps-de/flexsearch.git
synced 2025-09-02 18:33:17 +02:00
(re)adding tests, migration
This commit is contained in:
19
test/.c8rc.json
Normal file
19
test/.c8rc.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"reporter": "text",
|
||||
"reports-dir": "./test/report/",
|
||||
"branches": 80,
|
||||
"functions": 80,
|
||||
"lines": 80,
|
||||
"statements": 80,
|
||||
"exclude": [
|
||||
"src/compress.js",
|
||||
"src/db/indexeddb/**"
|
||||
],
|
||||
"src": [
|
||||
"src"
|
||||
],
|
||||
"include": [
|
||||
"test/*.js",
|
||||
"src/**"
|
||||
]
|
||||
}
|
97
test/async.js
Normal file
97
test/async.js
Normal file
@@ -0,0 +1,97 @@
|
||||
global.self = global;
|
||||
const env = process.argv[3];
|
||||
import { expect } from "chai";
|
||||
console.log("--RELEASE-------------");
|
||||
console.log(env ? "dist/" + env + ".js" : "src/bundle.js")
|
||||
console.log("----------------------");
|
||||
let FlexSearch = await import(env ? "../dist/" + env + ".js" : "../src/bundle.js");
|
||||
if(FlexSearch.default) FlexSearch = FlexSearch.default;
|
||||
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;
|
||||
const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;
|
||||
const build_light = env && env.includes(".light");
|
||||
const build_compact = env && env.includes(".compact");
|
||||
const build_esm = !env || env.startsWith("module");
|
||||
const Charset = _Charset || (await import("../src/charset.js")).default;
|
||||
|
||||
if(!build_light) describe("Add (Async)", function(){
|
||||
|
||||
it("Should have been added asynchronously to the index", async function(){
|
||||
|
||||
const index = new Index(/*{ priority: 4 }*/);
|
||||
let duration = 0;
|
||||
let time = Date.now();
|
||||
|
||||
setTimeout(function(){
|
||||
duration = Date.now() - time;
|
||||
});
|
||||
|
||||
for(let i = 0; i < 1000; i++){
|
||||
await index.addAsync(i, "foo");
|
||||
if(duration) break;
|
||||
}
|
||||
|
||||
expect(duration).to.equal(0);
|
||||
|
||||
for(let i = 0; i < 999999999; i++){
|
||||
await index.addAsync(i, "foo");
|
||||
if(duration){
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
expect(duration).to.closeTo(50, 5);
|
||||
});
|
||||
|
||||
it("Should have been added asynchronously to the index (priority: 1)", async function(){
|
||||
|
||||
const index = new Index({ priority: 1 });
|
||||
let duration = 0;
|
||||
let time = Date.now();
|
||||
|
||||
setTimeout(function(){
|
||||
duration = Date.now() - time;
|
||||
});
|
||||
|
||||
for(let i = 0; i < 1000; i++){
|
||||
await index.addAsync(i, "foo");
|
||||
if(duration) break;
|
||||
}
|
||||
|
||||
expect(duration).to.equal(0);
|
||||
|
||||
for(let i = 0; i < 999999999; i++){
|
||||
await index.addAsync(i, "foo");
|
||||
if(duration){
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
expect(duration).to.closeTo(4, 2);
|
||||
});
|
||||
|
||||
it("Should have been added asynchronously to the index (priority: 9)", async function(){
|
||||
|
||||
const index = new Index({ priority: 9 });
|
||||
let duration = 0;
|
||||
let time = Date.now();
|
||||
|
||||
setTimeout(function(){
|
||||
duration = Date.now() - time;
|
||||
});
|
||||
|
||||
for(let i = 0; i < 1000; i++){
|
||||
await index.addAsync(i, "foo");
|
||||
if(duration) break;
|
||||
}
|
||||
|
||||
expect(duration).to.equal(0);
|
||||
|
||||
for(let i = 0; i < 999999999; i++){
|
||||
await index.addAsync(i, "foo");
|
||||
if(duration){
|
||||
break;
|
||||
}
|
||||
}
|
||||
expect(duration).to.closeTo(250, 25);
|
||||
});
|
||||
});
|
958
test/basic.js
Normal file
958
test/basic.js
Normal file
@@ -0,0 +1,958 @@
|
||||
global.self = global;
|
||||
const env = process.argv[3];
|
||||
import { expect } from "chai";
|
||||
// console.log("--------------");
|
||||
// console.log(env ? "dist/" + env + ".js" : "src/bundle.js")
|
||||
let FlexSearch = await import(env ? "../dist/" + env + ".js" : "../src/bundle.js");
|
||||
if(FlexSearch.default) FlexSearch = FlexSearch.default;
|
||||
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;
|
||||
const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;
|
||||
const build_light = env && env.includes(".light");
|
||||
const build_compact = env && env.includes(".compact");
|
||||
const build_esm = !env || env.startsWith("module");
|
||||
const Charset = _Charset || (await import("../src/charset.js")).default;
|
||||
|
||||
// global.FlexSearch = { Index, Document, Worker, Charset, Encoder, Resolver };
|
||||
// global.build = { build_light, build_compact, build_esm };
|
||||
|
||||
describe("Initialize", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
it("Should have proper constructor", function(){
|
||||
|
||||
expect(index).to.be.an.instanceOf(Index);
|
||||
});
|
||||
|
||||
it("Should have all provided methods", function(){
|
||||
|
||||
expect(index).to.respondTo("search");
|
||||
expect(index).to.respondTo("add");
|
||||
expect(index).to.respondTo("append");
|
||||
expect(index).to.respondTo("update");
|
||||
expect(index).to.respondTo("remove");
|
||||
expect(index).to.respondTo("clear");
|
||||
|
||||
expect(index).to.hasOwnProperty("map");
|
||||
expect(index).to.hasOwnProperty("ctx");
|
||||
expect(index).to.hasOwnProperty("reg");
|
||||
|
||||
if(!build_light){
|
||||
expect(index).to.respondTo("searchAsync");
|
||||
expect(index).to.respondTo("addAsync");
|
||||
expect(index).to.respondTo("appendAsync");
|
||||
expect(index).to.respondTo("updateAsync");
|
||||
expect(index).to.respondTo("removeAsync");
|
||||
expect(index).to.respondTo("export");
|
||||
expect(index).to.respondTo("import");
|
||||
expect(index).to.respondTo("serialize");
|
||||
}
|
||||
});
|
||||
|
||||
it("Should have the default options", function(){
|
||||
|
||||
expect(index.resolution).to.equal(9);
|
||||
expect(index.depth).to.equal(0);
|
||||
expect(index.fastupdate).to.equal(false);
|
||||
});
|
||||
|
||||
it("Should have the default Encoder", function(){
|
||||
|
||||
const encoder = new Encoder(Charset.LatinDefault);
|
||||
expect(index.tokenize).to.equal("strict");
|
||||
expect(typeof index.encoder.normalize).to.equal(typeof encoder.normalize);
|
||||
index.encoder.normalize = encoder.normalize;
|
||||
expect(index.encoder).to.eql(encoder);
|
||||
expect(index.encoder.minlength).to.equal(1);
|
||||
expect(index.encoder.maxlength).to.equal(0);
|
||||
expect(index.encoder.rtl).to.equal(false);
|
||||
expect(index.encoder.numeric).to.equal(true);
|
||||
expect(index.encoder.dedupe).to.equal(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Add", function(){
|
||||
|
||||
it("Should have been properly added to the index", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
index.add(0, "foo");
|
||||
index.add(2, "bar");
|
||||
index.add(1, "FooBar");
|
||||
index.add(3, "Some 'short' content.");
|
||||
|
||||
expect(index.reg.keys()).to.have.members([0, 1, 2, 3]);
|
||||
expect(index.map.keys()).to.have.members(["foo", "bar", "foobar", "some", "short", "content"]);
|
||||
expect(index.ctx.size).to.equal(0);
|
||||
expect(index.reg.size).to.equal(4);
|
||||
});
|
||||
|
||||
build_light || it("Should have been numeric content properly added to the index (Triplets)", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
index.add(0, "TEST-123456789123456789");
|
||||
index.add(1, "T10030");
|
||||
index.add(2, "T10030T10030");
|
||||
index.add(3, "1443-AB14345-1778");
|
||||
|
||||
expect(index.reg.keys()).to.have.members([0, 1, 2, 3]);
|
||||
expect(index.map.keys()).to.have.members([
|
||||
"test", "123", "456", "789",
|
||||
"t", "100", "30",
|
||||
// id 2 was already completely added, split: "t", "100", "30", "t", "100", "30"
|
||||
"144", "3", "ab", "143", "45", "177", "8"
|
||||
]);
|
||||
expect(index.ctx.size).to.equal(0);
|
||||
expect(index.reg.size).to.equal(4);
|
||||
});
|
||||
|
||||
it("Should not have been added to the index (Parameter)", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
index.add("foo");
|
||||
index.add(3);
|
||||
index.add(null, "foobar");
|
||||
index.add(void 0, "foobar");
|
||||
index.add(3, null);
|
||||
index.add(3, false);
|
||||
|
||||
expect(index.reg.size).to.equal(0);
|
||||
});
|
||||
|
||||
it("Should not have been added to the index (Empty)", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
index.add(1, "");
|
||||
index.add(2, " ");
|
||||
index.add(3, " ");
|
||||
index.add(4, " - ");
|
||||
index.add(5, ` ...
|
||||
- : ,
|
||||
<-- `);
|
||||
|
||||
expect(index.reg.size).to.equal(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Search (Sync)", function(){
|
||||
|
||||
it("Should have been matched properly", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
index.add(0, "foo");
|
||||
index.add(1, "bar");
|
||||
index.add(2, "FooBar");
|
||||
index.add(3, "Some 'short' content.");
|
||||
index.add(4, "Foo Bar");
|
||||
|
||||
expect(index.search("foo")).to.have.members([0, 4]);
|
||||
expect(index.search("bar")).to.include(1, 4);
|
||||
expect(index.search("foobar")).to.include(2);
|
||||
expect(index.search("short 'content'")).to.include(3);
|
||||
expect(index.search("foo foo")).to.have.members([0, 4]);
|
||||
expect(index.search("foo foo bar foo bar")).to.have.members([4]);
|
||||
});
|
||||
|
||||
it("Should have been applied limit/offset properly", function(){
|
||||
|
||||
const index = new Index();
|
||||
|
||||
for(let i = 0; i < 10; i++){
|
||||
index.add(i, "foo");
|
||||
}
|
||||
|
||||
expect(index.search("foo", 99)).to.have.members([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
|
||||
expect(index.search("foo", 3)).to.have.members([0, 1, 2]);
|
||||
expect(index.search("foo", { limit: 3 })).to.have.members([0, 1, 2]);
|
||||
expect(index.search("foo", { limit: 3, offset: 3 })).to.have.members([3, 4, 5]);
|
||||
expect(index.search("foo", { limit: 3, offset: 9 })).to.have.members([9]);
|
||||
expect(index.search("foo", { limit: 3, offset: 10 })).to.have.members([]);
|
||||
expect(index.search({ query: "foo", limit: 1 })).to.include(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Search Scoring", function(){
|
||||
|
||||
it("Should have been matched properly", function(){
|
||||
|
||||
const index = new Index();
|
||||
[ 'cats abcd efgh ijkl mnop qrst uvwx cute',
|
||||
'cats abcd efgh ijkl mnop dogs cute',
|
||||
'cats abcd efgh ijkl mnop cute',
|
||||
'cats abcd efgh ijkl cute',
|
||||
'cats abcd efgh cute',
|
||||
'cats abcd cute',
|
||||
'cats cute'
|
||||
].forEach((item, id) => {
|
||||
index.add(id, item);
|
||||
});
|
||||
|
||||
let result = index.search("cats cute");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
|
||||
result = index.search("cute cats");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
|
||||
result = index.search("cute dogs cats");
|
||||
expect(result.length).to.equal(1);
|
||||
expect(result).to.eql([1]);
|
||||
|
||||
result = index.search("cute cat");
|
||||
expect(result.length).to.equal(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Tokenizer", function(){
|
||||
|
||||
it("Should have been \"forward\" tokenized properly", function(){
|
||||
|
||||
const index = new Index({ tokenize: "forward" });
|
||||
[ 'cats abcd efgh ijkl mnop qrst uvwx cute',
|
||||
'cats abcd efgh ijkl mnop dogs cute',
|
||||
'cats abcd efgh ijkl mnop cute',
|
||||
'cats abcd efgh ijkl cute',
|
||||
'cats abcd efgh cute',
|
||||
'cats abcd cute',
|
||||
'cats cute'
|
||||
].forEach((item, id) => {
|
||||
index.add(id, item);
|
||||
});
|
||||
|
||||
let result = index.search("cat cute");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
|
||||
result = index.search("cute cat");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
});
|
||||
|
||||
it("Should have been \"reverse\" tokenized properly", function(){
|
||||
|
||||
const index = new Index({ tokenize: "reverse", resolution: 12 });
|
||||
[ 'cats abcd efgh ijkl mnop qrst uvwx cute',
|
||||
'cats abcd efgh ijkl mnop dogs cute',
|
||||
'cats abcd efgh ijkl mnop cute',
|
||||
'cats abcd efgh ijkl cute',
|
||||
'cats abcd efgh cute',
|
||||
'cats abcd cute',
|
||||
'cats cute'
|
||||
].forEach((item, id) => {
|
||||
index.add(id, item);
|
||||
});
|
||||
|
||||
let result = index.search("ats ute");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
|
||||
result = index.search("ute ats");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
});
|
||||
|
||||
it("Should have been \"full\" tokenized properly", function(){
|
||||
|
||||
const index = new Index({ tokenize: "full", resolution: 12 });
|
||||
[ 'cats abcd efgh ijkl mnop qrst uvwx cute',
|
||||
'cats abcd efgh ijkl mnop dogs cute',
|
||||
'cats abcd efgh ijkl mnop cute',
|
||||
'cats abcd efgh ijkl cute',
|
||||
'cats abcd efgh cute',
|
||||
'cats abcd cute',
|
||||
'cats cute'
|
||||
].forEach((item, id) => {
|
||||
index.add(id, item);
|
||||
});
|
||||
|
||||
let result = index.search("at ut");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
|
||||
result = index.search("ut at");
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([6, 5, 4, 3, 2, 1, 0]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Search: Suggestion", function(){
|
||||
|
||||
it("Should have been provide suggestions properly", function(){
|
||||
|
||||
const index = new Index({ tokenize: "forward" });
|
||||
[ 'cats abcd efgh ijkl mnop qrst uvwx cute',
|
||||
'cats abcd efgh ijkl mnop dogs cute', // <-- dogs
|
||||
'cats abcd efgh ijkl mnop cute',
|
||||
'cats abcd efgh ijkl cute',
|
||||
'cats abcd efgh cute',
|
||||
'cats abcd cute',
|
||||
'cats cute'
|
||||
].forEach((item, id) => {
|
||||
index.add(id, item);
|
||||
});
|
||||
|
||||
let result = index.search("cute dog or cute cat or nothing", { suggest: true });
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([1, 6, 5, 4, 3, 2, 0]);
|
||||
|
||||
result = index.search("nothing or cute cat or cute dog", { suggest: true });
|
||||
expect(result.length).to.equal(7);
|
||||
expect(result).to.eql([1, 6, 5, 4, 3, 2, 0]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Update (Sync)", function(){
|
||||
|
||||
it("Should have been updated to the index", function(){
|
||||
|
||||
const index = new Index({ tokenize: "full" });
|
||||
index.add(1, "foo");
|
||||
index.add(2, "bar");
|
||||
index.add(3, "foobar");
|
||||
|
||||
index.update(1, "bar");
|
||||
index.update(2, "foobar");
|
||||
index.update(3, "foo");
|
||||
|
||||
expect(index.reg.size).to.equal(3);
|
||||
expect(index.search("foo")).to.have.members([2, 3]);
|
||||
expect(index.search("bar")).to.have.members([1, 2]);
|
||||
expect(index.search("bar")).to.not.include(3);
|
||||
expect(index.search("foobar")).to.have.members([2]);
|
||||
|
||||
index.update(1, "bar");
|
||||
index.update(2, "foobar");
|
||||
index.update(3, "foo");
|
||||
|
||||
expect(index.reg.size).to.equal(3);
|
||||
expect(index.search("foo")).to.have.members([2, 3]);
|
||||
expect(index.search("bar")).to.have.members([1, 2]);
|
||||
expect(index.search("bar")).to.not.include(3);
|
||||
expect(index.search("foobar")).to.have.members([2]);
|
||||
});
|
||||
|
||||
it("Should not have been updated to the index", function(){
|
||||
|
||||
const index = new Index({ tokenize: "full" });
|
||||
index.add(1, "bar");
|
||||
index.add(2, "foobar");
|
||||
index.add(3, "foo");
|
||||
|
||||
index.update("foo");
|
||||
// todo
|
||||
// index.update(1);
|
||||
index.update(null, "foobar");
|
||||
index.update(void 0, "foobar");
|
||||
// index.update(1, null);
|
||||
// index.update(2, false);
|
||||
index.update(4, "new");
|
||||
|
||||
expect(index.reg.size).to.equal(4);
|
||||
expect(index.search("foo")).to.have.members([2, 3]);
|
||||
expect(index.search("bar")).to.have.members([1, 2]);
|
||||
expect(index.search("bar")).to.not.include(3);
|
||||
expect(index.search("foobar")).to.have.members([2]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Remove (Sync)", function(){
|
||||
|
||||
it("Should have been removed from the index", function(){
|
||||
|
||||
const index = new Index({ tokenize: "full" });
|
||||
index.add(1, "bar");
|
||||
index.add(2, "foobar");
|
||||
index.add(3, "foo");
|
||||
|
||||
index.remove(2);
|
||||
index.remove(1);
|
||||
index.remove(3);
|
||||
index.remove(4);
|
||||
|
||||
expect(index.reg.size).to.equal(0);
|
||||
expect(index.search("foo")).to.have.lengthOf(0);
|
||||
expect(index.search("bar")).to.have.lengthOf(0);
|
||||
expect(index.search("foobar")).to.have.lengthOf(0);
|
||||
});
|
||||
});
|
||||
|
||||
// if(env !== "light") describe("Operators", function(){
|
||||
//
|
||||
// var data = [{
|
||||
// id: 2,
|
||||
// title: "Title 3",
|
||||
// body: "Body 3",
|
||||
// blacklist: "x1"
|
||||
// },{
|
||||
// id: 1,
|
||||
// title: "Title 2",
|
||||
// body: "Body 2",
|
||||
// blacklist: "x2"
|
||||
// },{
|
||||
// id: 0,
|
||||
// title: "Title 1",
|
||||
// body: "Body 1",
|
||||
// blacklist: "x3"
|
||||
// }];
|
||||
//
|
||||
// var index = new FlexSearch({
|
||||
// tokenize: "forward",
|
||||
// doc: {
|
||||
// id: "id",
|
||||
// field: ["title", "body", "blacklist"]
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// it("Should have been properly applied logic", function(){
|
||||
//
|
||||
// index.add(data);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "xxx",
|
||||
// bool: "not"
|
||||
// }])).to.have.members(data);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "xxx",
|
||||
// bool: "not"
|
||||
// }])).to.have.length(0);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "title",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "xxx",
|
||||
// bool: "not"
|
||||
// }])).to.have.members(data);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "title",
|
||||
// bool: "or"
|
||||
// }])).to.have.members(data);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "title",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "x1",
|
||||
// bool: "not"
|
||||
// }])).to.have.members([data[1], data[2]]);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "body",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "title",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "x1",
|
||||
// bool: "not"
|
||||
// }])).to.have.length(0);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "blacklist",
|
||||
// query: "x1",
|
||||
// bool: "not"
|
||||
// },{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "or"
|
||||
// }])).to.have.members([data[1], data[2]]);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "body",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "x2",
|
||||
// bool: "not"
|
||||
// }])).to.have.members([data[0], data[2]]);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "blacklist",
|
||||
// query: "x2",
|
||||
// bool: "not"
|
||||
// },{
|
||||
// field: "title",
|
||||
// query: "body",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "and"
|
||||
// }])).to.have.members([data[0], data[2]]);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "body",
|
||||
// bool: "or"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "x2",
|
||||
// bool: "not"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "and"
|
||||
// }])).to.have.members([data[0], data[2]]);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "and"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "x",
|
||||
// bool: "not"
|
||||
// }])).to.have.length(0);
|
||||
//
|
||||
// expect(index.search([{
|
||||
// field: "title",
|
||||
// query: "title",
|
||||
// bool: "not"
|
||||
// },{
|
||||
// field: "body",
|
||||
// query: "body",
|
||||
// bool: "not"
|
||||
// },{
|
||||
// field: "blacklist",
|
||||
// query: "x",
|
||||
// bool: "not"
|
||||
// }])).to.have.length(0);
|
||||
// });
|
||||
// });
|
||||
//
|
||||
// describe("Reserved Words", function(){
|
||||
//
|
||||
// it("Should have been indexed properly", function(){
|
||||
//
|
||||
// var index = new FlexSearch({
|
||||
// encode: function(str){ return [str]; },
|
||||
// tokenize: "strict",
|
||||
// threshold: 0,
|
||||
// depth: 3
|
||||
// });
|
||||
//
|
||||
// var array = Object.getOwnPropertyNames({}.__proto__);
|
||||
// array = array.concat(Object.getOwnPropertyNames(index));
|
||||
//
|
||||
// array.push("prototype");
|
||||
// array.push("constructor");
|
||||
// array.push("__proto__");
|
||||
//
|
||||
// if(env !== "min"){
|
||||
//
|
||||
// array.push("concat");
|
||||
// array.push("hasOwnProperty");
|
||||
// array.push("length");
|
||||
// }
|
||||
//
|
||||
// for(var i = 0; i < array.length; i++){
|
||||
//
|
||||
// index.add(array[i], array[i]);
|
||||
// }
|
||||
//
|
||||
// for(var i = 0; i < array.length; i++){
|
||||
//
|
||||
// // TODO: this word is reserved and can't be indexed
|
||||
// if(array[i] === "_ctx"){
|
||||
//
|
||||
// continue;
|
||||
// }
|
||||
//
|
||||
// expect(index.search(array[i])).to.have.members([array[i]]);
|
||||
// }
|
||||
// });
|
||||
// });
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Export / Import
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// if(env !== "light") describe("Export / Import", function(){
|
||||
//
|
||||
// var data;
|
||||
//
|
||||
// it("Should have been exported properly", function(){
|
||||
//
|
||||
// var index = new FlexSearch("match");
|
||||
//
|
||||
// index.add(0, "foo");
|
||||
// index.add(1, "bar");
|
||||
// index.add(2, "foobar");
|
||||
//
|
||||
// data = index.export();
|
||||
//
|
||||
// if(env === ""){
|
||||
//
|
||||
// expect(data).to.equal(JSON.stringify(
|
||||
// [
|
||||
// index._map,
|
||||
// index._ctx,
|
||||
// Object.keys(index._ids)
|
||||
// ]
|
||||
// ));
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// it("Should have been imported properly", function(){
|
||||
//
|
||||
// var index = new FlexSearch("match");
|
||||
//
|
||||
// index.import(data);
|
||||
//
|
||||
// expect(index.length).to.equal(3);
|
||||
//
|
||||
// expect(index.search("foo")).to.have.lengthOf(2);
|
||||
// expect(index.search("bar")).to.have.lengthOf(2);
|
||||
// expect(index.search("foobar")).to.have.lengthOf(1);
|
||||
// expect(index.search("foobar")[0]).to.equal(2);
|
||||
// });
|
||||
//
|
||||
// it("Should have been exported properly (documents)", function(){
|
||||
//
|
||||
// var index = new FlexSearch({
|
||||
//
|
||||
// tokenize: "strict",
|
||||
// threshold: 1,
|
||||
// resolution: 3,
|
||||
// depth: 1,
|
||||
// doc: {
|
||||
// id: "id",
|
||||
// field: ["title", "content"]
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// var docs = [{
|
||||
// id: 1,
|
||||
// title: "Title 2",
|
||||
// content: "foobar"
|
||||
// },{
|
||||
// id: 0,
|
||||
// title: "Title 1",
|
||||
// content: "foo"
|
||||
// },{
|
||||
// id: 2,
|
||||
// title: "Title 3",
|
||||
// content: "bar"
|
||||
// }];
|
||||
//
|
||||
// index.add(docs);
|
||||
// data = index.export();
|
||||
//
|
||||
// if(env === ""){
|
||||
//
|
||||
// expect(index.doc.index["title"].length).to.equal(3);
|
||||
// expect(data).to.equal(JSON.stringify([
|
||||
// [
|
||||
// index.doc.index["title"]._map,
|
||||
// index.doc.index["title"]._ctx,
|
||||
// Object.keys(index.doc.index["title"]._ids)
|
||||
// ],
|
||||
// [
|
||||
// index.doc.index["content"]._map,
|
||||
// index.doc.index["content"]._ctx,
|
||||
// Object.keys(index.doc.index["content"]._ids)
|
||||
// ],
|
||||
// index._doc
|
||||
// ]));
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// it("Should have been imported properly (documents)", function(){
|
||||
//
|
||||
// var index = new FlexSearch({
|
||||
//
|
||||
// tokenize: "strict",
|
||||
// threshold: 1,
|
||||
// resolution: 3,
|
||||
// depth: 1,
|
||||
// doc: {
|
||||
// id: "id",
|
||||
// field: ["title", "content"]
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// index.import(data);
|
||||
//
|
||||
// if(env === ""){
|
||||
//
|
||||
// expect(index.doc.index["title"].length).to.equal(3);
|
||||
// expect(index.doc.index["content"].length).to.equal(3);
|
||||
// }
|
||||
//
|
||||
// expect(index.search("foo")).to.have.lengthOf(1);
|
||||
// expect(index.search("bar")).to.have.lengthOf(1);
|
||||
// expect(index.search("foobar")).to.have.lengthOf(1);
|
||||
// expect(index.search("foobar")[0].id).to.equal(1);
|
||||
// });
|
||||
// });
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Presets
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// describe("Presets", function(){
|
||||
//
|
||||
// it("Should have been properly initialized", function(){
|
||||
//
|
||||
// expect(FlexSearch("memory").length).to.equal(0);
|
||||
// expect(FlexSearch("speed").length).to.equal(0);
|
||||
// expect(FlexSearch("match").length).to.equal(0);
|
||||
// expect(FlexSearch("score").length).to.equal(0);
|
||||
// expect(FlexSearch("balance").length).to.equal(0);
|
||||
// expect(FlexSearch("fast").length).to.equal(0);
|
||||
// });
|
||||
//
|
||||
// it("Should have been properly extended", function(){
|
||||
//
|
||||
// var index = FlexSearch("fast");
|
||||
// index.add(0, "foobar");
|
||||
// expect(index.search("bar")).to.have.lengthOf(0);
|
||||
//
|
||||
// index = FlexSearch({preset: "speed", id: "test", tokenize: "reverse"});
|
||||
// expect(index.id).to.equal("test");
|
||||
// index.add(0, "foobar");
|
||||
// expect(index.search("bar")).to.have.lengthOf(1);
|
||||
// expect(index.search("bar")).to.have.members([0])
|
||||
// });
|
||||
// });
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Feature Tests
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// describe("Add Matchers", function(){
|
||||
//
|
||||
// it("Should have been added properly", function(){
|
||||
//
|
||||
// flexsearch_forward.init({
|
||||
//
|
||||
// tokenize: "forward",
|
||||
// matcher: {
|
||||
//
|
||||
// "1": "a",
|
||||
// "2": "b",
|
||||
// "3": "c",
|
||||
// "7": "e",
|
||||
// "8": "f",
|
||||
// "[456]": "d"
|
||||
// }
|
||||
//
|
||||
// }).add(0, "12345678");
|
||||
//
|
||||
// expect(flexsearch_forward.search("12345678")).to.include(0);
|
||||
// expect(flexsearch_forward.search("abcd")).to.include(0);
|
||||
// expect(flexsearch_forward.encode("12345678")).to.eql(["abcdddef"]);
|
||||
// });
|
||||
// });
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Caching
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// if(env !== "light"){
|
||||
//
|
||||
// describe("Caching", function(){
|
||||
//
|
||||
// it("Should have been cached properly", function(){
|
||||
//
|
||||
// flexsearch_cache.add(0, "foo")
|
||||
// .add(1, "bar")
|
||||
// .add(2, "foobar");
|
||||
// // fetch:
|
||||
// expect(flexsearch_cache.search("foo")).to.have.members([0, 2]);
|
||||
// expect(flexsearch_cache.search("bar")).to.have.members([1, 2]);
|
||||
// expect(flexsearch_cache.search("foobar")).to.include(2);
|
||||
//
|
||||
// // cache:
|
||||
// expect(flexsearch_cache.search("foo")).to.have.members([0, 2]);
|
||||
// expect(flexsearch_cache.search("bar")).to.have.members([1, 2]);
|
||||
// expect(flexsearch_cache.search("foobar")).to.include(2);
|
||||
//
|
||||
// // update:
|
||||
// flexsearch_cache.remove(2).update(1, "foo").add(3, "foobar");
|
||||
//
|
||||
// // fetch:
|
||||
// expect(flexsearch_cache.search("foo")).to.have.members([0, 1, 3]);
|
||||
// expect(flexsearch_cache.search("bar")).to.include(3);
|
||||
// expect(flexsearch_cache.search("foobar")).to.include(3);
|
||||
//
|
||||
// // cache:
|
||||
// expect(flexsearch_cache.search("foo")).to.have.members([0, 1, 3]);
|
||||
// expect(flexsearch_cache.search("bar")).to.include(3);
|
||||
// expect(flexsearch_cache.search("foobar")).to.include(3);
|
||||
// });
|
||||
// });
|
||||
// }
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Debug Information
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// if(env !== "light" && env !== "min"){
|
||||
//
|
||||
// describe("Debug", function(){
|
||||
//
|
||||
// it("Should have been debug mode activated", function(){
|
||||
//
|
||||
// var info = flexsearch_cache.info();
|
||||
//
|
||||
// expect(info).to.have.keys([
|
||||
//
|
||||
// "id",
|
||||
// //"chars",
|
||||
// "cache",
|
||||
// "items",
|
||||
// "matcher",
|
||||
// //"memory",
|
||||
// //"sequences",
|
||||
// "resolution",
|
||||
// "worker",
|
||||
// "contextual",
|
||||
// "depth",
|
||||
// "threshold"
|
||||
// ]);
|
||||
// });
|
||||
// });
|
||||
// }
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Destroy
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// describe("Destroy", function(){
|
||||
//
|
||||
// it("Should have been destroyed properly", function(){
|
||||
//
|
||||
// var index = FlexSearch()
|
||||
// .add(0, "foo")
|
||||
// .add(1, "bar");
|
||||
//
|
||||
// expect(index.search("foo")).to.include(0);
|
||||
// expect(index.search("bar")).to.include(1);
|
||||
//
|
||||
// index.destroy();
|
||||
//
|
||||
// expect(index.search("foo")).to.have.lengthOf(0);
|
||||
// expect(index.search("bar")).to.have.lengthOf(0);
|
||||
// });
|
||||
//
|
||||
// if(env !== "light") it("Should have been destroyed properly (documents)", function(){
|
||||
//
|
||||
// var data = [{id: 0, title: "foo"}, {id: 1, title: "bar"}];
|
||||
//
|
||||
// var index = FlexSearch({doc: {id: "id", field: "title"}})
|
||||
// .add(data)
|
||||
// .add(data);
|
||||
//
|
||||
// expect(index.search("foo")).to.have.members([data[0]]);
|
||||
// expect(index.search("bar")).to.have.members([data[1]]);
|
||||
//
|
||||
// index.destroy();
|
||||
//
|
||||
// expect(index.search("foo")).to.have.lengthOf(0);
|
||||
// expect(index.search("bar")).to.have.lengthOf(0);
|
||||
// });
|
||||
// });
|
||||
//
|
||||
// // ------------------------------------------------------------------------
|
||||
// // Chaining
|
||||
// // ------------------------------------------------------------------------
|
||||
//
|
||||
// describe("Chaining", function(){
|
||||
//
|
||||
// it("Should have been chained properly", function(){
|
||||
//
|
||||
// var index = FlexSearch({tokenize: "forward", matcher: {"â": "a"}})
|
||||
// .add(0, "foo")
|
||||
// .add(1, "bar");
|
||||
//
|
||||
// expect(index.search("foo")).to.include(0);
|
||||
// expect(index.search("bar")).to.include(1);
|
||||
// expect(index.encode("bâr")).to.eql(["bar"]);
|
||||
//
|
||||
// index.remove(0).update(1, "foo").add(2, "foobâr");
|
||||
//
|
||||
// expect(index.search("foo")).to.have.members([1, 2]);
|
||||
// expect(index.search("bar")).to.have.lengthOf(0);
|
||||
// expect(index.search("foobar")).to.include(2);
|
||||
//
|
||||
// index.clear().add(0, "foo").add(1, "bar");
|
||||
//
|
||||
// expect(index.search("foo")).to.include(0);
|
||||
// expect(index.search("bar")).to.include(1);
|
||||
// expect(index.search("foobar")).to.have.lengthOf(0);
|
||||
//
|
||||
// flexsearch_cache.destroy().init().add(0, "foo").add(1, "bar");
|
||||
//
|
||||
// expect(flexsearch_cache.search("foo")).to.include(0);
|
||||
// expect(flexsearch_cache.search("bar")).to.include(1);
|
||||
// expect(flexsearch_cache.search("foobar")).to.have.lengthOf(0);
|
||||
// });
|
||||
// });
|
||||
//}
|
||||
|
||||
/* Test Helpers */
|
||||
|
||||
function test_encoder(str){
|
||||
|
||||
return "-[" + str.toUpperCase() + "]-";
|
||||
}
|
56
test/context.js
Normal file
56
test/context.js
Normal file
@@ -0,0 +1,56 @@
|
||||
global.self = global;
|
||||
const env = process.argv[3];
|
||||
import { expect } from "chai";
|
||||
let FlexSearch = await import(env ? "../dist/" + env + ".js" : "../src/bundle.js");
|
||||
if(FlexSearch.default) FlexSearch = FlexSearch.default;
|
||||
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;
|
||||
const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;
|
||||
const build_light = env && env.includes(".light");
|
||||
const build_compact = env && env.includes(".compact");
|
||||
const build_esm = !env || env.startsWith("module");
|
||||
const Charset = _Charset || (await import("../src/charset.js")).default;
|
||||
|
||||
describe("Context", function(){
|
||||
|
||||
it("Should have been added properly to the context", function(){
|
||||
|
||||
let index = new Index({
|
||||
tokenize: "strict",
|
||||
context: {
|
||||
depth: 2
|
||||
}
|
||||
});
|
||||
|
||||
index.add(0, "zero one two three four five six seven eight nine ten");
|
||||
|
||||
expect(index.reg.size).to.equal(1);
|
||||
expect(index.search("zero one")).to.include(0);
|
||||
expect(index.search("zero two")).to.include(0);
|
||||
expect(index.search("zero three").length).to.equal(0);
|
||||
expect(index.search("three seven").length).to.equal(0);
|
||||
expect(index.search("three five seven")).to.include(0);
|
||||
expect(index.search("eight six four")).to.include(0);
|
||||
expect(index.search("seven five three")).to.include(0);
|
||||
expect(index.search("three foobar seven").length).to.equal(0);
|
||||
expect(index.search("seven foobar three").length).to.equal(0);
|
||||
expect(index.search("eight ten")).to.include(0);
|
||||
expect(index.search("ten nine seven eight six five three four two zero one")).to.include(0);
|
||||
|
||||
index.add(1, "1 2 3 1 4 2 5 1");
|
||||
|
||||
expect(index.search("1")).to.include(1);
|
||||
expect(index.search("1 5")).to.include(1);
|
||||
expect(index.search("2 4 1")).to.include(1);
|
||||
|
||||
index = new Index({
|
||||
tokenize: "strict",
|
||||
context: {
|
||||
depth: 2,
|
||||
bidirectional: false
|
||||
}
|
||||
});
|
||||
|
||||
index.add(0, "zero one two three four five six seven eight nine ten");
|
||||
expect(index.search("ten nine seven eight six five three four two zero one").length).to.equal(0);
|
||||
});
|
||||
});
|
404
test/document.js
Normal file
404
test/document.js
Normal file
@@ -0,0 +1,404 @@
|
||||
global.self = global;
|
||||
const env = process.argv[3];
|
||||
import { expect } from "chai";
|
||||
let FlexSearch = await import(env ? "../dist/" + env + ".js" : "../src/bundle.js");
|
||||
if(FlexSearch.default) FlexSearch = FlexSearch.default;
|
||||
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;
|
||||
const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;
|
||||
const build_light = env && env.includes(".light");
|
||||
const build_compact = env && env.includes(".compact");
|
||||
const build_esm = !env || env.startsWith("module");
|
||||
const Charset = _Charset || (await import("../src/charset.js")).default;
|
||||
|
||||
if(!build_light) describe("Document (Multi-Field Search)", function(){
|
||||
|
||||
const data = [{
|
||||
id: 2,
|
||||
data: { title: "Title 3", body: "Body 3" }
|
||||
},{
|
||||
id: 1,
|
||||
data: { title: "Title 2", body: "Body 2" }
|
||||
},{
|
||||
id: 0,
|
||||
data: { title: "Title 1", body: "Body 1" }
|
||||
}];
|
||||
|
||||
const update = [{
|
||||
id: 0,
|
||||
data: { title: "Foo 1", body: "Bar 1" }
|
||||
},{
|
||||
id: 1,
|
||||
data: { title: "Foo 2", body: "Bar 2" }
|
||||
},{
|
||||
id: 2,
|
||||
data: { title: "Foo 3", body: "Bar 3" }
|
||||
}];
|
||||
|
||||
it("Should have been indexed properly", function(){
|
||||
|
||||
const document = new Document({
|
||||
document: {
|
||||
id: "id",
|
||||
field: [
|
||||
"data:title",
|
||||
"data:body"
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
const document_with_store = new Document({
|
||||
document: {
|
||||
store: true,
|
||||
id: "id",
|
||||
field: [
|
||||
"data:title",
|
||||
"data:body"
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
for(let i = 0; i < data.length; i++){
|
||||
document.add(data[i]);
|
||||
document_with_store.add(data[i]);
|
||||
}
|
||||
|
||||
expect(document.index.size).to.equal(2);
|
||||
expect(document.reg.size).to.equal(3);
|
||||
// Registry Sharing
|
||||
expect(document.index.get("data:title").reg).to.equal(document.reg);
|
||||
expect(document.index.get("data:title").reg).to.not.equal(document_with_store.reg);
|
||||
|
||||
expect(document.search({
|
||||
query: "title"
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "title",
|
||||
field: "data:title"
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "title",
|
||||
pluck: "data:title",
|
||||
enrich: true
|
||||
}).map(res => res.doc)).to.eql(data);
|
||||
|
||||
expect(document.search({
|
||||
field: "data:body",
|
||||
query: "title"
|
||||
})).to.have.lengthOf(0)
|
||||
|
||||
expect(document.search({
|
||||
field: "data:title",
|
||||
query: "body"
|
||||
})).to.have.lengthOf(0);
|
||||
|
||||
expect(document.search({
|
||||
field: "data:body",
|
||||
query: "body"
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
field: ["data:title"],
|
||||
query: "title"
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
field: ["data:title", "data:body"],
|
||||
query: "body"
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
field: ["data:body", "data:title"],
|
||||
query: "title"
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
field: ["data:title", "data:body"],
|
||||
query: "body"
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
field: ["data:body", "data:title"],
|
||||
query: "title"
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search("body", {
|
||||
field: "data:body"
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search("title", {
|
||||
field: ["data:title"]
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "body"
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search("title")).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search([{
|
||||
field: "data:title",
|
||||
query: "body"
|
||||
},{
|
||||
field: "data:body",
|
||||
query: "body"
|
||||
}])).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
// ---------------------------------------
|
||||
|
||||
for(let i = 0; i < update.length; i++){
|
||||
document.add(update[i]);
|
||||
document_with_store.add(update[i]);
|
||||
}
|
||||
|
||||
expect(document.search("foo")).to.eql([{
|
||||
field: "data:title",
|
||||
result: [0, 1, 2]
|
||||
}]);
|
||||
|
||||
expect(document.search("bar")).to.eql([{
|
||||
field: "data:body",
|
||||
result: [0, 1, 2]
|
||||
}]);
|
||||
|
||||
expect(document.search("foo bar", { suggest: true })).to.eql([{
|
||||
field: "data:title",
|
||||
result: [0, 1, 2]
|
||||
},{
|
||||
field: "data:body",
|
||||
result: [0, 1, 2]
|
||||
}]);
|
||||
|
||||
expect(document.search("foo bar", { suggest: true, merge: true })).to.eql([
|
||||
{ id: 0, field: [ 'data:title', 'data:body' ] },
|
||||
{ id: 1, field: [ 'data:title', 'data:body' ] },
|
||||
{ id: 2, field: [ 'data:title', 'data:body' ] }
|
||||
]);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "foo",
|
||||
pluck: "data:title",
|
||||
enrich: true
|
||||
}).map(res => res.doc)).to.eql(update);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "bar",
|
||||
pluck: "data:body",
|
||||
enrich: true
|
||||
}).map(res => res.doc)).to.eql(update);
|
||||
|
||||
// ---------------------------------------
|
||||
|
||||
for(let i = 0; i < update.length; i++){
|
||||
document.remove(update[i]);
|
||||
document_with_store.remove(update[i]);
|
||||
}
|
||||
|
||||
expect(document.reg.size).to.equal(0);
|
||||
expect(document.index.get("data:title").reg.size).to.equal(0);
|
||||
expect(document.index.get("data:body").reg.size).to.equal(0);
|
||||
expect(document.index.get("data:title").map.size).to.equal(0);
|
||||
expect(document.index.get("data:body").map.size).to.equal(0);
|
||||
expect(document_with_store.store.size).to.equal(0);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "foo",
|
||||
})).to.eql([]);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "bar"
|
||||
})).to.eql([]);
|
||||
});
|
||||
|
||||
|
||||
it("Should have been unique results", function(){
|
||||
|
||||
const document = new Document({
|
||||
document: {
|
||||
id: "id",
|
||||
field: ["field1", "field2"]
|
||||
}
|
||||
});
|
||||
|
||||
const data = [{
|
||||
id: 1,
|
||||
field1: "phrase",
|
||||
field2: "phrase next"
|
||||
},{
|
||||
id: 2,
|
||||
field1: "phrase next",
|
||||
field2: "phrase"
|
||||
}];
|
||||
|
||||
for(let i = 0; i < data.length; i++){
|
||||
document.add(data[i]);
|
||||
}
|
||||
|
||||
expect(document.search("phrase")).to.eql([{
|
||||
field: "field1",
|
||||
result: [1, 2]
|
||||
},{
|
||||
field: "field2",
|
||||
result: [1, 2]
|
||||
}]);
|
||||
|
||||
expect(document.search("phrase", { suggest: true })).to.eql([{
|
||||
field: "field1",
|
||||
result: [1, 2]
|
||||
},{
|
||||
field: "field2",
|
||||
result: [1, 2]
|
||||
}]);
|
||||
});
|
||||
|
||||
it("Should have been sorted properly by number of field count matches", function(){
|
||||
|
||||
const document = new Document({
|
||||
document: {
|
||||
id: "id",
|
||||
field: ["field1", "field2"]
|
||||
}
|
||||
});
|
||||
|
||||
const data = [{
|
||||
id: 1,
|
||||
field1: "phrase",
|
||||
field2: "phrase next"
|
||||
},{
|
||||
id: 2,
|
||||
field1: "phrase next",
|
||||
field2: "phrase"
|
||||
}];
|
||||
|
||||
for(let i = 0; i < data.length; i++){
|
||||
document.add(data[i]);
|
||||
}
|
||||
|
||||
expect(document.search("phrase", { suggest: true, merge: true })).to.eql([
|
||||
{ id: 1, field: [ 'field1', 'field2' ] },
|
||||
{ id: 2, field: [ 'field1', 'field2' ] }
|
||||
]);
|
||||
|
||||
expect(document.search("phrase next", { suggest: true, merge: true })).to.eql([
|
||||
{ id: 2, field: [ 'field1', 'field2' ] },
|
||||
{ id: 1, field: [ 'field1', 'field2' ] }
|
||||
]);
|
||||
});
|
||||
|
||||
it("Should not have been shared the Encoder", function(){
|
||||
|
||||
const document = new Document({
|
||||
document: {
|
||||
id: "id",
|
||||
field: ["field1", "field2"]
|
||||
}
|
||||
});
|
||||
|
||||
expect(document.index.get("field1").encoder).not.to.equal(
|
||||
document.index.get("field2").encoder
|
||||
);
|
||||
});
|
||||
|
||||
it("Should have been shared the Encoder", function(){
|
||||
|
||||
const document = new Document({
|
||||
encoder: new Encoder(),
|
||||
document: {
|
||||
id: "id",
|
||||
field: ["field1", "field2"]
|
||||
}
|
||||
});
|
||||
|
||||
expect(document.index.get("field1").encoder).to.equal(
|
||||
document.index.get("field2").encoder
|
||||
);
|
||||
});
|
||||
|
||||
it("Should have been applied limit/offset properly", function(){
|
||||
|
||||
const document = new Document({
|
||||
document: {
|
||||
store: true,
|
||||
id: "id",
|
||||
field: [
|
||||
"data:title",
|
||||
"data:body"
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
for(let i = 0; i < data.length; i++){
|
||||
document.add(data[i]);
|
||||
}
|
||||
|
||||
expect(document.search({
|
||||
query: "title",
|
||||
pluck: "data:title",
|
||||
enrich: true,
|
||||
suggest: true,
|
||||
limit: 2
|
||||
}).map(res => res.doc)).to.eql([data[0], data[1]]);
|
||||
|
||||
expect(document.search({
|
||||
query: "body",
|
||||
pluck: "data:body",
|
||||
enrich: true,
|
||||
suggest: true,
|
||||
limit: 1,
|
||||
offset: 1
|
||||
}).map(res => res.doc)).to.eql([data[1]]);
|
||||
|
||||
expect(document.search({
|
||||
query: "title",
|
||||
suggest: true,
|
||||
limit: 1,
|
||||
offset: 3
|
||||
})).to.eql([]);
|
||||
|
||||
expect(document.search({
|
||||
query: "title",
|
||||
suggest: true,
|
||||
offset: 3
|
||||
})).to.eql([]);
|
||||
});
|
||||
});
|
204
test/document.tag.js
Normal file
204
test/document.tag.js
Normal file
@@ -0,0 +1,204 @@
|
||||
global.self = global;
|
||||
const env = process.argv[3];
|
||||
import { expect } from "chai";
|
||||
let FlexSearch = await import(env ? "../dist/" + env + ".js" : "../src/bundle.js");
|
||||
if(FlexSearch.default) FlexSearch = FlexSearch.default;
|
||||
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;
|
||||
const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;
|
||||
const build_light = env && env.includes(".light");
|
||||
const build_compact = env && env.includes(".compact");
|
||||
const build_esm = !env || env.startsWith("module");
|
||||
const Charset = _Charset || (await import("../src/charset.js")).default;
|
||||
|
||||
if(!build_light) describe("Documents: Tag-Search", function(){
|
||||
|
||||
const data = [{
|
||||
id: 2,
|
||||
data: { title: "Title 3", body: "Body 3", cat: "A" }
|
||||
},{
|
||||
id: 1,
|
||||
data: { title: "Title 2", body: "Body 2", cat: "B" }
|
||||
},{
|
||||
id: 0,
|
||||
data: { title: "Title 1", body: "Body 1", cat: "A" }
|
||||
}];
|
||||
|
||||
const update = [{
|
||||
id: 0,
|
||||
data: { title: "Foo 1", body: "Bar 1", cat: "B" }
|
||||
},{
|
||||
id: 1,
|
||||
data: { title: "Foo 2", body: "Bar 2", cat: "A" }
|
||||
},{
|
||||
id: 2,
|
||||
data: { title: "Foo 3", body: "Bar 3", cat: "B" }
|
||||
}];
|
||||
|
||||
it("Should have been indexed properly (tag)", function(){
|
||||
|
||||
const document = new Document({
|
||||
document: {
|
||||
id: "id",
|
||||
field: ["data:body", "data:title"],
|
||||
tag: "data:cat"
|
||||
}
|
||||
});
|
||||
|
||||
const document_with_store = new Document({
|
||||
document: {
|
||||
store: true,
|
||||
id: "id",
|
||||
field: ["data:body", "data:title"],
|
||||
tag: "data:cat"
|
||||
}
|
||||
});
|
||||
|
||||
for(let i = 0; i < data.length; i++){
|
||||
document.add(data[i]);
|
||||
document_with_store.add(data[i]);
|
||||
}
|
||||
|
||||
expect(document.index.size).to.equal(2);
|
||||
expect(document.tag.size).to.equal(1);
|
||||
expect(document.reg.size).to.equal(3);
|
||||
expect(document_with_store.store.size).to.equal(3);
|
||||
|
||||
expect(document.search({
|
||||
query: "title"
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "title",
|
||||
tag: { "data:cat": "A" }
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "body",
|
||||
tag: { "data:cat": "B" }
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [1]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "title",
|
||||
tag: [
|
||||
{ "data:cat": "A" },
|
||||
{ "data:cat": "B" }
|
||||
]
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "body title",
|
||||
suggest: true,
|
||||
tag: [
|
||||
{ "data:cat": "A" },
|
||||
{ "data:cat": "B" }
|
||||
]
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
},{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
// todo suggestions should return all results like one below
|
||||
expect(document.search({
|
||||
query: "body title",
|
||||
suggest: true,
|
||||
tag: [
|
||||
{ "data:cat": "C" }, // not exists
|
||||
{ "data:cat": "B" }
|
||||
]
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [1]
|
||||
},{
|
||||
field: "data:title",
|
||||
result: [1]
|
||||
}]);
|
||||
|
||||
// suggestions on
|
||||
expect(document.search({
|
||||
query: "body title",
|
||||
suggest: true,
|
||||
tag: [
|
||||
{ "data:cat": "C" } // not exists
|
||||
]
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [2, 1, 0]
|
||||
},{
|
||||
field: "data:title",
|
||||
result: [2, 1, 0]
|
||||
}]);
|
||||
|
||||
// suggestions off
|
||||
expect(document.search({
|
||||
query: "body title",
|
||||
tag: [
|
||||
{ "data:cat": "C" } // not exists
|
||||
]
|
||||
})).to.eql([]);
|
||||
|
||||
// ---------------------------------------
|
||||
|
||||
for(let i = 0; i < update.length; i++){
|
||||
document.add(update[i]);
|
||||
document_with_store.add(update[i]);
|
||||
}
|
||||
|
||||
expect(document.search("foo")).to.eql([{
|
||||
field: "data:title",
|
||||
result: [0, 1, 2]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "foo",
|
||||
tag: { "data:cat": "A" }
|
||||
})).to.eql([{
|
||||
field: "data:title",
|
||||
result: [1]
|
||||
}]);
|
||||
|
||||
expect(document.search({
|
||||
query: "bar",
|
||||
tag: { "data:cat": "B" }
|
||||
})).to.eql([{
|
||||
field: "data:body",
|
||||
result: [0, 2]
|
||||
}]);
|
||||
|
||||
// ---------------------------------------
|
||||
|
||||
for(let i = 0; i < update.length; i++){
|
||||
document.remove(update[i]);
|
||||
document_with_store.remove(update[i]);
|
||||
}
|
||||
|
||||
expect(document.reg.size).to.equal(0);
|
||||
expect(document.index.get("data:title").reg.size).to.equal(0);
|
||||
expect(document.index.get("data:body").reg.size).to.equal(0);
|
||||
expect(document.index.get("data:title").map.size).to.equal(0);
|
||||
expect(document.index.get("data:body").map.size).to.equal(0);
|
||||
expect(document_with_store.store.size).to.equal(0);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "foo",
|
||||
})).to.eql([]);
|
||||
|
||||
expect(document_with_store.search({
|
||||
query: "bar"
|
||||
})).to.eql([]);
|
||||
});
|
||||
});
|
338
test/encoder.js
Normal file
338
test/encoder.js
Normal file
@@ -0,0 +1,338 @@
|
||||
global.self = global;
|
||||
const env = process.argv[3];
|
||||
import { expect } from "chai";
|
||||
let FlexSearch = await import(env ? "../dist/" + env + ".js" : "../src/bundle.js");
|
||||
if(FlexSearch.default) FlexSearch = FlexSearch.default;
|
||||
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;
|
||||
const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;
|
||||
const build_light = env && env.includes(".light");
|
||||
const build_compact = env && env.includes(".compact");
|
||||
const build_esm = !env || env.startsWith("module");
|
||||
const Charset = _Charset || (await import("../src/charset.js")).default;
|
||||
|
||||
describe("Encoder", function(){
|
||||
|
||||
it("Should have been properly added a custom encoder", function(){
|
||||
|
||||
const encode = str => str.toLowerCase().split(/\s+/);
|
||||
const index = new Index({ encoder: encode });
|
||||
expect(index.encoder.encode).to.eql(encode);
|
||||
});
|
||||
|
||||
it("Should have been properly added a custom encode (alternative)", function(){
|
||||
|
||||
const encode = str => str.toLowerCase().split(/\s+/);
|
||||
const index = new Index({ encode });
|
||||
expect(index.encoder.encode).to.eql(encode);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Encoder: Latin Charset", function(){
|
||||
|
||||
it("Should have been encoded properly: LatinDefault", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinDefault });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(["björn", "phillipp", "mayer"]);
|
||||
});
|
||||
|
||||
if(env !== "light"){
|
||||
|
||||
it("Should have been encoded properly: LatinExact", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinExact });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(["Björn-Phillipp", "Mayer"]);
|
||||
});
|
||||
|
||||
it("Should have been encoded properly: LatinSimple", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinSimple });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(index.encoder.encode("bjorn/phillipp mayer"));
|
||||
});
|
||||
|
||||
it("Should have been encoded properly: LatinBalance", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinBalance });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(index.encoder.encode("bjorn philip mair"));
|
||||
});
|
||||
|
||||
it("Should have been encoded properly: LatinAdvanced", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinAdvanced });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(index.encoder.encode("bjoern filip mair"));
|
||||
});
|
||||
|
||||
it("Should have been encoded properly: LatinExtra", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinExtra });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(index.encoder.encode("bjorm filib mayr"));
|
||||
});
|
||||
|
||||
it("Should have been encoded properly: LatinSoundex", function(){
|
||||
|
||||
const index = new Index({ encoder: Charset.LatinSoundex });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql(index.encoder.encode("bjoernsen philippo mayr"));
|
||||
});
|
||||
}
|
||||
|
||||
it("Should have been encoded properly: Custom Encoder", function(){
|
||||
|
||||
function test_encoder(str){
|
||||
return "-[" + str.toUpperCase() + "]-";
|
||||
}
|
||||
|
||||
const index = new Index({ encoder: test_encoder });
|
||||
expect(index.encoder.encode("Björn-Phillipp Mayer")).to.eql("-[BJÖRN-PHILLIPP MAYER]-");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Encoder: CJK Word Break", function(){
|
||||
|
||||
it("Should have been tokenized properly", function(){
|
||||
|
||||
const index = Index({
|
||||
encoder: Charset.CjkDefault,
|
||||
tokenize: "forward"
|
||||
});
|
||||
|
||||
index.add(0, "서울시가 잠이 든 시간에 아무 말, 미뤄, 미뤄");
|
||||
expect(index.search("든")).to.include(0);
|
||||
expect(index.search("시간에")).to.include(0);
|
||||
|
||||
index.add(1, "一个单词");
|
||||
expect(index.search("一个")).to.include(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Encoder: Cyrillic Word Break", function(){
|
||||
|
||||
it("Should have been tokenized properly", function(){
|
||||
|
||||
const index = Index({
|
||||
encoder: Charset.CyrillicDefault,
|
||||
tokenize: "forward"
|
||||
});
|
||||
|
||||
index.add(0, "Фообар");
|
||||
expect(index.search("Фообар")).to.include(0);
|
||||
expect(index.search("Фоо")).to.include(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Encoder: Arabic Word Break", function(){
|
||||
|
||||
it("Should have been tokenized properly", function(){
|
||||
|
||||
let index = Index({
|
||||
encoder: Charset.ArabicDefault,
|
||||
tokenize: "forward"
|
||||
});
|
||||
|
||||
index.add(0, "لكن لا بد أن أوضح لك أن كل");
|
||||
expect(index.search("بد أن")).to.include(0);
|
||||
expect(index.search("أو")).to.include(0);
|
||||
index = Index({
|
||||
encoder: Charset.ArabicDefault,
|
||||
tokenize: "reverse"
|
||||
});
|
||||
|
||||
index.add(0, "لكن لا بد أن أوضح لك أن كل");
|
||||
expect(index.search("ضح")).to.include(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Encoder: Right-to-Left", function(){
|
||||
|
||||
it("Should have been scored properly", function(){
|
||||
|
||||
let index = new Index({
|
||||
tokenize: "forward",
|
||||
rtl: true
|
||||
});
|
||||
|
||||
index.add(0, "54321 4 3 2 0");
|
||||
index.add(1, "0 2 3 4 54321");
|
||||
index.add(2, "0 2 3 4 12345");
|
||||
|
||||
expect(index.search("5")).to.eql([2]);
|
||||
expect(index.search("1")).to.eql([1, 0]);
|
||||
|
||||
index = new Index({
|
||||
tokenize: "reverse",
|
||||
rtl: true
|
||||
});
|
||||
|
||||
index.add(0, "54321 4 3 2 1 0");
|
||||
index.add(1, "0 1 2 3 4 54321");
|
||||
index.add(2, "0 1 2 3 4 12345");
|
||||
|
||||
expect(index.search("5")).to.eql([2, 1, 0]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Filter", function(){
|
||||
|
||||
it("Should have been filtered properly", function(){
|
||||
|
||||
let encoder = new Encoder({
|
||||
filter: ["in", "the"]
|
||||
});
|
||||
let index = new Index({
|
||||
tokenize: "strict",
|
||||
encoder: encoder
|
||||
});
|
||||
|
||||
index.add(0, "Today in the morning.");
|
||||
|
||||
expect(index.search("today in the morning.")).to.include(0);
|
||||
expect(index.search("today morning")).to.include(0);
|
||||
expect(index.search("in the")).to.have.length(0);
|
||||
|
||||
index = new Index({
|
||||
tokenize: "strict",
|
||||
encoder: encoder,
|
||||
context: true
|
||||
});
|
||||
|
||||
index.add(0, "Today in the morning.");
|
||||
expect(index.search("today morning")).to.include(0);
|
||||
|
||||
encoder = new Encoder();
|
||||
encoder.addFilter("in");
|
||||
index = new Index({
|
||||
tokenize: "strict",
|
||||
encoder: encoder
|
||||
});
|
||||
index.encoder.addFilter("the");
|
||||
|
||||
index.add(0, "Today in the morning.");
|
||||
expect(index.search("in the")).to.have.length(0);
|
||||
});
|
||||
|
||||
it("Should have been filtered properly (custom function)", function(){
|
||||
|
||||
const encoder = new Encoder({
|
||||
filter: ["in", "the"],
|
||||
finalize: function(word){
|
||||
return word.filter(t => t.length > 3);
|
||||
}
|
||||
});
|
||||
const index = new Index({
|
||||
tokenize: "strict",
|
||||
encoder: encoder
|
||||
});
|
||||
|
||||
index.add(0, "Today in the morning.");
|
||||
|
||||
expect(index.search("today in the morning.")).to.include(0);
|
||||
expect(index.search("today morning")).to.include(0);
|
||||
expect(index.search("in the")).to.have.length(0);
|
||||
});
|
||||
|
||||
it("Should have been filtered properly (minlength)", function(){
|
||||
|
||||
const encoder = new Encoder({
|
||||
filter: ["in", "the"],
|
||||
minlength: 4
|
||||
});
|
||||
const index = new Index({
|
||||
tokenize: "strict",
|
||||
encoder: encoder
|
||||
});
|
||||
|
||||
index.add(0, "Today in the morning.");
|
||||
|
||||
expect(index.search("today in the morning.")).to.include(0);
|
||||
expect(index.search("today morning")).to.include(0);
|
||||
expect(index.search("in the")).to.have.length(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Stemmer", function(){
|
||||
|
||||
it("Should have been stemmed properly", function(){
|
||||
|
||||
const encoder = new Encoder({
|
||||
stemmer: new Map([
|
||||
["ization", "ize"],
|
||||
["tional", "tion"]
|
||||
])
|
||||
});
|
||||
const index = new Index({
|
||||
tokenize: "strict",
|
||||
encoder: encoder
|
||||
});
|
||||
|
||||
index.add(0, "Just a multinational colonization.");
|
||||
|
||||
expect(index.search("Just a multinational colonization.")).to.include(0);
|
||||
expect(index.search("multinational colonization")).to.include(0);
|
||||
expect(index.search("tional tion")).to.have.length(0);
|
||||
});
|
||||
|
||||
// it("Should have been stemmed properly (custom function)", function(){
|
||||
//
|
||||
// var stems = {
|
||||
// "ization": "ize",
|
||||
// "tional": "tion"
|
||||
// };
|
||||
//
|
||||
// var index = new FlexSearch({
|
||||
// tokenize: "strict",
|
||||
// stemmer: function(word){
|
||||
// return stems[word] || word;
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// index.add(0, "Just a multinational colonization.");
|
||||
//
|
||||
// expect(index.length).to.equal(1);
|
||||
// expect(index.search("Just a multinational colonization.")).to.include(0);
|
||||
// expect(index.search("multinational colonization")).to.include(0);
|
||||
// expect(index.search("tional tion")).to.have.length(0);
|
||||
// });
|
||||
// });
|
||||
//
|
||||
//
|
||||
// describe("Custom Language", function(){
|
||||
//
|
||||
// it("Should have been applied properly", function(){
|
||||
//
|
||||
// var index = new FlexSearch({
|
||||
// tokenize: "reverse",
|
||||
// filter: ["a", "an"],
|
||||
// stemmer: {
|
||||
// "ization": "ize",
|
||||
// "tional": "tion"
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// index.add(0, "Just a multinational colonization.");
|
||||
//
|
||||
// expect(index.length).to.equal(1);
|
||||
// expect(index.search("Just a multinational colonization.")).to.include(0);
|
||||
// expect(index.search("Just an multinational colonization.")).to.include(0);
|
||||
// expect(index.search("multinational colonization")).to.include(0);
|
||||
// expect(index.search("tional tion")).to.have.length(0);
|
||||
//
|
||||
// FlexSearch.registerLanguage("custom", {
|
||||
// filter: ["a", "an"],
|
||||
// stemmer: {
|
||||
// "ization": "ize",
|
||||
// "tional": "tion"
|
||||
// }
|
||||
// });
|
||||
//
|
||||
// index = new FlexSearch({
|
||||
// tokenize: "reverse",
|
||||
// lang: "custom"
|
||||
// });
|
||||
//
|
||||
// index.add(0, "Just a multinational colonization.");
|
||||
//
|
||||
// expect(index.length).to.equal(1);
|
||||
// expect(index.search("Just a multinational colonization.")).to.include(0);
|
||||
// expect(index.search("Just an multinational colonization.")).to.include(0);
|
||||
// expect(index.search("multinational colonization")).to.include(0);
|
||||
// expect(index.search("tional tion")).to.have.length(0);
|
||||
// });
|
||||
});
|
90
test/issues.js
Normal file
90
test/issues.js
Normal file
@@ -0,0 +1,90 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;


describe("Github Issues", function(){

    // https://github.com/nextapps-de/flexsearch/issues/48
    // A worker-backed document index should not choke on a trailing "-" token.
    if(!build_light && !build_compact) it("#48", async function(){

        const store = await new Document({
            encoder: Charset.LatinExtra,
            resolution: 9,
            context: {
                depth: 4
            },
            worker: true,
            cache: true,
            doc: {
                id: "id",
                field: [ "intent", "text" ]
            }
        });

        const entries = [{
            id: 0,
            intent: "intent",
            text: "text"
        },{
            id: 1,
            intent: "intent",
            text: "howdy - how are you doing"
        }];

        for(const entry of entries){
            await store.add(entry);
        }

        expect(await store.search("howdy")).to.eql([{
            field: "text",
            result: [1]
        }]);
        // the trailing "-" should be stripped by the encoder, not break the query
        expect(await store.search("howdy -")).to.eql([{
            field: "text",
            result: [1]
        }]);

        // shut down the worker threads so the process can exit
        store.index.get("intent").worker.terminate();
        store.index.get("text").worker.terminate();
    });

    // https://github.com/nextapps-de/flexsearch/issues/54
    // Hyphenated terms should still match the plain term across fields.
    if(!build_light) it("#54", function(){

        const index = new Document({
            doc: {
                id: "id",
                field: ["title", "content"]
            }
        });

        const docs = [{
            id: 1,
            title: "Roaming Inquiry",
            content: "Some content"
        }, {
            id: 2,
            title: "New Service",
            content: "This is not roaming-inquiry"
        }];

        for(const doc of docs){
            index.add(doc);
        }

        expect(index.search("roaming")).to.eql([{
            field: "title",
            result: [1]
        },{
            field: "content",
            result: [2]
        }]);
    });
});
|
21
test/misc/reporter.js
Normal file
21
test/misc/reporter.js
Normal file
@@ -0,0 +1,21 @@
|
||||
const libCoverage = require('istanbul-lib-coverage');
|
||||
const { createReporter } = require('istanbul-api');
|
||||
|
||||
const coverage_1 = require('./.nyc_output/coverage.json');
|
||||
const coverage_2 = require('./.nyc_output/coverage2.json');
|
||||
|
||||
// Jest wraps each file's coverage record under a `data` key, while istanbul
// expects the raw coverage objects. Unwrap every wrapped entry.
// Returns a new object; the caller's argument is left untouched (the
// previous version reassigned entries on the input object in place).
const normalizeJestCoverage = (obj) => {
    const result = { ...obj };
    Object.entries(result).forEach(([key, value]) => {
        if (value.data) result[key] = value.data;
    });
    return result;
};
|
||||
|
||||
// Merge both coverage runs into a single coverage map and emit the
// combined reports (html, json, lcov, text).
const coverageMap = libCoverage.createCoverageMap();
for(const coverage of [coverage_1, coverage_2]){
    coverageMap.merge(normalizeJestCoverage(coverage));
}

const reporter = createReporter();
reporter.addAll(['html', 'json', 'lcov', 'text']);
reporter.write(coverageMap);
|
0
test/misc/runner.js
Normal file
0
test/misc/runner.js
Normal file
11
test/persistent.js
Normal file
11
test/persistent.js
Normal file
@@ -0,0 +1,11 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;
|
431
test/resolver.js
Normal file
431
test/resolver.js
Normal file
@@ -0,0 +1,431 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;


if(!build_light && !build_compact) describe("Resolver", function(){

    // Shared fixture: a forward-tokenized index over 7 documents.
    // Every document contains "cats" and "cute"; only id 1 contains "dogs".
    function create_sample_index(){
        const index = new Index({ tokenize: "forward" });
        const docs = [
            'cats abcd efgh ijkl mnop qrst uvwx cute',
            'cats abcd efgh ijkl mnop dogs cute', // <-- dogs
            'cats abcd efgh ijkl mnop cute',
            'cats abcd efgh ijkl cute',
            'cats abcd efgh cute',
            'cats abcd cute',
            'cats cute'
        ];
        for(let id = 0; id < docs.length; id++){
            index.add(id, docs[id]);
        }
        return index;
    }

    it("Should have been created a Resolver properly", function(){

        const index = new Index({ tokenize: "reverse" });
        index.add(1, "foo");
        index.add(2, "bar");
        index.add(3, "FooBar");

        // resolve: false returns a chainable Resolver instead of a plain result
        const resolver = index.search("foo bar", { resolve: false, suggest: true });

        expect(resolver).to.be.instanceof(Resolver);
        for(const method of ["and", "or", "xor", "not", "boost", "limit", "offset", "resolve"]){
            expect(resolver).to.respondTo(method);
        }

        expect(resolver.result).to.eql([[3, 1, 2]]);
    });

    it("Should have been created a Resolver properly (alternative)", function(){

        const index = new Index({ tokenize: "reverse" });
        index.add(1, "foo");
        index.add(2, "bar");
        index.add(3, "FooBar");

        // a Resolver can also be constructed directly from a query descriptor
        const resolver = new Resolver({
            index: index,
            query: "foo bar",
            suggest: true
        });

        expect(resolver).to.be.instanceof(Resolver);
        expect(resolver.result).to.eql([[3, 1, 2]]);
    });

    it("Should have been resolved a Resolver properly", function(){

        const index = new Index({ tokenize: "reverse" });
        index.add(1, "foo");
        index.add(2, "bar");
        index.add(3, "FooBar");

        // plain resolve()
        let result = new Resolver({
            index: index,
            query: "foo bar",
            suggest: true
        }).resolve();

        expect(result.length).to.equal(3);
        expect(result).to.eql([3, 1, 2]);

        // resolve() with pagination options
        result = new Resolver({
            index: index,
            query: "foo bar",
            suggest: true
        }).resolve({
            limit: 1,
            offset: 1
        });

        expect(result.length).to.equal(1);
        expect(result).to.eql([1]);

        // `resolve: true` on a chained stage resolves immediately
        result = new Resolver({
            index: index,
            query: "bar",
            suggest: true
        }).and({
            index: index,
            query: "foo",
            suggest: true,
            resolve: true
        });

        expect(result.length).to.equal(3);
        expect(result).to.eql([3, 2, 1]);
    });

    it("Should have been apply \"and\" properly", function(){

        const index = create_sample_index();

        let resolver = new Resolver({
            index: index,
            query: "cat"
        });

        expect(resolver.result).to.eql([[0, 1, 2, 3, 4, 5, 6]]);

        resolver = resolver.and({
            index: index,
            query: "cute"
        });

        expect(resolver.result).to.eql([
            void 0,
            [6],
            [5],
            [4],
            [3],
            [2],
            [1],
            [0]
        ]);

        resolver = resolver.and({
            index: index,
            query: "dog"
        });

        expect(resolver.result).to.eql([
            void 0,
            void 0,
            void 0,
            void 0,
            void 0,
            void 0,
            [1]
        ]);

        // a non-matching term with suggest enabled keeps the previous result
        resolver = resolver.and({
            index: index,
            query: "fish",
            suggest: true
        });

        expect(resolver.result).to.eql([
            void 0,
            void 0,
            void 0,
            void 0,
            void 0,
            void 0,
            [1]
        ]);

        // a non-matching term without suggest empties the result
        resolver = resolver.and({
            index: index,
            query: "bird"
        });

        expect(resolver.result).to.eql([]);

        resolver = resolver.and({
            index: index,
            query: "dog",
            suggest: true
        });

        expect(resolver.result).to.eql([
            void 0,
            void 0,
            void 0,
            void 0,
            void 0,
            [1]
        ]);
    });

    it("Should have been apply \"or\" properly", function(){

        const index = create_sample_index();

        let resolver = new Resolver({
            index: index,
            query: "cat"
        }).or({
            index: index,
            query: "cute"
        });

        expect(resolver.result).to.eql([[0, 1, 2, 3, 4, 5, 6]]);

        // todo: support passing an array of query descriptors to .or()
        // todo: support nested compositions like .or({ and: [ ... ] })
        resolver = resolver.or({
            index: index,
            query: "fish"
        }).or({
            index: index,
            query: "dog"
        }).or({
            index: index,
            query: "horse"
        }).or({
            index: index,
            query: "dog"
        }).or({
            index: index,
            query: "horse"
        });

        expect(resolver.result).to.eql([[0, 1, 2, 3, 4, 5, 6]]);
    });

    it("Should have been apply \"xor\" properly", function(){

        const index = new Index();
        index.add(1, "foo foo");
        index.add(2, "bar bar");
        index.add(3, "foo bar");
        index.add(4, "bar foo");

        // only documents matching exactly one of the two queries survive
        const resolver = new Resolver({
            index: index,
            query: "foo"
        }).xor({
            index: index,
            query: "bar"
        });

        expect(resolver.result).to.eql([[1, 2]]);

    });

    it("Should have been apply \"not\" properly", function(){

        const index = create_sample_index();

        // excluding a term contained in every document empties the result
        let resolver = new Resolver({
            index: index,
            query: "cute"
        }).not({
            index: index,
            query: "cat"
        });

        expect(resolver.result).to.eql([]);

        resolver = new Resolver({
            index: index,
            query: "cute"
        }).not({
            index: index,
            query: "dog"
        });

        expect(resolver.result).to.eql([
            void 0,
            [6],
            [5],
            [4],
            [3],
            [2],
            void 0, // dogs
            [0]
        ]);

    });

    it("Should have been apply \"limit\" and \"offset\" properly", function(){

        const index = create_sample_index();

        let resolver = new Resolver({
            index: index,
            query: "cute"
        }).limit(3);

        expect(resolver.result).to.eql([
            void 0,
            [6],
            [5],
            [4]
        ]);

        resolver = new Resolver({
            index: index,
            query: "cute"
        }).offset(3).limit(2);

        expect(resolver.result).to.eql([
            void 0,
            void 0, // offset +1
            void 0, // offset +2
            void 0, // offset +3
            [3],
            [2]
        ]);
    });

    it("Should have been apply \"boost\" properly", function(){

        const index = create_sample_index();

        // the boost value shifts where the boosted partial result is slotted
        // in relative to the merged result
        const expectations = new Map([
            [0, [
                [ 0, 2, 3, 4, 5, 6 ],
                void 0,
                [ 1 ]
            ]],
            [1, [
                void 0,
                [ 0, 2, 3, 4, 5, 6 ],
                [ 1 ]
            ]],
            [2, [
                void 0,
                void 0,
                [ 1, 0, 2, 3, 4, 5, 6 ]
            ]],
            [3, [
                void 0,
                void 0,
                [ 1 ],
                [ 0, 2, 3, 4, 5, 6 ]
            ]]
        ]);

        for(const [boost, expected] of expectations){

            const resolver = new Resolver({
                index: index,
                query: "dog"
            }).boost(boost).or({
                index: index,
                query: "cat"
            });

            expect(resolver.result).to.eql(expected);
        }
    });
});
|
118
test/scoring.js
Normal file
118
test/scoring.js
Normal file
@@ -0,0 +1,118 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;


describe("Scoring", function(){

    it("Should have been sorted by relevance properly", function(){

        // 1) plain strict index, positional scoring only
        let index = new Index({
            tokenize: "strict",
            resolution: 10
        });

        index.add(0, "1 2 3 2 4 1 5 3");
        index.add(1, "zero one two three four five six seven eight nine ten");
        index.add(2, "four two zero one three ten five seven eight six nine");

        expect(index.search("1")).to.eql([0]);
        expect(index.search("one")).to.eql([1, 2]);
        expect(index.search("one two")).to.eql([1, 2]);
        expect(index.search("four one")).to.eql([2, 1]);

        // 2) contextual scoring, uni-directional term pairs
        index = new Index({
            tokenize: "strict",
            context: {
                depth: 3,
                bidirectional: false
            }
        });

        index.add(0, "1 2 3 2 4 1 5 3");
        index.add(1, "zero one two three four five six seven eight nine ten");
        index.add(2, "four two zero one three ten five seven eight six nine");

        expect(index.search("1")).to.eql([0]);
        expect(index.search("one")).to.eql([1, 2]);
        expect(index.search("one two")).to.eql([2]); // 1: no bi-directional
        expect(index.search("four one")).to.eql([1]); // 2: no bi-directional

        // 3) contextual scoring, bi-directional term pairs
        index = new Index({
            tokenize: "strict",
            context: {
                depth: 3,
                bidirectional: true
            }
        });

        index.add(0, "1 2 3 2 4 1 5 3");
        index.add(1, "zero one two three four five six seven eight nine ten");
        index.add(2, "five two zero one three four ten seven eight six nine");

        expect(index.search("1 3 4")).to.eql([0]);
        expect(index.search("1 5 3 4")).to.eql([0]);
        expect(index.search("1 3 4 7")).to.have.lengthOf(0);
        expect(index.search("one")).to.eql([1, 2]);
        expect(index.search("one three")).to.eql([1, 2]);
        expect(index.search("three one")).to.eql([1, 2]);
        expect(index.search("zero five one ten")).to.eql([2]);
        expect(index.search("zero two one three two five")).to.eql([1]);
        expect(index.search("one zero two one zero three")).to.eql([1, 2]);
        // todo context chain
        //expect(index.search("zero two one three two five")).to.eql([1, 2]);
    });
});
|
||||
|
||||
describe("Suggestions", function(){

    it("Should have been suggested properly by relevance", function(){

        const index = new Index({ tokenize: "strict" });

        index.add(0, "1 2 3 2 4 1 5 3");
        index.add(1, "zero one two three four five six seven eight nine ten");
        index.add(2, "four two zero one three ten five seven eight six nine");

        // without suggest a partial mismatch yields nothing
        expect(index.search("1 3 4 7", { suggest: false })).to.have.lengthOf(0);
        // with suggest the unmatched terms are dropped
        expect(index.search("1 3 4 7", { suggest: true })).to.eql([0]);
        expect(index.search("1 3 9 7", { suggest: true })).to.eql([0]);

        // unknown terms are ignored regardless of their position in the query
        expect(index.search("foobar one two", { suggest: true })).to.eql([1, 2]);
        expect(index.search("foobar one four", { suggest: true })).to.eql([2, 1]);
        expect(index.search("one foobar two", { suggest: true })).to.eql([1, 2]);
        expect(index.search("one two foobar", { suggest: true })).to.eql([1, 2]);
        expect(index.search("zero one foobar two foobar", { suggest: true })).to.eql([1, 2]);
    });

    it("Should have been suggested properly by context", function(){

        const index = new Index({
            tokenize: "strict",
            context: {
                depth: 3,
                bidirectional: true
            }
        });

        index.add(1, "zero one two three four five six seven eight nine ten");
        index.add(2, "four two zero one three ten five seven eight six nine");

        // unknown terms must not break contextual scoring
        expect(index.search("foobar one", { suggest: true })).to.eql([1, 2]);
        expect(index.search("foobar two", { suggest: true })).to.eql([2, 1]);
        expect(index.search("foobar foobar foobar one foobar two foobar foobar", { suggest: true })).to.eql([1, 2]);
        expect(index.search("foobar foobar foobar two foobar one foobar foobar", { suggest: true })).to.eql([1, 2]);
        expect(index.search("foobar one two", { suggest: true })).to.eql([1, 2]);
        expect(index.search("one foobar two", { suggest: true })).to.eql([1, 2]);
        expect(index.search("one two foobar", { suggest: true })).to.eql([1, 2]);
        expect(index.search("foobar one foobar two foobar", { suggest: true })).to.eql([1, 2]);
        expect(index.search("zero one foobar two foobar", { suggest: true })).to.eql([1, 2]);
    });
});
|
11
test/serialize.js
Normal file
11
test/serialize.js
Normal file
@@ -0,0 +1,11 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;
|
57
test/tokenize.js
Normal file
57
test/tokenize.js
Normal file
@@ -0,0 +1,57 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;


describe("Tokenizer", function(){

    it("Should have been added properly to the index: Strict", function(){

        // "strict" is the default tokenizer, test both implicit and explicit
        let index = new Index();
        index.add(0, "björn phillipp mayer");

        expect(index.search("björn phillipp")).to.include(0);
        expect(index.search("björn mayer")).to.include(0);

        index = new Index({ tokenize: "strict" });
        index.add(0, "björn phillipp mayer");

        expect(index.search("björn phillipp")).to.include(0);
        expect(index.search("björn mayer")).to.include(0);
    });

    it("Should have been added properly to the index: Forward", function(){

        // "forward" matches term prefixes
        const index = new Index({ tokenize: "forward" });
        index.add(0, "björn phillipp mayer");

        expect(index.search("bjö phil may")).to.have.lengthOf(1);
        expect(index.search("bjö phil may")).to.include(0);
    });

    it("Should have been added properly to the index: Reverse", function(){

        // "reverse" matches term prefixes and suffixes
        const index = new Index({ tokenize: "reverse" });
        index.add(0, "björn phillipp mayer");

        expect(index.search("jörn phil er")).to.have.lengthOf(1);
        expect(index.search("jörn lipp er")).to.have.lengthOf(1);
        expect(index.search("jörn lipp er")).to.include(0);
    });

    it("Should have been added properly to the index: Full", function(){

        // "full" matches arbitrary in-term fragments
        const index = new Index({ tokenize: "full" });
        index.add(0, "björn phillipp mayer");

        expect(index.search("jör illi may")).to.have.lengthOf(1);
        expect(index.search("jör illi may")).to.include(0);
    });
});
|
102
test/worker.js
Normal file
102
test/worker.js
Normal file
@@ -0,0 +1,102 @@
|
||||
global.self = global;

// Optional build target passed on the command line (e.g. "flexsearch.bundle.min")
const env = process.argv[3];

import { expect } from "chai";

// Load either a pre-built release from dist/ or the raw source bundle
let FlexSearch = await import(env ? `../dist/${env}.js` : "../src/bundle.js");
if(FlexSearch.default) FlexSearch = FlexSearch.default;
if(FlexSearch.FlexSearch) FlexSearch = FlexSearch.FlexSearch;

const { Index, Document, Worker: WorkerIndex, Charset: _Charset, Encoder, Resolver } = FlexSearch;

// Feature flags derived from the selected build
const build_light = env && env.includes(".light");
const build_compact = env && env.includes(".compact");
const build_esm = !env || env.startsWith("module");

// Fall back to the source charset module when the build does not export Charset
const Charset = _Charset || (await import("../src/charset.js")).default;


if(!build_light && !build_compact) describe("Worker", function(){

    let index;

    // every test spawns a worker thread; terminate it so mocha can exit
    afterEach(function() {
        index && index.worker.terminate();
    });

    it("Should have the proper basic functionality", async function(){

        // the WorkerIndex constructor returns a thenable and must be awaited
        index = await new WorkerIndex({
            encoder: "LatinAdvanced",
            tokenize: "forward"
        });

        const data = [
            'cats abcd efgh ijkl mnop qrst uvwx cute',
            'cats abcd efgh ijkl mnop dogs cute',
            'cats abcd efgh ijkl mnop cute',
            'cats abcd efgh ijkl cute',
            'cats abcd efgh cute',
            'cats abcd cute',
            'cats cute'
        ];

        for(let id = 0; id < data.length; id++){
            await index.addAsync(id, data[id]);
        }

        // the index data lives inside the worker, not on the main-thread handle
        expect(index.reg).to.be.undefined;
        expect(index.map).to.be.undefined;

        expect(await index.search("cat cute")).to.eql([6, 5, 4, 3, 2, 1, 0]);
        expect(await index.search("cute cat")).to.eql([6, 5, 4, 3, 2, 1, 0]);

        // phonetic normalization by the "LatinAdvanced" encoder
        expect(await index.search("cudi tok-kat")).to.eql([1]);
    });

    it("Should update the index contents properly", async function(){

        index = await new WorkerIndex({ tokenize: "full" });

        await index.add(1, "foo");
        await index.add(2, "bar");
        await index.add(3, "foobar");

        // applying the same updates twice must be idempotent
        for(let run = 0; run < 2; run++){

            await index.update(1, "bar");
            await index.update(2, "foobar");
            await index.update(3, "foo");

            expect(await index.search("foo")).to.have.members([2, 3]);
            expect(await index.search("bar")).to.have.members([1, 2]);
            expect(await index.search("bar")).to.not.include(3);
            expect(await index.search("foobar")).to.have.members([2]);
            expect(await index.search("oba")).to.have.members([2]);
        }
    });

    it("Should have been removed from the index", async function(){

        index = await new WorkerIndex({ tokenize: "full" });
        await index.add(1, "bar");
        await index.add(2, "foobar");
        await index.add(3, "foo");

        // removing an unknown id (4) must not throw
        for(const id of [2, 1, 3, 4]){
            await index.remove(id);
        }

        for(const term of ["foo", "bar", "foobar"]){
            expect(await index.search(term)).to.have.lengthOf(0);
        }
    });
});
|
||||
|
||||
|
Reference in New Issue
Block a user