Nicolas Wavrant / jio · Commits

Commit 4c418987, authored Jun 10, 2013 by Tristan Cavelier

    indexstorage.js: reworked to manage queries, increase speed and to fix some bug

Parent: c65d2469

Changes (2): showing 2 changed files with 867 additions and 1265 deletions
  src/jio.storage/indexstorage.js   +598  -856
  test/jiotests.js                  +269  -409

src/jio.storage/indexstorage.js (view file @ 4c418987)
/*
 * Copyright 2013, Nexedi SA
 * Released under the LGPL license.
 * http://www.gnu.org/licenses/lgpl.html
 */
/*jslint indent: 2, maxlen: 80, sloppy: true, nomen: true */
/*global jIO: true, localStorage: true, setTimeout: true */
/*jslint indent: 2, maxlen: 80, sloppy: true, nomen: true, regexp: true */
/*global jIO: true, localStorage: true, define: true, complex_queries: true */
/**
 * JIO Index Storage.
 * Manages indexes for specified storages.
 * Description:
 * {
 *   "type": "index",
 *   "indices": [
 *     {"indexA", ["field_A"]},
 *     {"indexAB", ["field_A", "field_B"]}
 *   ],
 *   "field_types": {
 *     "field_A": "dateTime",
 *     "field_B": "string"
 *   },
 *   "indices": [{
 *     "id": "index_title_subject.json", // doc id where to store indices
 *     "index": ["title", "subject"],    // metadata to index
 *     "sub_storage": <sub storage where to store index>
 *                    (default equal to parent sub_storage field)
 *   }, {
 *     "id": "index_year.json",
 *     "index": "year",
 *     ...
 *   }],
 *   "sub_storage": <sub storage description>
 * }
 *
 * Sent document metadata will be:
 * index_title_subject.json
 * {
 *   "_id": "index_title_subject.json",
 *   "indexing": ["title", "subject"],
 *   "free": [0],
 *   "location": {
 *     "foo": 1,
 *     "bar": 2,
 *     ...
 *   },
 *   "storage": [
 *     <sub storage description>,
 *   "database": [
 *     {},
 *     {"_id": "foo", "title": "...", "subject": ...},
 *     {"_id": "bar", "title": "...", "subject": ...},
 *     ...
 *   ]
 * }
 * Index file will contain
 *
 * index_year.json
 * {
 *   "_id": "app-name_indices.json",
 *   "indexA": {
 *     "fieldA": {
 *       "keyword_abc": ["some_id", "some_other_id", ...]
 *     }
 *   },
 *   "indexAB": {
 *     "fieldA": {
 *       "keyword_abc": ["some_id"]
 *     },
 *   "_id": "index_year.json",
 *   "indexing": ["year"],
 *   "free": [1],
 *   "location": {
 *     "foo": 0,
 *     "bar": 2,
 *     ...
 *   },
 *     "fieldB": {
 *       "keyword_def": ["some_id"]
 *     }
 *   }
 *   "database": [
 *     {"_id": "foo", "year": "..."},
 *     {},
 *     {"_id": "bar", "year": "..."},
 *     ...
 *   ]
 * }
 * NOTES:
 * It may be difficult to "un-sort" multi-field indices, like indexAB,
 * because all keywords will be listed regardless of underlying field,
 * so an index on author and year would produce two entries per record
 * like:
 *
 *   "William Shakespeare": ["id_Romeo_and_Juliet", "id_Othello"],
 *   "1591": ["id_Romeo_and_Juliet"],
 *   "1603": ["id_Othello"]
 *
 * A put document will be indexed to the free location if it exists, else it
 * will be indexed at the end of the database. The document id will also be
 * indexed in 'location' to quickly replace metadata.
 *
 * So for direct lookups this should be convenient, but for other types of
 * queries it depends.
 * Only one or two loops are executed:
 * - one to filter the retrieved document list (no query -> no loop)
 * - one to format the result to a JIO response
 */
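/*
 * Editor's illustration (assumed usage, not part of the original file): a
 * storage description of the shape exercised by test/jiotests.js below. The
 * index ids "A"/"B" and the "username" value are arbitrary examples.
 *
 *   var jio = JIO.newJio({
 *     "type": "indexed",
 *     "indices": [
 *       {"id": "A", "index": ["author"]},
 *       {"id": "B", "index": ["title", "year"]}
 *     ],
 *     "sub_storage": {"type": "local", "username": "indexed_example"}
 *   });
 *   // documents go to the sub storage; "A" and "B" hold the index databases
 *   jio.put({"_id": "doc1", "author": "John Doe", "year": 2013});
 */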
jIO.addStorageType('indexed', function (spec, my) {

(function () {
  "use strict";

  var that, priv = {};
  spec = spec || {};
  that = my.basicStorage(spec, my);

  priv.indices = spec.indices;
  priv.field_types = spec.field_types;
  priv.substorage_key = "sub_storage";
  priv.substorage = spec[priv.substorage_key];
  priv.index_indicator = spec.sub_storage.application_name || "index";
  priv.index_suffix = priv.index_indicator + "_indices.json";
  my.env = my.env || spec.env || {};

  var error_dict = {
    "Corrupted Index": {
      "status": 24,
      "statusText": "Corrupt",
      "error": "corrupt",
      "reason": "corrupted index database"
    },
    "Corrupted Metadata": {
      "status": 24,
      "statusText": "Corrupt",
      "error": "corrupt",
      "reason": "corrupted document"
    },
    "Not Found": {
      "status": 404,
      "statusText": "Not Found",
      "error": "not_found",
      "reason": "missing document"
    },
    "Conflict": {
      "status": 409,
      "statusText": "Conflicts",
      "error": "conflicts",
      "reason": "already exist"
    }
  };
  that.specToStore = function () {
    var o = {};
    o[priv.substorage_key] = priv.substorage;
    o.env = my.env;
    return o;
  };

  /**
   * Generate a JIO Error Object
   *
   * @method generateErrorObject
   * @param {String} name The error name
   * @param {String} message The error message
   * @param {String} [reason] The error reason
   * @return {Object} A jIO error object
   */
  function generateErrorObject(name, message, reason) {
    if (!error_dict[name]) {
      return {
        "status": 0,
        "statusText": "Unknown",
        "error": "unknown",
        "message": message,
        "reason": reason || "unknown"
      };
    }
    return {
      "status": error_dict[name].status,
      "statusText": error_dict[name].statusText,
      "error": error_dict[name].error,
      "message": message,
      "reason": reason || error_dict[name].reason
    };
  }

  /**
   * Get the real type of an object
   * @method type
   * @param {Any} value The value to check
   * @return {String} The value type
   */
  function type(value) {
    // returns "String", "Object", "Array", "RegExp", ...
    return (/^\[object ([a-zA-Z]+)\]$/).exec(
      Object.prototype.toString.call(value)
    )[1];
  }
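  // Editor's note (illustration, not part of the original file): type()
  // extracts the internal [[Class]] name, for example:
  //   type([1, 2])  -> "Array"
  //   type({})      -> "Object"
  //   type("abc")   -> "String"
  //   type(/abc/)   -> "RegExp"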
  /**
   * Generate a new uuid
   * @method generateUuid
   * @return {string} The new uuid
   */
  priv.generateUuid = function () {
  function generateUuid() {
    var S4 = function () {
      var i, string = Math.floor(
        Math.random() * 0x10000 /* 65536 */
...
...
@@ -98,610 +162,428 @@ jIO.addStorageType('indexed', function (spec, my) {
      S4() + "-" + S4() + "-" + S4() + S4() + S4();
    };
  }

  /**
   * Get number of elements in object
   * @method getObjectSize
   * @param {object} obj The object to check
   * @return {number} size The amount of elements in the object
   * A JSON Index manipulator
   *
   * @class JSONIndex
   * @constructor
   */
  priv.getObjectSize = function (obj) {
    var size = 0, key;
    for (key in obj) {
      if (obj.hasOwnProperty(key)) {
        size += 1;
      }
    }
    return size;
  };
  function JSONIndex(spec) {
    var that = this;
    spec = spec || {};
    /**
     * Creates an empty indices array
     * @method createEmptyIndexArray
     * @param {array} indices An array of indices (optional)
     * @return {object} The new index array
     * The document id
     *
     * @property _id
     * @type String
     */
  priv.createEmptyIndexArray = function (indices) {
    var i, k, j = priv.indices.length,
      new_index, new_index_object = {}, new_index_name, new_index_fields;
    that._id = spec._id;
    if (indices === undefined) {
      for (i = 0; i < j; i += 1) {
        new_index = priv.indices[i];
        new_index_name = new_index.name;
        new_index_fields = new_index.fields;
        new_index_object[new_index_name] = {};
    /**
     * The array with metadata key to index
     *
     * @property _indexing
     * @type Array
     */
    that._indexing = spec.indexing || [];
        // loop index fields and add objects to hold value/id pairs
        for (k = 0; k < new_index_fields.length; k += 1) {
          new_index_object[new_index_name][new_index_fields[k]] = {};
        }
      }
    }
    return new_index_object;
  };
    /**
     * The array of free location index
     *
     * @property _free
     * @type Array
     * @default []
     */
    that._free = spec.free || [];
    /**
     * The dictionnary document id -> database index
     *
     * @property _location
     * @type Object
     * @default {}
     */
    that._location = spec.location || {};
    /**
     * Determine if a key/value pair exists in an object by VALUE
     * @method searchObjectByValue
     * @param {object} indexToSearch The index to search
     * @param {string} docid The document id to find
     * @param {string} passback The value that should be returned
     * @return {boolean} true/false
     * The database array containing document metadata
     *
     * @property _database
     * @type Array
     * @default []
     */
  priv.searchIndexByValue = function (indexToSearch, docid, passback) {
    var key, obj, prop;
    that._database = spec.database || [];
    for (key in indexToSearch) {
      if (indexToSearch.hasOwnProperty(key)) {
        obj = indexToSearch[key];
        for (prop in obj) {
          if (obj[prop] === docid) {
            return passback === "bool" ? true : key;
          }
    /**
     * Adds a metadata object in the database, replace if already exist
     *
     * @method put
     * @param {Object} meta The metadata to add
     * @return {Boolean} true if added, false otherwise
     */
    that.put = function (meta) {
      var underscored_meta_re = /^_.*$/, k, needed_meta = {}, ok = false;
      if (typeof meta._id !== "string" && meta._id !== "") {
        throw new TypeError("Corrupted Metadata");
      }
      for (k in meta) {
        if (meta.hasOwnProperty(k)) {
          if (underscored_meta_re.test(k)) {
            needed_meta[k] = meta[k];
          } else if (that._indexing_object[k]) {
            needed_meta[k] = meta[k];
            ok = true;
          }
        }
      }
      if (ok) {
        if (typeof that._location[meta._id] === "number") {
          that._database[that._location[meta._id]] = needed_meta;
        } else if (that._free.length > 0) {
          k = that._free.shift();
          that._database[k] = needed_meta;
          that._location[meta._id] = k;
        } else {
          that._database.push(needed_meta);
          that._location[meta._id] = that._database.length - 1;
        }
        return true;
      }
      if (typeof that._location[meta._id] === "number") {
        that.remove(meta);
      }
      return false;
    };

    /**
     * Get element position in array
     * @method getPositionInArray
     * @param {object} indices The index file
     * @param {object} indices The index file
     * @returns {number} i Position of element in array
     * Removes a metadata object from the database if exist
     *
     * @method remove
     * @param {Object} meta The metadata to remove
     */
  priv.getPositionInArray = function (element, array) {
    var i, l = array.length;
    for (i = 0; i < l; i += 1) {
      if (array[i] === element) {
        return i;
    that.remove = function (meta) {
      if (typeof meta._id !== "string") {
        throw new TypeError("Corrupted Metadata");
      }
      if (typeof that._location[meta._id] !== "number") {
        throw new ReferenceError("Not Found");
      }
    return null;
      that._database[that._location[meta._id]] = null;
      that._free.push(that._location[meta._id]);
      delete that._location[meta._id];
    };
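    /*
     * Editor's illustration (assumed, not part of the original file): a
     * minimal sketch of the bookkeeping that put()/remove() maintain, using
     * plain variables named after _database/_location/_free above.
     *
     *   var database = [], location = {}, free = [];
     *   // put "foo": no free slot yet, so append at the end
     *   database.push({"_id": "foo", "title": "a"});
     *   location.foo = 0;                 // database: [{_id: "foo", ...}]
     *   // remove "foo": null the slot and remember it in the free list
     *   database[location.foo] = null;
     *   free.push(location.foo);          // database: [null], free: [0]
     *   delete location.foo;
     *   // put "bar": reuse the free slot instead of growing the database
     *   database[free.shift()] = {"_id": "bar"};
     *   location.bar = 0;                 // database: [{_id: "bar"}], free: []
     */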
/**
* Find id in indices
* @method isDocidInIndex
* @param {object} indices The file containing the indeces
* @param {object} doc The document which should be added to the index
* @return {boolean} true/false
* Checks if the index document is correct
*
* @method check
*/
priv
.
isDocidInIndex
=
function
(
indices
,
doc
)
{
var
index
,
i
,
j
,
label
,
l
=
priv
.
indices
.
length
;
// loop indices
for
(
i
=
0
;
i
<
l
;
i
+=
1
)
{
index
=
{};
index
.
reference
=
priv
.
indices
[
i
];
index
.
reference_size
=
index
.
reference
.
fields
.
length
;
index
.
current
=
indices
[
index
.
reference
.
name
];
for
(
j
=
0
;
j
<
index
.
reference_size
;
j
+=
1
)
{
label
=
index
.
reference
.
fields
[
j
];
index
.
current_size
=
priv
.
getObjectSize
(
index
.
current
[
label
]);
// check for existing entries to remove (put-update)
if
(
index
.
current_size
>
0
)
{
if
(
priv
.
searchIndexByValue
(
index
.
current
[
label
],
doc
.
_id
,
"
bool
"
))
{
return
true
;
}
that
.
check
=
function
()
{
var
id
,
database_meta
;
if
(
typeof
that
.
_id
!==
"
string
"
||
that
.
_id
===
""
||
type
(
that
.
_free
)
!==
"
Array
"
||
type
(
that
.
_indexing
)
!==
"
Array
"
||
type
(
that
.
_location
)
!==
"
Object
"
||
type
(
that
.
_database
)
!==
"
Array
"
||
that
.
_indexing
.
length
===
0
)
{
throw
new
TypeError
(
"
Corrupted Index
"
);
}
for
(
id
in
that
.
_location
)
{
if
(
that
.
_location
.
hasOwnProperty
(
id
))
{
database_meta
=
that
.
_database
[
that
.
_location
[
id
]];
if
(
type
(
database_meta
)
!==
"
Object
"
||
database_meta
.
_id
!==
id
)
{
throw
new
TypeError
(
"
Corrupted Index
"
);
}
}
}
return
false
;
};
/**
* Clean up indexes when removing a file
* @method cleanIndices
* @param {object} indices The file containing the indeces
* @param {object} doc The document which should be added to the index
* @return {object} indices The cleaned up file
* Recreates database indices and remove free space
*
* @method repair
*/
priv
.
cleanIndices
=
function
(
indices
,
doc
)
{
var
i
,
j
,
k
,
index
,
key
,
label
,
l
=
priv
.
indices
.
length
;
// loop indices (indexA, indexAB...)
for
(
i
=
0
;
i
<
l
;
i
+=
1
)
{
index
=
{};
index
.
reference
=
priv
.
indices
[
i
];
index
.
reference_size
=
index
.
reference
.
fields
.
length
;
index
.
current
=
indices
[
index
.
reference
.
name
];
// loop index fields
for
(
j
=
0
;
j
<
index
.
reference_size
;
j
+=
1
)
{
label
=
index
.
reference
.
fields
[
j
];
index
.
current_size
=
priv
.
getObjectSize
(
index
.
current
[
label
]);
// loop field entries
for
(
k
=
0
;
k
<
index
.
current_size
;
k
+=
1
)
{
key
=
priv
.
searchIndexByValue
(
index
.
current
[
label
],
doc
.
_id
,
"
key
"
);
index
.
result_array
=
index
.
current
[
label
][
key
];
if
(
!!
key
)
{
// if there is more than one docid in the result array,
// just remove this one and not the whole array
if
(
index
.
result_array
.
length
>
1
)
{
index
.
result_array
.
splice
(
k
,
1
);
that
.
repair
=
function
()
{
var
i
=
0
,
meta
;
that
.
_free
=
[];
that
.
_location
=
{};
if
(
type
(
that
.
_database
)
!==
"
Array
"
)
{
that
.
_database
=
[];
}
while
(
i
<
that
.
_database
.
length
)
{
meta
=
that
.
_database
[
i
];
if
(
type
(
meta
)
===
"
Object
"
&&
typeof
meta
.
_id
===
"
string
"
&&
meta
.
_id
!==
""
&&
!
that
.
_location
[
meta
.
_id
])
{
that
.
_location
[
meta
.
_id
]
=
i
;
i
+=
1
;
}
else
{
delete
index
.
current
[
label
][
key
];
}
}
}
that
.
_database
.
splice
(
i
,
1
);
}
}
return
indices
;
};
/**
* Adds entries to indices
* @method createEmptyIndexArray
* @param {object} indices The file containing the indeces
* @param {object} doc The document which should be added to the index
* Returns the serialized version of this object (not cloned)
*
* @method serialized
* @return {Object} The serialized version
*/
priv
.
updateIndices
=
function
(
indices
,
doc
)
{
var
i
,
j
,
index
,
value
,
label
,
key
,
l
=
priv
.
indices
.
length
;
// loop indices
for
(
i
=
0
;
i
<
l
;
i
+=
1
)
{
index
=
{};
index
.
reference
=
priv
.
indices
[
i
];
index
.
reference_size
=
index
.
reference
.
fields
.
length
;
index
.
current
=
indices
[
index
.
reference
.
name
];
// build array of values to create entries in index
for
(
j
=
0
;
j
<
index
.
reference_size
;
j
+=
1
)
{
label
=
index
.
reference
.
fields
[
j
];
value
=
doc
[
label
];
if
(
value
!==
undefined
)
{
index
.
current_size
=
priv
.
getObjectSize
(
index
.
current
[
label
]);
// check for existing entries to remove (put-update)
if
(
index
.
current_size
>
0
)
{
key
=
priv
.
searchIndexByValue
(
index
.
current
[
label
],
doc
.
_id
,
"
key
"
);
if
(
!!
key
)
{
delete
index
.
current
[
label
][
key
];
}
}
if
(
index
.
current
[
label
][
value
]
===
undefined
)
{
index
.
current
[
label
][
value
]
=
[];
}
// add a new entry
index
.
current
[
label
][
value
].
push
(
doc
.
_id
);
}
}
}
return
indices
;
that
.
serialized
=
function
()
{
return
{
"
_id
"
:
that
.
_id
,
"
indexing
"
:
that
.
_indexing
,
"
free
"
:
that
.
_free
,
"
location
"
:
that
.
_location
,
"
database
"
:
that
.
_database
};
};
that
.
check
();
that
.
_indexing_object
=
{};
that
.
_indexing
.
forEach
(
function
(
meta_key
)
{
that
.
_indexing_object
[
meta_key
]
=
true
;
});
}
/**
* Check available indices to find the best one.
* TODOS: NOT NICE, redo
* @method findBestIndexForQuery
* @param {object} syntax of query
* @returns {object} response The query object constructed from Index file
* The JIO index storage constructor
*/
priv
.
findBestIndexForQuery
=
function
(
syntax
)
{
var
i
,
j
,
k
,
l
,
n
,
p
,
o
,
element
,
key
,
block
,
search_ids
,
use_index
=
[],
select_ids
=
{},
index
,
query_param
,
current_query
,
current_query_size
;
// try to parse into object
if
(
syntax
.
query
!==
undefined
)
{
current_query
=
jIO
.
ComplexQueries
.
parse
(
syntax
.
query
);
}
else
{
current_query
=
{};
current_query_size
=
0
;
}
function
indexStorage
(
spec
,
my
)
{
var
that
,
priv
=
{};
// loop indices
for
(
i
=
0
;
i
<
priv
.
indices
.
length
;
i
+=
1
)
{
search_ids
=
[];
block
=
false
;
index
=
{};
index
.
reference
=
priv
.
indices
[
i
];
index
.
reference_size
=
index
.
reference
.
fields
.
length
;
if
(
current_query_size
!==
0
)
{
// rebuild search_ids for iteration
if
(
current_query
.
query_list
===
undefined
)
{
search_ids
.
push
(
current_query
.
id
);
}
else
{
for
(
j
=
0
;
j
<
current_query
.
query_list
.
length
;
j
+=
1
)
{
if
(
priv
.
getPositionInArray
(
current_query
.
query_list
[
j
].
id
,
search_ids
)
===
null
)
{
search_ids
.
push
(
current_query
.
query_list
[
j
].
id
);
}
}
}
that
=
my
.
basicStorage
(
spec
,
my
);
// loop search ids and find matches in index
for
(
k
=
0
;
k
<
search_ids
.
length
;
k
+=
1
)
{
query_param
=
search_ids
[
0
];
for
(
l
=
0
;
l
<
index
.
reference_size
;
l
+=
1
)
{
if
(
query_param
===
index
.
reference
.
fields
[
l
])
{
search_ids
.
splice
(
priv
.
getPositionInArray
(
query_param
,
search_ids
),
1
);
}
}
}
}
priv
.
indices
=
spec
.
indices
;
priv
.
sub_storage
=
spec
.
sub_storage
;
// rebuild select_ids
for
(
o
=
0
;
o
<
syntax
.
filter
.
select_list
.
length
;
o
+=
1
)
{
element
=
syntax
.
filter
.
select_list
[
o
];
select_ids
[
element
]
=
true
;
}
// Overrides
// search_ids empty = all needed search fields found on index
if
(
search_ids
.
length
===
0
)
{
p
=
priv
.
getObjectSize
(
select_ids
);
if
(
p
===
0
)
{
use_index
.
push
({
"
name
"
:
index
.
reference
.
name
,
"
search
"
:
true
,
"
results
"
:
false
});
}
else
{
for
(
n
=
0
;
n
<
index
.
reference_size
;
n
+=
1
)
{
delete
select_ids
[
index
.
reference
.
fields
[
n
]];
}
for
(
key
in
select_ids
)
{
if
(
select_ids
.
hasOwnProperty
(
key
))
{
use_index
.
push
({
"
name
"
:
index
.
reference
.
name
,
"
search
"
:
true
,
"
results
"
:
false
});
block
=
true
;
}
}
if
(
block
===
false
)
{
use_index
.
push
({
"
name
"
:
index
.
reference
.
name
,
"
search
"
:
true
,
"
results
"
:
true
});
}
}
}
}
return
use_index
;
that
.
specToStore
=
function
()
{
return
{
"
indices
"
:
priv
.
indices
,
"
sub_storage
"
:
priv
.
sub_storage
};
};
/**
* Converts the indices file into an object usable by complex queries
* @method constructQueryObject
* @param {object} indices The index file
* @returns {object} response The query object constructed from Index file
* Return the similarity percentage (1 >= p >= 0) between two index lists.
*
* @method similarityPercentage
* @param {Array} list_a An index list
* @param {Array} list_b Another index list
* @return {Number} The similarity percentage
*/
priv
.
constructQueryObject
=
function
(
indices
,
query_syntax
)
{
var
j
,
k
,
l
,
m
,
n
,
use_index
,
index
,
index_name
,
field_names
,
field
,
key
,
element
,
query_index
,
query_object
=
[],
field_name
,
entry
;
// returns index-to-use|can-do-query|can-do-query-and-results
use_index
=
priv
.
findBestIndexForQuery
(
query_syntax
);
if
(
use_index
.
length
>
0
)
{
for
(
j
=
0
;
j
<
use_index
.
length
;
j
+=
1
)
{
index
=
use_index
[
j
];
// NOTED: the index could be used to:
// (a) get all document ids matching query
// (b) get all document ids and results (= run complex query on index)
// right now, only (b) is supported, because the complex query is
// a single step process. If it was possible to first get the
// relevant document ids, then get the results, the index could be
// used to do the first step plus use GET on the returned documents
if
(
index
.
search
&&
index
.
results
)
{
index_name
=
use_index
[
j
].
name
;
query_index
=
indices
[
index_name
];
// get fieldnames from this index
for
(
k
=
0
;
k
<
priv
.
indices
.
length
;
k
+=
1
)
{
if
(
priv
.
indices
[
k
].
name
===
use_index
[
j
].
name
)
{
field_names
=
priv
.
indices
[
k
].
fields
;
}
}
for
(
l
=
0
;
l
<
field_names
.
length
;
l
+=
1
)
{
field_name
=
field_names
[
l
];
// loop entries for this field name
field
=
query_index
[
field_name
];
for
(
key
in
field
)
{
if
(
field
.
hasOwnProperty
(
key
))
{
element
=
field
[
key
];
// key can be "string" or "number" right now
if
(
priv
.
field_types
[
field_name
]
===
"
number
"
)
{
key
=
+
key
;
}
for
(
m
=
0
;
m
<
element
.
length
;
m
+=
1
)
{
if
(
priv
.
searchIndexByValue
(
query_object
,
element
[
m
],
"
bool
"
))
{
// loop object
for
(
n
=
0
;
n
<
query_object
.
length
;
n
+=
1
)
{
entry
=
query_object
[
n
];
if
(
entry
.
id
===
element
[
m
])
{
entry
[
field_name
]
=
key
;
}
}
}
else
{
entry
=
{};
entry
.
id
=
element
[
m
];
entry
[
field_name
]
=
key
;
query_object
.
push
(
entry
);
}
}
}
priv
.
similarityPercentage
=
function
(
list_a
,
list_b
)
{
var
ai
,
bi
,
count
=
0
;
for
(
ai
=
0
;
ai
<
list_a
.
length
;
ai
+=
1
)
{
for
(
bi
=
0
;
bi
<
list_b
.
length
;
bi
+=
1
)
{
if
(
list_a
[
ai
]
===
list_b
[
bi
])
{
count
+=
1
;
}
}
}
return
count
/
(
list_a
.
length
>
list_b
.
length
?
list_a
.
length
:
list_b
.
length
);
};
/**
* Select the good index to use according to a select list.
*
* @method selectIndex
* @param {Array} select_list An array of strings
* @return {Number} The index index
*/
priv
.
selectIndex
=
function
(
select_list
)
{
var
i
,
tmp
,
selector
=
{
"
index
"
:
0
,
"
similarity
"
:
0
};
for
(
i
=
0
;
i
<
priv
.
indices
.
length
;
i
+=
1
)
{
tmp
=
priv
.
similarityPercentage
(
select_list
,
priv
.
indices
[
i
].
index
);
if
(
tmp
>
selector
.
similarity
)
{
selector
.
index
=
i
;
selector
.
similarity
=
tmp
;
}
}
return
query_object
;
return
selector
.
index
;
};
/**
* Build the alldocs response from the index file (overriding substorage)
* @method allDocsResponseFromIndex
* @param {object} command The JIO command
* @param {boolean} include_docs Whether to also supply the document
* @param {object} option The options set for this method
* @returns {object} response The allDocs response
* Get a database
*
* @method getIndexDatabase
* @param {Object} option The command option
* @param {Number} number The location in priv.indices
* @param {Function} callback The callback
*/
priv
.
allDocsResponseFromIndex
=
function
(
indices
,
include_docs
,
option
)
{
var
i
,
j
,
k
,
m
,
n
=
0
,
l
=
priv
.
indices
.
length
,
index
,
key
,
obj
,
prop
,
found
,
file
,
label
,
unique_count
=
0
,
unique_docids
=
[],
all_doc_response
=
{},
success
=
function
(
content
)
{
file
=
{
value
:
{}
};
file
.
id
=
unique_docids
[
n
];
file
.
key
=
unique_docids
[
n
];
file
.
doc
=
content
;
all_doc_response
.
rows
.
push
(
file
);
// async counter, must be in callback
n
+=
1
;
if
(
n
===
unique_count
)
{
that
.
success
(
all_doc_response
);
}
priv
.
getIndexDatabase
=
function
(
option
,
number
,
callback
)
{
that
.
addJob
(
"
get
"
,
priv
.
indices
[
number
].
sub_storage
||
priv
.
sub_storage
,
{
"
_id
"
:
priv
.
indices
[
number
].
id
},
option
,
function
(
response
)
{
callback
(
new
JSONIndex
(
response
));
},
error
=
function
()
{
that
.
error
({
"
status
"
:
404
,
"
statusText
"
:
"
Not Found
"
,
"
error
"
:
"
not_found
"
,
"
message
"
:
"
Cannot find the document
"
,
"
reason
"
:
"
Cannot get a document from substorage
"
});
function
(
err
)
{
if
(
err
.
status
===
404
)
{
callback
(
new
JSONIndex
({
"
_id
"
:
priv
.
indices
[
number
].
id
,
"
indexing
"
:
priv
.
indices
[
number
].
index
}));
return
;
};
// loop indices
for
(
i
=
0
;
i
<
l
;
i
+=
1
)
{
index
=
{};
index
.
reference
=
priv
.
indices
[
i
];
index
.
reference_size
=
index
.
reference
.
fields
.
length
;
index
.
current
=
indices
[
index
.
reference
.
name
];
// a lot of loops, not sure this is the fastest way
// loop index fields
for
(
j
=
0
;
j
<
index
.
reference_size
;
j
+=
1
)
{
label
=
index
.
reference
.
fields
[
j
];
index
.
current_field
=
index
.
current
[
label
];
index
.
current_size
=
priv
.
getObjectSize
(
index
.
current_field
);
// loop field id array
for
(
j
=
0
;
j
<
index
.
current_size
;
j
+=
1
)
{
for
(
key
in
index
.
current_field
)
{
if
(
index
.
current_field
.
hasOwnProperty
(
key
))
{
obj
=
index
.
current_field
[
key
];
for
(
prop
in
obj
)
{
if
(
obj
.
hasOwnProperty
(
prop
))
{
for
(
k
=
0
;
k
<
unique_docids
.
length
;
k
+=
1
)
{
if
(
obj
[
prop
]
===
unique_docids
[
k
])
{
found
=
true
;
break
;
}
}
if
(
!
found
)
{
unique_docids
.
push
(
obj
[
prop
]);
unique_count
+=
1
;
}
}
}
}
err
.
message
=
"
Unable to get index database.
"
;
that
.
error
(
err
);
}
);
};
/**
* Gets a list containing all the databases set in the storage description.
*
* @method getIndexDatabaseList
* @param {Object} option The command option
* @param {Function} callback The result callback(database_list)
*/
priv
.
getIndexDatabaseList
=
function
(
option
,
callback
)
{
var
i
,
count
=
0
,
callbacks
=
{},
response_list
=
[];
callbacks
.
error
=
function
(
index
)
{
return
function
(
err
)
{
if
(
err
.
status
===
404
)
{
response_list
[
index
]
=
new
JSONIndex
({
"
_id
"
:
priv
.
indices
[
index
].
id
,
"
indexing
"
:
priv
.
indices
[
index
].
index
});
count
+=
1
;
if
(
count
===
priv
.
indices
.
length
)
{
callback
(
response_list
);
}
return
;
}
err
.
message
=
"
Unable to get index database.
"
;
that
.
error
(
err
);
};
};
callbacks
.
success
=
function
(
index
)
{
return
function
(
response
)
{
response_list
[
index
]
=
new
JSONIndex
(
response
);
count
+=
1
;
if
(
count
===
priv
.
indices
.
length
)
{
callback
(
response_list
);
}
// construct allDocs response
all_doc_response
.
total_rows
=
unique_count
;
all_doc_response
.
rows
=
[];
for
(
m
=
0
;
m
<
unique_count
;
m
+=
1
)
{
// include_docs
if
(
include_docs
)
{
};
};
for
(
i
=
0
;
i
<
priv
.
indices
.
length
;
i
+=
1
)
{
that
.
addJob
(
"
get
"
,
priv
.
sub
storage
,
unique_docids
[
m
]
,
priv
.
indices
[
i
].
sub_storage
||
priv
.
sub_
storage
,
{
"
_id
"
:
priv
.
indices
[
i
].
id
}
,
option
,
success
,
error
callbacks
.
success
(
i
)
,
callbacks
.
error
(
i
)
);
}
else
{
file
=
{
value
:
{}
};
file
.
id
=
unique_docids
[
m
];
file
.
key
=
unique_docids
[
m
];
all_doc_response
.
rows
.
push
(
file
);
if
(
m
===
(
unique_count
-
1
))
{
return
all_doc_response
;
}
};
/**
* Saves all the databases to the remote(s).
*
* @method storeIndexDatabaseList
* @param {Array} database_list The database list
* @param {Object} option The command option
* @param {Function} callback The result callback(err, response)
*/
priv
.
storeIndexDatabaseList
=
function
(
database_list
,
option
,
callback
)
{
var
i
,
count
=
0
,
onResponse
,
onError
;
onResponse
=
function
(
response
)
{
count
+=
1
;
if
(
count
===
priv
.
indices
.
length
)
{
callback
({
"
ok
"
:
true
});
}
};
onError
=
function
(
err
)
{
err
.
message
=
"
Unable to store index database.
"
;
that
.
error
(
err
);
};
for
(
i
=
0
;
i
<
priv
.
indices
.
length
;
i
+=
1
)
{
that
.
addJob
(
"
put
"
,
priv
.
indices
[
i
].
sub_storage
||
priv
.
sub_storage
,
database_list
[
i
].
serialized
(),
option
,
onResponse
,
onError
);
}
};
/**
* Post document to substorage and create/update index file(s)
* @method post
* @param {object} command The JIO command
* @param {string} source The source of the function call
* A generic request method which delegates the request to the sub storage.
* On response, it will index the document from the request and update all
* the databases.
*
* @method genericRequest
* @param {Command} command The JIO command
* @param {Function} method The request method
*/
priv
.
postOrPut
=
function
(
command
,
source
)
{
var
f
=
{},
indices
,
doc
;
doc
=
command
.
cloneDoc
();
if
(
typeof
doc
.
_id
!==
"
string
"
)
{
doc
.
_id
=
priv
.
generateUuid
();
}
f
.
getIndices
=
function
()
{
var
option
=
command
.
cloneOption
();
priv
.
genericRequest
=
function
(
command
,
method
)
{
var
doc
=
command
.
cloneDoc
(),
option
=
command
.
cloneOption
();
that
.
addJob
(
"
get
"
,
priv
.
substorage
,
{
"
_id
"
:
priv
.
index_suffix
}
,
method
,
priv
.
sub
_
storage
,
doc
,
option
,
function
(
response
)
{
indices
=
response
;
f
.
postDocument
(
"
put
"
);
},
function
(
err
)
{
switch
(
err
.
status
)
{
case
404
:
if
(
source
!==
'
PUTATTACHMENT
'
)
{
indices
=
priv
.
createEmptyIndexArray
();
f
.
postDocument
(
"
post
"
);
}
else
{
that
.
error
({
"
status
"
:
404
,
"
statusText
"
:
"
Not Found
"
,
"
error
"
:
"
not found
"
,
"
message
"
:
"
Document not found
"
,
"
reason
"
:
"
Document not found
"
});
return
;
switch
(
method
)
{
case
"
post
"
:
case
"
put
"
:
case
"
remove
"
:
doc
.
_id
=
response
.
id
;
priv
.
getIndexDatabaseList
(
option
,
function
(
database_list
)
{
var
i
;
switch
(
method
)
{
case
"
post
"
:
case
"
put
"
:
for
(
i
=
0
;
i
<
database_list
.
length
;
i
+=
1
)
{
database_list
[
i
].
put
(
doc
);
}
break
;
case
"
remove
"
:
for
(
i
=
0
;
i
<
database_list
.
length
;
i
+=
1
)
{
database_list
[
i
].
remove
(
doc
);
}
break
;
default
:
err
.
message
=
"
Cannot retrieve index array
"
;
that
.
error
(
err
);
break
;
}
}
);
};
f
.
postDocument
=
function
(
index_update_method
)
{
if
(
priv
.
isDocidInIndex
(
indices
,
doc
)
&&
source
===
'
POST
'
)
{
// POST the document already exists
that
.
error
({
"
status
"
:
409
,
"
statusText
"
:
"
Conflicts
"
,
"
error
"
:
"
conflicts
"
,
"
message
"
:
"
Cannot create a new document
"
,
"
reason
"
:
"
Document already exists
"
});
return
;
}
if
(
source
!==
'
PUTATTACHMENT
'
)
{
indices
=
priv
.
updateIndices
(
indices
,
doc
);
}
that
.
addJob
(
source
===
'
PUTATTACHMENT
'
?
"
putAttachment
"
:
"
post
"
,
priv
.
substorage
,
doc
,
command
.
cloneOption
(),
function
()
{
if
(
source
!==
'
PUTATTACHMENT
'
)
{
f
.
sendIndices
(
index_update_method
);
}
else
{
that
.
success
({
"
ok
"
:
true
,
"
id
"
:
doc
.
_id
,
"
attachment
"
:
doc
.
_attachment
priv
.
storeIndexDatabaseList
(
database_list
,
option
,
function
()
{
that
.
success
({
"
ok
"
:
true
,
"
id
"
:
doc
.
_id
});
});
}
},
function
(
err
)
{
switch
(
err
.
status
)
{
case
409
:
// file already exists
if
(
source
!==
'
PUTATTACHMENT
'
)
{
f
.
sendIndices
(
index_update_method
);
}
else
{
that
.
success
({
"
ok
"
:
true
,
"
id
"
:
doc
.
_id
});
}
break
;
default
:
err
.
message
=
"
Cannot upload document
"
;
that
.
error
(
err
);
that
.
success
(
response
);
break
;
}
}
);
};
f
.
sendIndices
=
function
(
method
)
{
indices
.
_id
=
priv
.
index_suffix
;
that
.
addJob
(
method
,
priv
.
substorage
,
indices
,
command
.
cloneOption
(),
function
()
{
that
.
success
({
"
ok
"
:
true
,
"
id
"
:
doc
.
_id
});
},
function
(
err
)
{
// xxx do we try to delete the posted document ?
err
.
message
=
"
Cannot save index file
"
;
that
.
error
(
err
);
return
that
.
error
(
err
);
}
);
};
f
.
getIndices
();
};
/**
* Update
the document metadata and update the index
* @method pu
t
* Post
the document metadata and update the index
* @method pos
t
* @param {object} command The JIO command
*/
that
.
post
=
function
(
command
)
{
priv
.
postOrPut
(
command
,
'
POST
'
);
priv
.
genericRequest
(
command
,
'
post
'
);
};
/**
...
...
@@ -710,7 +592,7 @@ jIO.addStorageType('indexed', function (spec, my) {
* @param {object} command The JIO command
*/
that
.
put
=
function
(
command
)
{
priv
.
postOrPut
(
command
,
'
PUT
'
);
priv
.
genericRequest
(
command
,
'
put
'
);
};
/**
...
...
@@ -719,7 +601,7 @@ jIO.addStorageType('indexed', function (spec, my) {
* @param {object} command The JIO command
*/
that
.
putAttachment
=
function
(
command
)
{
priv
.
postOrPut
(
command
,
'
PUTATTACHMENT
'
);
priv
.
genericRequest
(
command
,
'
putAttachment
'
);
};
/**
...
...
@@ -728,18 +610,7 @@ jIO.addStorageType('indexed', function (spec, my) {
* @param {object} command The JIO command
*/
that
.
get
=
function
(
command
)
{
that
.
addJob
(
"
get
"
,
priv
.
substorage
,
command
.
cloneDoc
(),
command
.
cloneOption
(),
function
(
response
)
{
that
.
success
(
response
);
},
function
(
err
)
{
that
.
error
(
err
);
}
);
priv
.
genericRequest
(
command
,
'
get
'
);
};
/**
...
...
@@ -748,18 +619,7 @@ jIO.addStorageType('indexed', function (spec, my) {
* @param {object} command The JIO command
*/
that
.
getAttachment
=
function
(
command
)
{
that
.
addJob
(
"
getAttachment
"
,
priv
.
substorage
,
command
.
cloneDoc
(),
command
.
cloneOption
(),
function
(
response
)
{
that
.
success
(
response
);
},
function
(
err
)
{
that
.
error
(
err
);
}
);
priv
.
genericRequest
(
command
,
'
getAttachment
'
);
};
/**
...
...
@@ -768,129 +628,16 @@ jIO.addStorageType('indexed', function (spec, my) {
* @param {object} command The JIO command
*/
that
.
remove
=
function
(
command
)
{
var
f
=
{},
indices
,
doc
,
docid
,
option
;
doc
=
command
.
cloneDoc
();
option
=
command
.
cloneOption
();
f
.
removeDocument
=
function
(
type
)
{
that
.
addJob
(
"
remove
"
,
priv
.
substorage
,
doc
,
option
,
function
(
response
)
{
that
.
success
(
response
);
},
function
()
{
that
.
error
({
"
status
"
:
409
,
"
statusText
"
:
"
Conflict
"
,
"
error
"
:
"
conflict
"
,
"
message
"
:
"
Document Update Conflict
"
,
"
reason
"
:
"
Could not delete document or attachment
"
});
}
);
};
f
.
getIndices
=
function
()
{
that
.
addJob
(
"
get
"
,
priv
.
substorage
,
{
"
_id
"
:
priv
.
index_suffix
},
option
,
function
(
response
)
{
// if deleting an attachment
if
(
typeof
command
.
getAttachmentId
()
===
'
string
'
)
{
f
.
removeDocument
(
'
attachment
'
);
}
else
{
indices
=
priv
.
cleanIndices
(
response
,
doc
);
// store update index file
that
.
addJob
(
"
put
"
,
priv
.
substorage
,
indices
,
command
.
cloneOption
(),
function
()
{
// remove actual document
f
.
removeDocument
(
'
doc
'
);
},
function
(
err
)
{
err
.
message
=
"
Cannot save index file
"
;
that
.
error
(
err
);
}
);
}
},
function
()
{
that
.
error
({
"
status
"
:
404
,
"
statusText
"
:
"
Not Found
"
,
"
error
"
:
"
not_found
"
,
"
message
"
:
"
Document index not found, please check document ID
"
,
"
reason
"
:
"
Incorrect document ID
"
});
return
;
}
);
};
f
.
getIndices
();
priv
.
genericRequest
(
command
,
'
remove
'
);
};
/**
* Remove document - removing documents updates index!.
* @method remove
* Remove attachment
* @method removeAttachment
* @param {object} command The JIO command
*/
that
.
removeAttachment
=
function
(
command
)
{
var
f
=
{},
indices
,
doc
,
docid
,
option
;
doc
=
command
.
cloneDoc
();
option
=
command
.
cloneOption
();
f
.
removeDocument
=
function
(
type
)
{
that
.
addJob
(
"
removeAttachment
"
,
priv
.
substorage
,
doc
,
option
,
that
.
success
,
that
.
error
);
};
f
.
getIndices
=
function
()
{
that
.
addJob
(
"
get
"
,
priv
.
substorage
,
{
"
_id
"
:
priv
.
index_suffix
},
option
,
function
(
response
)
{
// if deleting an attachment
if
(
typeof
command
.
getAttachmentId
()
===
'
string
'
)
{
f
.
removeDocument
(
'
attachment
'
);
}
else
{
indices
=
priv
.
cleanIndices
(
response
,
doc
);
// store update index file
that
.
addJob
(
"
put
"
,
priv
.
substorage
,
indices
,
command
.
cloneOption
(),
function
()
{
// remove actual document
f
.
removeDocument
(
'
doc
'
);
},
function
(
err
)
{
err
.
message
=
"
Cannot save index file
"
;
that
.
error
(
err
);
}
);
}
},
function
(
err
)
{
that
.
error
(
err
);
}
);
};
f
.
getIndices
();
priv
.
genericRequest
(
command
,
'
removeAttachment
'
);
};
/**
...
...
@@ -900,61 +647,56 @@ jIO.addStorageType('indexed', function (spec, my) {
* @method allDocs
* @param {object} command The JIO command
*/
//{
// "total_rows": 4,
// "rows": [
// {
// "id": "otherdoc",
// "key": "otherdoc",
// "value": {
// "rev": "1-3753476B70A49EA4D8C9039E7B04254C"
// }
// },{...}
// ]
//}
  that.allDocs = function (command) {
    var f = {}, option, all_docs_response, query_object, query_syntax,
      query_response;
    option = command.cloneOption();
    var option = command.cloneOption(),
      index = priv.selectIndex(option.select_list || []);
    // Include docs option is ignored, if you want to get all the document,
    // don't use index storage!
    option.select_list = option.select_list || [];
    option.select_list.push("_id");
    priv.getIndexDatabase(option, index, function (db) {
      var i, id;
      db = db._database;
      complex_queries.QueryFactory.create(option.query || '').exec(db, option);
      for (i = 0; i < db.length; i += 1) {
        id = db[i]._id;
        delete db[i]._id;
        db[i] = {
          "id": id,
          "key": id,
          "value": db[i]
        };
      }
      that.success({"total_rows": db.length, "rows": db});
    });
  };
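  // Editor's illustration (assumed usage, not part of the original file): an
  // allDocs call of the shape exercised in test/jiotests.js below; the
  // callback follows the jio (err, response) convention.
  /*
  jio.allDocs({
    "query": '(year: >= "1980")',
    "limit": [0, 5],
    "sort_on": [['year', 'descending']],
    "select_list": ['title', 'year']
  }, function (err, response) {
    // response.rows: [{"id": ..., "key": ..., "value": {"title": ..., "year": ...}}, ...]
  });
  */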
    f.getIndices = function () {
      that.addJob(
        "get",
        priv.substorage,
        {"_id": priv.index_suffix},
        option,
        function (response) {
          query_syntax = command.getOption('query');
          if (query_syntax !== undefined) {
            // build complex query object
            query_object = priv.constructQueryObject(response, query_syntax);
            if (query_object.length === 0) {
              that.addJob("allDocs", priv.substorage, undefined, option,
                          that.success, that.error);
            } else {
              // we can use index, run query on index
              query_response =
                jIO.ComplexQueries.query(query_syntax, query_object);
              that.success(query_response);
  // that.repair = function (command) {
  //   todo: repair
  //   easy but don't have time
  //   if _id is an index id, then repair the index by doing an
  //   allDocs and recreating the database from scratch. end.
  // };
  return that;
            }
          } else if (command.getOption('include_docs')) {
            priv.allDocsResponseFromIndex(response, true, option);
if (typeof exports === "object") {
  // nodejs export module
  Object.defineProperty(exports, "indexStorage", {
    configurable: false,
    enumerable: true,
    writable: false,
    value: indexStorage
  });
} else if (typeof define === "function" && define.amd) {
  // requirejs export
  define(indexStorage);
} else {
            all_docs_response =
              priv.allDocsResponseFromIndex(response, false, option);
            that.success(all_docs_response);
  // classical browser and web workers JIO export
  jIO.addStorageType("indexed", indexStorage);
}
          }
        },
        that.error
      );
    };
    f.getIndices();
  };
  return that;
});
}());
test/jiotests.js (view file @ 4c418987)
...
...
@@ -4327,13 +4327,9 @@ test ("Post", function () {
  o.jio = JIO.newJio({
    "type": "indexed",
    "indices": [
      {"name": "indexA", "fields": ["findMeA"]},
      {"name": "indexAB", "fields": ["findMeA", "findMeB"]}
      {"id": "A", "index": ["title"]},
      {"id": "B", "index": ["title", "year"]}
    ],
    "field_types": {"findMeA": "string", "findMeB": "string"},
    "sub_storage": {
      "type": "local",
      "username": "ipost",
...
...
@@ -4342,39 +4338,49 @@ test ("Post", function () {
});
// post without id
o
.
spy
(
o
,
"
status
"
,
undefined
,
"
Post without id
"
);
o
.
jio
.
post
({},
o
.
f
);
o
.
spy
(
o
,
"
jobstatus
"
,
"
done
"
,
"
Post without id
"
);
o
.
jio
.
post
({},
function
(
err
,
response
)
{
o
.
id
=
(
response
||
{}).
id
;
o
.
f
(
err
,
response
);
});
o
.
tick
(
o
);
// post non empty document
o
.
doc
=
{
"
_id
"
:
"
some_id
"
,
"
title
"
:
"
myPost1
"
,
"
findMeA
"
:
"
keyword_abc
"
,
"
findMeB
"
:
"
keyword_def
"
};
o
.
doc
=
{
"
_id
"
:
"
some_id
"
,
"
title
"
:
"
My Title
"
,
"
year
"
:
2000
,
"
hey
"
:
"
def
"
};
o
.
spy
(
o
,
"
value
"
,
{
"
ok
"
:
true
,
"
id
"
:
"
some_id
"
},
"
Post document
"
);
o
.
jio
.
post
(
o
.
doc
,
o
.
f
);
o
.
tick
(
o
);
// check document
o
.
fakeIndex
=
{
"
_id
"
:
"
ipost_indices.json
"
,
"
indexAB
"
:
{
"
findMeA
"
:
{
"
keyword_abc
"
:[
"
some_id
"
]
o
.
fakeIndexA
=
{
"
_id
"
:
"
A
"
,
"
indexing
"
:
[
"
title
"
],
"
free
"
:
[],
"
location
"
:
{
"
some_id
"
:
0
},
"
findMeB
"
:
{
"
keyword_def
"
:[
"
some_id
"
]
}
"
database
"
:
[
{
"
_id
"
:
"
some_id
"
,
"
title
"
:
"
My Title
"
}
]
};
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
fakeIndexB
=
{
"
_id
"
:
"
B
"
,
"
indexing
"
:
[
"
title
"
,
"
year
"
],
"
free
"
:
[],
"
location
"
:
{
"
some_id
"
:
0
},
"
indexA
"
:
{
"
findMeA
"
:
{
"
keyword_abc
"
:[
"
some_id
"
]
}
}
"
database
"
:
[
{
"
_id
"
:
"
some_id
"
,
"
title
"
:
"
My Title
"
,
"
year
"
:
2000
}
]
};
o
.
jio
.
get
({
"
_id
"
:
"
ipost_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexB
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
B
"
},
o
.
f
);
o
.
tick
(
o
);
// post with escapable characters
...
...
@@ -4404,13 +4410,9 @@ test ("Put", function(){
o
.
jio
=
JIO
.
newJio
({
"
type
"
:
"
indexed
"
,
"
indices
"
:
[
{
"
name
"
:
"
indexA
"
,
"
fields
"
:
[
"
author
"
]},
{
"
name
"
:
"
indexAB
"
,
"
fields
"
:[
"
author
"
,
"
year
"
]}
{
"
id
"
:
"
A
"
,
"
index
"
:
[
"
author
"
]},
{
"
id
"
:
"
B
"
,
"
index
"
:
[
"
year
"
]}
],
"
field_types
"
:
{
"
author
"
:
"
string
"
,
"
year
"
:
"
number
"
},
"
sub_storage
"
:
{
"
type
"
:
"
local
"
,
"
username
"
:
"
iput
"
,
...
...
@@ -4425,89 +4427,64 @@ test ("Put", function(){
o
.
tick
(
o
);
// put non empty document
o
.
doc
=
{
"
_id
"
:
"
put1
"
,
"
title
"
:
"
myPut1
"
,
"
author
"
:
"
John Doe
"
};
o
.
doc
=
{
"
_id
"
:
"
put1
"
,
"
title
"
:
"
myPut1
"
,
"
author
"
:
"
John Doe
"
};
o
.
spy
(
o
,
"
value
"
,
{
"
ok
"
:
true
,
"
id
"
:
"
put1
"
},
"
Put-create document
"
);
o
.
jio
.
put
(
o
.
doc
,
o
.
f
);
o
.
tick
(
o
);
// check index file
o
.
fakeIndex
=
{
"
indexA
"
:
{
"
author
"
:
{
"
John Doe
"
:
[
"
put1
"
]
}
o
.
fakeIndexA
=
{
"
_id
"
:
"
A
"
,
"
indexing
"
:
[
"
author
"
],
"
free
"
:
[],
"
location
"
:
{
"
put1
"
:
0
},
"
indexAB
"
:
{
"
author
"
:
{
"
John Doe
"
:
[
"
put1
"
]
},
"
year
"
:
{}
},
"
_id
"
:
"
iput_indices.json
"
"
database
"
:
[{
"
_id
"
:
"
put1
"
,
"
author
"
:
"
John Doe
"
}]
};
o
.
jio
.
get
({
"
_id
"
:
"
iput_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
fakeIndexB
=
{
"
_id
"
:
"
B
"
,
"
indexing
"
:
[
"
year
"
],
"
free
"
:
[],
"
location
"
:
{},
"
database
"
:
[]
};
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexB
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
B
"
},
o
.
f
);
o
.
tick
(
o
);
// modify document - modify keyword on index!
o
.
doc
=
{
"
_id
"
:
"
put1
"
,
"
title
"
:
"
myPuttter1
"
,
"
author
"
:
"
Jane Doe
"
};
o
.
doc
=
{
"
_id
"
:
"
put1
"
,
"
title
"
:
"
myPuttter1
"
,
"
author
"
:
"
Jane Doe
"
};
o
.
spy
(
o
,
"
value
"
,
{
"
ok
"
:
true
,
"
id
"
:
"
put1
"
},
"
Modify existing document
"
);
o
.
jio
.
put
(
o
.
doc
,
o
.
f
);
o
.
tick
(
o
);
// check index file
o
.
fakeIndex
=
{
"
indexA
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
]
}
},
"
indexAB
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
]
},
"
year
"
:
{}
},
"
_id
"
:
"
iput_indices.json
"
};
o
.
jio
.
get
({
"
_id
"
:
"
iput_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
fakeIndexA
.
database
[
0
].
author
=
"
Jane Doe
"
;
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
// add new document with same keyword!
o
.
doc
=
{
"
_id
"
:
"
new_doc
"
,
"
title
"
:
"
myPut2
"
,
"
author
"
:
"
Jane Doe
"
};
o
.
doc
=
{
"
_id
"
:
"
new_doc
"
,
"
title
"
:
"
myPut2
"
,
"
author
"
:
"
Jane Doe
"
};
o
.
spy
(
o
,
"
value
"
,
{
"
ok
"
:
true
,
"
id
"
:
"
new_doc
"
},
"
Add new document with same keyword
"
);
o
.
jio
.
put
(
o
.
doc
,
o
.
f
);
o
.
tick
(
o
);
// check index file
o
.
fakeIndex
=
{
"
indexA
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
,
"
new_doc
"
]
}
},
"
indexAB
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
,
"
new_doc
"
]
},
"
year
"
:
{}
},
"
_id
"
:
"
iput_indices.json
"
};
o
.
jio
.
get
({
"
_id
"
:
"
iput_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
fakeIndexA
.
location
.
new_doc
=
1
;
o
.
fakeIndexA
.
database
.
push
({
"
_id
"
:
"
new_doc
"
,
"
author
"
:
"
Jane Doe
"
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
// add second keyword to index file
o
.
doc
=
{
"
_id
"
:
"
put1
"
,
"
title
"
:
"
myPut2
"
,
"
author
"
:
"
Jane Doe
"
,
o
.
doc
=
{
"
_id
"
:
"
put1
"
,
"
title
"
:
"
myPut2
"
,
"
author
"
:
"
Jane Doe
"
,
"
year
"
:
"
1912
"
};
o
.
spy
(
o
,
"
value
"
,
{
"
ok
"
:
true
,
"
id
"
:
"
put1
"
},
"
add second keyword to index file
"
);
...
...
@@ -4515,26 +4492,14 @@ test ("Put", function(){
o
.
tick
(
o
);
// check index file
o
.
fakeIndex
=
{
"
indexA
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
]
}
},
"
indexAB
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
]
},
"
year
"
:
{
"
1912
"
:
[
"
put1
"
]
}
},
"
_id
"
:
"
iput_indices.json
"
};
o
.
jio
.
get
({
"
_id
"
:
"
iput_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
fakeIndexB
.
location
.
put1
=
0
;
o
.
fakeIndexB
.
database
.
push
({
"
_id
"
:
"
put1
"
,
"
year
"
:
"
1912
"
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexB
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
B
"
},
o
.
f
);
o
.
tick
(
o
);
// remove a keyword from an existing document
...
...
@@ -4545,26 +4510,11 @@ test ("Put", function(){
o
.
tick
(
o
);
// check index file
o
.
fakeIndex
=
{
"
indexA
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
]
}
},
"
indexAB
"
:
{
"
author
"
:
{
"
Jane Doe
"
:
[
"
put1
"
]
},
"
year
"
:
{
"
1912
"
:
[
"
put1
"
]
}
},
"
_id
"
:
"
iput_indices.json
"
};
o
.
jio
.
get
({
"
_id
"
:
"
iput_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
delete
o
.
fakeIndexA
.
location
.
new_doc
;
o
.
fakeIndexA
.
database
[
1
]
=
null
;
o
.
fakeIndexA
.
free
.
push
(
1
);
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
jio
.
stop
();
...
...
@@ -4580,13 +4530,9 @@ test ("PutAttachment", function(){
o
.
jio
=
JIO
.
newJio
({
"
type
"
:
"
indexed
"
,
"
indices
"
:
[
{
"
name
"
:
"
indexA
"
,
"
fields
"
:
[
"
author
"
]},
{
"
name
"
:
"
indexAB
"
,
"
fields
"
:[
"
author
"
,
"
year
"
]}
{
"
id
"
:
"
A
"
,
"
index
"
:
[
"
author
"
]},
{
"
id
"
:
"
B
"
,
"
index
"
:
[
"
year
"
]}
],
"
field_types
"
:
{
"
author
"
:
"
string
"
,
"
year
"
:
"
number
"
},
"
sub_storage
"
:
{
"
type
"
:
"
local
"
,
"
username
"
:
"
iputatt
"
,
...
...
@@ -4697,13 +4643,9 @@ test ("Get", function(){
o
.
jio
=
JIO
.
newJio
({
"
type
"
:
"
indexed
"
,
"
indices
"
:
[
{
"
name
"
:
"
indexA
"
,
"
fields
"
:
[
"
author
"
]},
{
"
name
"
:
"
indexAB
"
,
"
fields
"
:[
"
author
"
,
"
year
"
]}
{
"
id
"
:
"
A
"
,
"
index
"
:
[
"
author
"
]},
{
"
id
"
:
"
B
"
,
"
index
"
:
[
"
year
"
]}
],
"
field_types
"
:
{
"
author
"
:
"
string
"
,
"
year
"
:
"
number
"
},
"
sub_storage
"
:
{
"
type
"
:
"
local
"
,
"
username
"
:
"
iget
"
,
...
...
@@ -4767,13 +4709,9 @@ test ("Remove", function(){
o
.
jio
=
JIO
.
newJio
({
"
type
"
:
"
indexed
"
,
"
indices
"
:
[
{
"
name
"
:
"
indexA
"
,
"
fields
"
:
[
"
author
"
]},
{
"
name
"
:
"
indexAB
"
,
"
fields
"
:[
"
author
"
,
"
year
"
]}
{
"
id
"
:
"
A
"
,
"
index
"
:
[
"
author
"
]},
{
"
id
"
:
"
B
"
,
"
index
"
:
[
"
year
"
]}
],
"
field_types
"
:
{
"
author
"
:
"
string
"
,
"
year
"
:
"
number
"
},
"
sub_storage
"
:
{
"
type
"
:
"
local
"
,
"
username
"
:
"
irem
"
,
...
...
@@ -4809,26 +4747,30 @@ test ("Remove", function(){
o
.
tick
(
o
);
// check index
o
.
fakeIndex
=
{
"
_id
"
:
"
irem_indices.json
"
,
"
index
A
"
:
{
"
author
"
:
{
"
Martin Mustermann
"
:
[
"
removeAlso
"
]
}
o
.
fakeIndex
A
=
{
"
_id
"
:
"
A
"
,
"
index
ing
"
:
[
"
author
"
],
"
free
"
:
[
0
],
"
location
"
:
{
"
removeAlso
"
:
1
},
"
indexAB
"
:
{
"
year
"
:
{
"
2525
"
:
[
"
removeAlso
"
]
"
database
"
:
[
null
,
{
"
_id
"
:
"
removeAlso
"
,
"
author
"
:
"
Martin Mustermann
"
}]
};
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
fakeIndexB
=
{
"
_id
"
:
"
B
"
,
"
indexing
"
:
[
"
year
"
],
"
free
"
:
[
0
],
"
location
"
:
{
"
removeAlso
"
:
1
},
"
author
"
:
{
"
Martin Mustermann
"
:
[
"
removeAlso
"
]
}
}
"
database
"
:
[
null
,
{
"
_id
"
:
"
removeAlso
"
,
"
year
"
:
"
2525
"
}]
};
o
.
jio
.
get
({
"
_id
"
:
"
irem_indices.json
"
},
function
(
err
,
response
){
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexB
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
B
"
},
o
.
f
);
o
.
tick
(
o
);
// check document
...
...
@@ -4868,29 +4810,18 @@ test ("Remove", function(){
o
.
tick
(
o
);
// check index
o
.
fakeIndex
=
{
"
_id
"
:
"
irem_indices.json
"
,
"
indexA
"
:
{
"
author
"
:{
"
Martin Mustermann
"
:
[
"
removeAlso
"
],
"
Mrs Sunshine
"
:
[
"
remove3
"
]
}
},
"
indexAB
"
:
{
"
year
"
:
{
"
1234
"
:
[
"
remove3
"
],
"
2525
"
:
[
"
removeAlso
"
]
},
"
author
"
:
{
"
Martin Mustermann
"
:
[
"
removeAlso
"
],
"
Mrs Sunshine
"
:
[
"
remove3
"
]
}
}
};
o
.
jio
.
get
({
"
_id
"
:
"
irem_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
fakeIndexA
.
free
=
[];
o
.
fakeIndexA
.
location
.
remove3
=
0
;
o
.
fakeIndexA
.
database
[
0
]
=
{
"
_id
"
:
"
remove3
"
,
"
author
"
:
"
Mrs Sunshine
"
};
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
fakeIndexB
.
free
=
[];
o
.
fakeIndexB
.
location
.
remove3
=
0
;
o
.
fakeIndexB
.
database
[
0
]
=
{
"
_id
"
:
"
remove3
"
,
"
year
"
:
"
1234
"
};
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexB
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
B
"
},
o
.
f
);
o
.
tick
(
o
);
// remove document and attachment together
...
...
@@ -4900,26 +4831,18 @@ test ("Remove", function(){
o
.
tick
(
o
);
// check index
o
.
fakeIndex
=
{
"
_id
"
:
"
irem_indices.json
"
,
"
indexA
"
:
{
"
author
"
:
{
"
Martin Mustermann
"
:
[
"
removeAlso
"
]
}
},
"
indexAB
"
:
{
"
year
"
:
{
"
2525
"
:
[
"
removeAlso
"
]
},
"
author
"
:
{
"
Martin Mustermann
"
:
[
"
removeAlso
"
]
}
}
};
o
.
jio
.
get
({
"
_id
"
:
"
irem_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
fakeIndexA
.
free
=
[
0
];
delete
o
.
fakeIndexA
.
location
.
remove3
;
o
.
fakeIndexA
.
database
[
0
]
=
null
;
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
fakeIndexB
.
free
=
[
0
];
delete
o
.
fakeIndexB
.
location
.
remove3
;
o
.
fakeIndexB
.
database
[
0
]
=
null
;
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexB
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
B
"
},
o
.
f
);
o
.
tick
(
o
);
// check attachment
...
...
@@ -4942,13 +4865,9 @@ test ("AllDocs", function () {
o
.
jio
=
JIO
.
newJio
({
"
type
"
:
"
indexed
"
,
"
indices
"
:
[
{
"
name
"
:
"
indexA
"
,
"
fields
"
:
[
"
author
"
]},
{
"
name
"
:
"
indexAB
"
,
"
fields
"
:[
"
author
"
,
"
year
"
]}
{
"
id
"
:
"
A
"
,
"
index
"
:
[
"
author
"
]},
{
"
id
"
:
"
B
"
,
"
index
"
:
[
"
year
"
]}
],
"
field_types
"
:
{
"
author
"
:
"
string
"
,
"
year
"
:
"
number
"
},
"
sub_storage
"
:
{
"
type
"
:
"
local
"
,
"
username
"
:
"
iall
"
,
...
...
@@ -4983,34 +4902,25 @@ test ("AllDocs", function () {
o
.
tick
(
o
);
// check index
o
.
fakeIndex
=
{
"
_id
"
:
"
iall_indices.json
"
,
"
indexA
"
:
{
"
author
"
:
{
"
Dr. No
"
:
[
"
dragon.doc
"
],
"
Dr. Who
"
:
[
"
timemachine
"
],
"
Dr. Snuggles
"
:
[
"
rocket.ppt
"
],
"
Dr. House
"
:[
"
stick.jpg
"
]
}
},
"
indexAB
"
:
{
"
author
"
:
{
"
Dr. No
"
:
[
"
dragon.doc
"
],
"
Dr. Who
"
:
[
"
timemachine
"
],
"
Dr. Snuggles
"
:
[
"
rocket.ppt
"
],
"
Dr. House
"
:[
"
stick.jpg
"
]
o
.
fakeIndexA
=
{
"
_id
"
:
"
A
"
,
"
indexing
"
:
[
"
author
"
],
"
free
"
:
[],
"
location
"
:
{
"
dragon.doc
"
:
0
,
"
timemachine
"
:
1
,
"
rocket.ppt
"
:
2
,
"
stick.jpg
"
:
3
},
"
year
"
:
{
"
1968
"
:
[
"
dragon.doc
"
,
"
timemachine
"
]
,
"
1985
"
:
[
"
rocket.ppt
"
]
,
"
2005
"
:[
"
stick.jpg
"
]
}
}
"
database
"
:
[
{
"
_id
"
:
"
dragon.doc
"
,
"
author
"
:
"
Dr. No
"
}
,
{
"
_id
"
:
"
timemachine
"
,
"
author
"
:
"
Dr. Who
"
}
,
{
"
_id
"
:
"
rocket.ppt
"
,
"
author
"
:
"
Dr. Snuggles
"
},
{
"
_id
"
:
"
stick.jpg
"
,
"
author
"
:
"
Dr. House
"
}
]
};
o
.
jio
.
get
({
"
_id
"
:
"
iall_indices.json
"
},
function
(
err
,
response
)
{
o
.
actualIndex
=
response
;
deepEqual
(
o
.
actualIndex
,
o
.
fakeIndex
,
"
Check index file
"
);
});
o
.
spy
(
o
,
"
value
"
,
o
.
fakeIndexA
,
"
Check index file
"
);
o
.
jio
.
get
({
"
_id
"
:
"
A
"
},
o
.
f
);
o
.
tick
(
o
);
o
.
thisShouldBeTheAnswer
=
{
...
...
@@ -5026,19 +4936,6 @@ test ("AllDocs", function () {
o
.
jio
.
allDocs
(
o
.
f
);
o
.
tick
(
o
);
o
.
thisShouldBeTheAnswer2
=
{
"
rows
"
:
[
{
"
id
"
:
"
dragon.doc
"
,
"
key
"
:
"
dragon.doc
"
,
"
value
"
:
{},
"
doc
"
:
o
.
all1
},
{
"
id
"
:
"
timemachine
"
,
"
key
"
:
"
timemachine
"
,
"
value
"
:
{},
"
doc
"
:
o
.
all2
},
{
"
id
"
:
"
rocket.ppt
"
,
"
key
"
:
"
rocket.ppt
"
,
"
value
"
:
{},
"
doc
"
:
o
.
all3
},
{
"
id
"
:
"
stick.jpg
"
,
"
key
"
:
"
stick.jpg
"
,
"
value
"
:
{},
"
doc
"
:
o
.
all4
}
],
"
total_rows
"
:
4
}
o
.
spy
(
o
,
"
value
"
,
o
.
thisShouldBeTheAnswer2
,
"
allDocs (include_docs)
"
);
o
.
jio
.
allDocs
({
"
include_docs
"
:
true
},
o
.
f
);
o
.
tick
(
o
);
o
.
jio
.
stop
();
});
...
...
@@ -5049,16 +4946,11 @@ test ("AllDocs Complex Queries", function () {
o
.
jio
=
JIO
.
newJio
({
"
type
"
:
"
indexed
"
,
"
indices
"
:
[
{
"
name
"
:
"
indexA
"
,
"
fields
"
:
[
"
director
"
]},
{
"
name
"
:
"
indexAB
"
,
"
fields
"
:[
"
title
"
,
"
year
"
]}
{
"
id
"
:
"
A
"
,
"
index
"
:
[
"
director
"
]},
{
"
id
"
:
"
B
"
,
"
index
"
:
[
"
title
"
,
"
year
"
]}
//,
//{"name":"indexABC", "fields":["title","year","director"]}
],
"
field_types
"
:
{
"
director
"
:
"
string
"
,
"
title
"
:
"
string
"
,
"
year
"
:
"
number
"
},
"
sub_storage
"
:
{
"
type
"
:
"
local
"
,
"
username
"
:
"
icomplex
"
,
...
...
@@ -5083,7 +4975,7 @@ test ("AllDocs Complex Queries", function () {
"
Sidney Lumet
"
,
"
Christopher Nolan
"
,
"
Steven Spielberg
"
,
"
Peter Jackson
"
,
"
David Fincher
"
,
"
Irvin Kershner
"
,
"
Peter Jackson
"
,
"
Milos Forman
"
,
"
Christopher Nolan
"
,
"
Martin Scorsese
"
]
]
;
for
(
i
=
0
;
i
<
m
;
i
+=
1
)
{
o
.
fakeDoc
=
{};
...
...
@@ -5094,6 +4986,7 @@ test ("AllDocs Complex Queries", function () {
o
.
jio
.
put
(
o
.
fakeDoc
);
o
.
clock
.
tick
(
1000
);
}
// o.clock.tick(1000);
// response
o
.
allDocsResponse
=
{};
...
...
@@ -5101,123 +4994,90 @@ test ("AllDocs Complex Queries", function () {
o
.
allDocsResponse
.
total_rows
=
15
;
for
(
i
=
0
;
i
<
m
;
i
+=
1
)
{
o
.
allDocsResponse
.
rows
.
push
({
"
id
"
:
""
+
i
,
"
key
"
:
""
+
i
,
"
value
"
:
{}
});
};
// alldocs
o
.
jio
.
allDocs
(
function
(
e
,
r
)
{
var
x
=
r
.
rows
.
sort
(
o
.
sortArrayById
(
'
id
'
,
true
,
parseInt
));
deepEqual
(
{
"
total_rows
"
:
r
.
total_rows
,
"
rows
"
:
x
},
o
.
allDocsResponse
,
"
AllDocs response generated from index
"
);
});
o
.
clock
.
tick
(
1000
);
// include docs
o
.
allDocsResponse2
=
{};
o
.
allDocsResponse2
.
rows
=
[];
o
.
allDocsResponse2
.
total_rows
=
15
;
for
(
i
=
0
;
i
<
m
;
i
+=
1
)
{
o
.
allDocsResponse2
.
rows
.
push
({
"
id
"
:
""
+
i
,
"
key
"
:
""
+
i
,
"
value
"
:
{},
"
doc
"
:
localstorage
.
getItem
(
o
.
localpath
+
"
/
"
+
i
)
"
doc
"
:
{
"
_id
"
:
""
+
i
,
"
title
"
:
o
.
titles
[
i
],
"
year
"
:
o
.
years
[
i
],
"
director
"
:
o
.
director
[
i
]
}
});
};
}
o
.
response
=
JSON
.
parse
(
JSON
.
stringify
(
o
.
allDocsResponse
));
for
(
i
=
0
;
i
<
o
.
response
.
rows
.
length
;
i
+=
1
)
{
delete
o
.
response
.
rows
[
i
].
doc
;
}
// alldocs
o
.
jio
.
allDocs
({
"
include_docs
"
:
true
},
function
(
e
,
r
)
{
var
x
=
r
.
rows
.
sort
(
o
.
sortArrayById
(
'
id
'
,
true
,
parseInt
));
deepEqual
(
{
"
total_rows
"
:
r
.
total_rows
,
"
rows
"
:
x
},
o
.
allDocsResponse2
,
"
AllDocs response generated from index (include docs)
"
);
});
o
.
clock
.
tick
(
1000
);
o
.
spy
(
o
,
"
value
"
,
o
.
response
,
"
AllDocs response generated from index
"
);
o
.
jio
.
allDocs
(
o
.
f
);
o
.
tick
(
o
,
1000
);
// complex queries
o
.
thisShouldBeTheAnswer4
=
[
{
"
title
"
:
"
Inception
"
,
"
year
"
:
2010
},
{
"
title
"
:
"
The Dark Knight
"
,
"
year
"
:
2008
},
{
"
title
"
:
"
Lord of the Rings - Return of the King
"
,
"
year
"
:
2003
},
{
"
title
"
:
"
Lord Of the Rings - Fellowship of the Ring
"
,
"
year
"
:
2001
},
{
"
title
"
:
"
Fight Club
"
,
"
year
"
:
1999
}
];
o
.
spy
(
o
,
"
value
"
,
o
.
thisShouldBeTheAnswer4
,
o
.
response
=
JSON
.
parse
(
JSON
.
stringify
(
o
.
allDocsResponse
));
i
=
0
;
while
(
i
<
o
.
response
.
rows
.
length
)
{
if
(
o
.
response
.
rows
[
i
].
year
<
1980
)
{
o
.
response
.
rows
.
splice
(
i
,
1
);
}
else
{
o
.
response
.
rows
[
i
].
value
=
{
"
year
"
:
o
.
response
.
rows
[
i
].
doc
.
year
,
"
title
"
:
o
.
response
.
rows
[
i
].
doc
.
title
}
delete
o
.
response
.
rows
[
i
].
doc
;
i
+=
1
;
}
}
o
.
response
.
rows
.
sort
(
function
(
a
,
b
)
{
return
a
.
value
.
year
>
b
.
value
.
year
?
-
1
:
a
.
value
.
year
<
b
.
value
.
year
?
1
:
0
;
});
o
.
response
.
rows
.
length
=
5
;
o
.
response
.
total_rows
=
5
;
o
.
spy
(
o
,
"
value
"
,
o
.
response
,
"
allDocs (complex queries year >= 1980, index used to do query)
"
);
o
.
jio
.
allDocs
({
"
query
"
:{
// "query":'(year: >= "1980" AND year: < "2000")',
"
query
"
:
'
(year: >= "1980")
'
,
"
filter
"
:
{
"
limit
"
:[
0
,
5
],
"
sort_on
"
:[[
'
year
'
,
'
descending
'
]],
"
select_list
"
:[
'
title
'
,
'
year
'
]
},
"
wildcard_character
"
:
'
%
'
}
"
query
"
:
'
(year: >= "1980")
'
,
"
limit
"
:
[
0
,
5
],
"
sort_on
"
:
[[
'
year
'
,
'
descending
'
]],
"
select_list
"
:
[
'
title
'
,
'
year
'
]
},
o
.
f
);
o
.
tick
(
o
);
// complex queries
o
.
thisShouldBeTheAnswer5
=
[
{
"
director
"
:
"
Christopher Nolan
"
,
"
year
"
:
2010
},
{
"
director
"
:
"
Christopher Nolan
"
,
"
year
"
:
2008
},
{
"
director
"
:
"
Peter Jackson
"
,
"
year
"
:
2003
},
{
"
director
"
:
"
Peter Jackson
"
,
"
year
"
:
2001
},
{
"
director
"
:
"
David Fincher
"
,
"
year
"
:
1999
}
];
o
.
spy
(
o
,
"
value
"
,
o
.
thisShouldBeTheAnswer5
,
o
.
spy
(
o
,
"
value
"
,
{
"
total_rows
"
:
0
,
"
rows
"
:
[]},
"
allDocs (complex queries year >= 1980, can't use index)
"
);
o
.
jio
.
allDocs
({
"
query
"
:{
// "query":'(year: >= "1980" AND year: < "2000")',
"
query
"
:
'
(year: >= "1980")
'
,
"
filter
"
:
{
"
limit
"
:[
0
,
5
],
"
sort_on
"
:[[
'
year
'
,
'
descending
'
]],
"
select_list
"
:[
'
director
'
,
'
year
'
]
},
"
wildcard_character
"
:
'
%
'
}
"
query
"
:
'
(year: >= "1980")
'
,
"
limit
"
:
[
0
,
5
],
"
sort_on
"
:
[[
'
year
'
,
'
descending
'
]],
"
select_list
"
:
[
'
director
'
,
'
year
'
]
},
o
.
f
);
o
.
tick
(
o
);
// empty query returns all
o
.
thisShouldBeTheAnswer6
=
[
{
"
title
"
:
"
The Good, The Bad and The Ugly
"
},
{
"
title
"
:
"
The Dark Knight
"
},
{
"
title
"
:
"
Star Wars Episode V
"
},
{
"
title
"
:
"
Shawshank Redemption
"
},
{
"
title
"
:
"
Schindlers List
"
},
{
"
title
"
:
"
Pulp Fiction
"
},
{
"
title
"
:
"
One flew over the Cuckoo's Nest
"
},
{
"
title
"
:
"
Lord of the Rings - Return of the King
"
},
{
"
title
"
:
"
Lord Of the Rings - Fellowship of the Ring
"
},
{
"
title
"
:
"
Inception
"
},
{
"
title
"
:
"
Godfellas
"
},
{
"
title
"
:
"
Godfather 2
"
},
{
"
title
"
:
"
Godfather
"
},
{
"
title
"
:
"
Fight Club
"
},
{
"
title
"
:
"
12 Angry Men
"
}
];
o
.
spy
(
o
,
"
value
"
,
o
.
thisShouldBeTheAnswer6
,
o
.
response
=
JSON
.
parse
(
JSON
.
stringify
(
o
.
allDocsResponse
));
i
=
0
;
while
(
i
<
o
.
response
.
rows
.
length
)
{
o
.
response
.
rows
[
i
].
value
.
title
=
o
.
response
.
rows
[
i
].
doc
.
title
;
delete
o
.
response
.
rows
[
i
].
doc
;
i
+=
1
;
}
o
.
response
.
rows
.
sort
(
function
(
a
,
b
)
{
return
a
.
value
.
title
>
b
.
value
.
title
?
-
1
:
a
.
value
.
title
<
b
.
value
.
title
?
1
:
0
;
});
o
.
spy
(
o
,
"
value
"
,
o
.
response
,
"
allDocs (empty query in complex query)
"
);
o
.
jio
.
allDocs
({
"
query
"
:{
"
filter
"
:
{
"
sort_on
"
:[[
'
title
'
,
'
descending
'
]],
"
select_list
"
:[
'
title
'
]
},
"
wildcard_character
"
:
'
%
'
}
},
o
.
f
);
o
.
tick
(
o
);
...
...