nexedi / ecommerce-ui

Commit d5c2fc5b authored Feb 19, 2014 by Sven Franck
added check for force_sync and call to repair for un-sync records
parent 24f195a6
Showing 1 changed file with 49 additions and 40 deletions (+49 / -40).
js/erp5_loader.js (view file @ d5c2fc5b)
@@ -5566,41 +5566,34 @@
 /**
  * First pass at sync
  * @method sync
+ * @param {object} response Response object to be synced
+ * @param {string} storage Name of storage
  **/
-storage.sync = function () {
-  /*
-  0. state machine?
-  > detect offline/online
-  > detect sync/need_sync
-  > detect login/logoff (later)
-  1. when to sync
-  > whenever we have connection and async is set.
-  > on application init, async is set
-  > when offline we always set, tool (because remote and local can change)
-  2. how to sync
-  > loop over remote, but how
-  > hardcode or robot?
-  > also have a manual sync button and a settings button
-  > besides records, all other JSON config also needs to be fetched
-    for offline. This will be in settings.
-  > sync means POST (+ error handling for storage which already has the file)
-  > question is whether it's possible to fetch all records of a portal_type
-  > this must be an allDocs (include_docs) with no-limit
-  > once all files are fetched...
-  > loop over
-  > not exists in local, POST
-  > exists in local, calculate hash and compare
-  > different hash needs timestamp to decide what to do...
-  > where do have information when the file was created... as we have no revisions...
-  > this is retarded...
-  > store initial hash of every file so it can be compared against?
-  >
-  */
+storage.sync = function (response, storage) {
+  var i, record, use, repair, promise_list = [];
+  for (i = 0; i < response.data.total_rows; i += 1) {
+    record = response.data.rows[i];
+    // NOTE: if multiple versions exist, we should pick one here!
+    if (record._missing) {
+      repair = true;
+      use = record.doc;
+    }
+    if (record._conflict_list) {
+      repair = true;
+      use = record._conflict_list[0].doc;
+    }
+    if (repair) {
+      util.loader("", "status_dict.sync");
+      promise_list[i] = app.storage_dict[storage].repair(use);
+    }
+  }
+  return RSVP.all(promise_list)
+    .then(function () {
+      return undefined;
+    });
 };
 /**
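For orientation, the reworked sync pass walks the allDocs-style object produced by mapResponse (second file section below): each row may carry a _missing flag or a _conflict_list, and for every flagged row the repair method of the named storage is called with the document to keep. The following is a minimal, self-contained sketch of that flow only; it uses plain Promises and a stubbed repair in place of RSVP and app.storage_dict, which are stand-ins for illustration and not part of the commit.

// Stand-in for app.storage_dict[storage].repair -- illustration only.
var stubStorage = {
  repair: function (doc) {
    console.log("repairing", doc._id);
    return Promise.resolve(doc);
  }
};

function syncSketch(response) {
  var i, record, use, repair, promise_list = [];
  for (i = 0; i < response.data.total_rows; i += 1) {
    record = response.data.rows[i];
    repair = false;
    if (record._missing) {            // present remotely, missing locally
      repair = true;
      use = record.doc;
    }
    if (record._conflict_list) {      // conflicting versions: take the first
      repair = true;
      use = record._conflict_list[0].doc;
    }
    if (repair) {
      promise_list.push(stubStorage.repair(use));
    }
  }
  return Promise.all(promise_list);   // resolve once all repairs are done
}

// Example: one missing record, one conflicting record.
syncSketch({
  data: {
    total_rows: 2,
    rows: [
      {id: "a", key: "a", value: {}, doc: {_id: "a"}, _missing: true},
      {id: "b", key: "b", value: {}, doc: {_id: "b"},
        _conflict_list: [{doc: {_id: "b"}}]}
    ]
  }
});

One difference worth noting: the committed code never resets repair inside the loop, so once a single row is flagged every subsequent row is passed to repair as well (with the last use value); the sketch resets the flag per iteration.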
@@ -5625,7 +5618,8 @@
 /**
  * Convert storage responses into allDocs response format which
- * is used in subsequent methods
+ * is used in subsequent methods. Will be called for allDocs(select_list)
+ * and get!
  * @method mapResponse
  * @param {object} response Response object
  * @param {string} convert Type of conversion required
@@ -5635,18 +5629,25 @@
   var answer, i, record, key, data, pushit, value_dict;
   answer = {"data": {"total_rows": null, "rows": []}};
-  pushit = function (obj, record_id) {
-    return {
-      "id": record_id,
-      "key": record_id,
+  pushit = function (obj, record) {
+    var new_record = {
+      "id": record.id,
+      "key": record.id,
       "value": {},
       "doc": obj
     };
+    if (record._missing) {
+      new_record._missing = true;
+    }
+    if (record._conflict_list) {
+      new_record._conflict_list = record._conflict_list;
+    }
+    return new_record;
   };
   if (convert === "single_item") {
     answer.data.total_rows = 1;
-    answer.data.rows.push(pushit(response.data, response.id));
+    answer.data.rows.push(pushit(response.data, response));
   } else {
     answer.data.total_rows = response.data.total_rows;
     for (i = 0; i < response.data.total_rows; i += 1) {
@@ -5660,7 +5661,7 @@
       }
       // TODO: verify this is valid!
       data["_id"] = record.id;
-      answer.data.rows.push(pushit(data, record.id));
+      answer.data.rows.push(pushit(data, record));
     }
   }
   return answer;
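To make the normalised format concrete, here is the shape of the answer object mapResponse builds, as it can be read off the code above. The field names come from the diff; the sample ids and document contents are invented for illustration.

// convert === "single_item": a get() response wrapped as a one-row allDocs result
var single_answer = {
  "data": {
    "total_rows": 1,
    "rows": [
      {"id": "doc_a", "key": "doc_a", "value": {}, "doc": {"_id": "doc_a", "title": "Foo"}}
    ]
  }
};

// any other convert value: every row of an allDocs() response re-wrapped the same
// way, with _missing / _conflict_list copied through so storage.sync can act on them
var list_answer = {
  "data": {
    "total_rows": 2,
    "rows": [
      {"id": "doc_a", "key": "doc_a", "value": {}, "doc": {"_id": "doc_a"}},
      {"id": "doc_b", "key": "doc_b", "value": {}, "doc": {"_id": "doc_b"}, "_missing": true}
    ]
  }
};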
@@ -7671,7 +7672,7 @@
  * @return {object} promise object/pass
  */
 app.fetchData = function (parcel) {
-  var method, convert, select_list, hacked_view, pass, skip, query;
+  var method, convert, select_list, hacked_view, pass, skip, query, mapped;
   pass = parcel.pass;
   query = parcel.query;
@@ -7704,10 +7705,18 @@
   return app.storage_dict[parcel.storage][method || "allDocs"](parcel.query, hacked_view)
     .then(function (response) {
       // TODO: best way?
       if (convert !== undefined && response.status === 200) {
+        mapped = storage.mapResponse(response, convert);
+        // force sync
+        if (app.storage_dict.property_dict.force_sync) {
+          storage.sync(mapped, parcel.storage);
+        }
         return {
-          "response": storage.mapResponse(response, convert),
+          "response": mapped,
           "pass": parcel.pass
         };
       }
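Whether a successful fetch also kicks off the repair pass is now controlled by a force_sync flag read from app.storage_dict.property_dict. Where that dictionary is populated is outside this diff, so the snippet below is a hypothetical way of enabling the behaviour, using a stand-in app object purely to make the wiring explicit.

// Hypothetical configuration: property_dict itself is not defined in this commit,
// only the force_sync lookup is. "app" here is a stand-in for the loader's app object.
var app = {storage_dict: {property_dict: {}}};
app.storage_dict.property_dict.force_sync = true;  // every fetch now also triggers storage.sync

Note that storage.sync(mapped, parcel.storage) is called without chaining its promise, so fetchData resolves with the mapped response immediately and the repair work runs in the background.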