2017-07-28 07:40:24 +00:00
|
|
|
import localDbSvc from './localDbSvc';
|
|
|
|
import store from '../store';
|
|
|
|
import utils from './utils';
|
2017-08-25 10:37:46 +00:00
|
|
|
import diffUtils from './diffUtils';
|
2017-12-11 00:53:46 +00:00
|
|
|
import networkSvc from './networkSvc';
|
2017-09-23 19:01:50 +00:00
|
|
|
import providerRegistry from './providers/providerRegistry';
|
2017-12-10 23:49:20 +00:00
|
|
|
import googleDriveAppDataProvider from './providers/googleDriveAppDataProvider';
|
2017-12-17 15:08:52 +00:00
|
|
|
import './providers/googleDriveWorkspaceProvider';
|
2017-08-15 10:43:26 +00:00
|
|
|
|
|
|
|
// Window is considered inactive if it didn't refresh the lock for this long
const inactivityThreshold = 3 * 1000; // 3 sec
// A sync run is aborted and restarted if it takes longer than this
const restartSyncAfter = 30 * 1000; // 30 sec
// Lower bound enforced on the user-configured auto-sync period
const minAutoSyncEvery = 60 * 1000; // 60 sec

// The workspace sync provider in use; resolved in init()
let syncProvider;

/**
 * Use a lock in the local storage to prevent multiple windows concurrency.
 */
let lastSyncActivity;
|
2017-12-11 00:53:46 +00:00
|
|
|
/**
 * Read the last sync activity timestamp persisted in the local storage.
 * Returns 0 when the value is missing or not a parsable integer.
 */
const getLastStoredSyncActivity = () => {
  const storedValue = localStorage.getItem(store.getters['workspace/lastSyncActivityKey']);
  return parseInt(storedValue, 10) || 0;
};
|
2017-12-10 23:49:20 +00:00
|
|
|
|
|
|
|
/**
 * Return true if workspace sync is possible, ie a sync token is available.
 */
const isWorkspaceSyncPossible = () => Boolean(store.getters['workspace/syncToken']);
|
2017-12-10 23:49:20 +00:00
|
|
|
|
|
|
|
/**
 * Return true if the current file has at least one explicit sync location.
 */
const hasCurrentFileSyncLocations = () => store.getters['syncLocation/current'].length > 0;
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Return true if we are online and we have something to sync
 * (either the workspace itself or the current file's sync locations).
 */
const isSyncPossible = () => {
  if (store.state.offline) {
    return false;
  }
  return isWorkspaceSyncPossible() || hasCurrentFileSyncLocations();
};
|
2017-08-15 10:43:26 +00:00
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Return true if we are the main window, ie we have the lastSyncActivity lock
 * or the previous lock owner has been inactive for too long.
 */
function isSyncWindow() {
  const stored = getLastStoredSyncActivity();
  if (lastSyncActivity === stored) {
    // We wrote the stored value ourselves, so we own the lock
    return true;
  }
  // Another window owns the lock; take over only if it went inactive
  return Date.now() > stored + inactivityThreshold;
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Return true if auto sync can start, ie if lastSyncActivity is old enough
 * compared to the configured auto-sync period.
 */
function isAutoSyncReady() {
  const configured = store.getters['data/computedSettings'].autoSyncEvery;
  // Enforce the lower bound on the configured period
  const autoSyncEvery = configured < minAutoSyncEvery ? minAutoSyncEvery : configured;
  return Date.now() > autoSyncEvery + getLastStoredSyncActivity();
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Update the lastSyncActivity lock (in memory and in the local storage),
 * assuming we own it.
 */
function setLastSyncActivity() {
  const now = Date.now();
  // Remember the value we stored so isSyncWindow() can recognize our own lock
  lastSyncActivity = now;
  localStorage.setItem(store.getters['workspace/lastSyncActivityKey'], now);
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Clean a syncedContent in place: drop sync history entries whose sync
 * location no longer exists, then drop history data no longer referenced.
 */
function cleanSyncedContent(syncedContent) {
  // Clean syncHistory from removed syncLocations ('main' is implicit and kept)
  Object.keys(syncedContent.syncHistory).forEach((syncLocationId) => {
    const locationExists = syncLocationId === 'main'
      || !!store.state.syncLocation.itemMap[syncLocationId];
    if (!locationExists) {
      delete syncedContent.syncHistory[syncLocationId];
    }
  });
  // Collect every content hash still referenced by the remaining history items
  const referencedHashes = new Set();
  Object.keys(syncedContent.syncHistory).forEach((id) => {
    syncedContent.syncHistory[id].forEach(hash => referencedHashes.add(hash));
  });
  // Clean historyData from unused contents
  Object.keys(syncedContent.historyData).forEach((hashKey) => {
    if (!referencedHashes.has(parseInt(hashKey, 10))) {
      delete syncedContent.historyData[hashKey];
    }
  });
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Apply changes retrieved from the main provider. Update sync data accordingly.
 * Each change either removes an item (no change.item) or adds/modifies one.
 * Commits item mutations to the store and dispatches the new syncData map
 * if anything changed.
 */
function applyChanges(changes) {
  // Work on copies; mutations are committed back at the end / per item
  const storeItemMap = { ...store.getters.allItemMap };
  const syncData = { ...store.getters['data/syncData'] };
  let saveSyncData = false;

  changes.forEach((change) => {
    const existingSyncData = syncData[change.syncDataId];
    const existingItem = existingSyncData && storeItemMap[existingSyncData.itemId];
    if (!change.item && existingSyncData) {
      // Item was removed
      delete syncData[change.syncDataId];
      saveSyncData = true;
      if (existingItem) {
        // Remove object from the store
        store.commit(`${existingItem.type}/deleteItem`, existingItem.id);
        delete storeItemMap[existingItem.id];
      }
    } else if (change.item && change.item.hash) {
      // Item was modified
      syncData[change.syncDataId] = change.syncData;
      saveSyncData = true;
      if (
        // If no sync data or existing one is different
        (existingSyncData || {}).hash !== change.item.hash
        // And no existing item or existing item is different
        && (existingItem || {}).hash !== change.item.hash
        // And item is not content nor data, which will be merged later
        && change.item.type !== 'content' && change.item.type !== 'data'
      ) {
        store.commit(`${change.item.type}/setItem`, change.item);
        storeItemMap[change.item.id] = change.item;
      }
    }
  });

  if (saveSyncData) {
    store.dispatch('data/setSyncData', syncData);
  }
}
|
|
|
|
|
2017-08-25 10:37:46 +00:00
|
|
|
// Indexes into a syncHistory item array
const LAST_SENT = 0; // hash of the content last sent to the location
const LAST_MERGED = 1; // hash of the content last merged with the location
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Create a sync location by uploading the current file content.
 * Mutates syncLocation (assigns id and fileId), then enqueues the upload;
 * on success, records the sent hash in the file's syncedContent and stores
 * the sync location.
 */
function createSyncLocation(syncLocation) {
  syncLocation.id = utils.uid();
  const currentFile = store.getters['file/current'];
  const fileId = currentFile.id;
  syncLocation.fileId = fileId;
  // Use deepCopy to freeze item
  const content = utils.deepCopy(store.getters['content/current']);
  store.dispatch('queue/enqueue',
    () => {
      const provider = providerRegistry.providers[syncLocation.providerId];
      const token = provider.getToken(syncLocation);
      return provider.uploadContent(token, {
        ...content,
        history: [content.hash],
      }, syncLocation)
        .then(syncLocationToStore => localDbSvc.loadSyncedContent(fileId)
          .then(() => {
            const newSyncedContent = utils.deepCopy(
              store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
            const newSyncHistoryItem = [];
            newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
            // Record the hash we just sent for this new location
            newSyncHistoryItem[LAST_SENT] = content.hash;
            newSyncedContent.historyData[content.hash] = content;

            store.commit('syncedContent/patchItem', newSyncedContent);
            store.commit('syncLocation/setItem', syncLocationToStore);
            store.dispatch('notification/info', `A new synchronized location was added to "${currentFile.name}".`);
          }));
    });
}
|
|
|
|
|
2017-11-04 16:59:48 +00:00
|
|
|
/**
 * State shared across a whole synchronization run (possibly several files).
 */
class SyncContext {
  constructor() {
    Object.assign(this, {
      // Whether the whole sync has to be restarted after this run
      restart: false,
      // Content IDs whose syncing has already been attempted in this run
      synced: {},
    });
  }
}
|
|
|
|
|
|
|
|
/**
 * State of a single file synchronization, across its sync locations.
 */
class FileSyncContext {
  constructor() {
    Object.assign(this, {
      // Sync location IDs whose content has been downloaded in this file sync
      downloaded: {},
      // Sync location IDs that threw an error in this file sync
      errors: {},
    });
  }
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Sync one file with all its locations.
 * Loads the file's syncedContent and content from the local DB, then syncs
 * each location one at a time (workspace 'main' location first when
 * available), merging server and client contents and uploading when needed.
 * Restarts itself on TOO_LATE errors; always unloads contents afterwards.
 *
 * @param {string} fileId - ID of the file to sync
 * @param {SyncContext} syncContext - state of the enclosing sync run
 * @returns {Promise}
 */
function syncFile(fileId, syncContext = new SyncContext()) {
  const fileSyncContext = new FileSyncContext();
  // Mark content as attempted so the workspace sync won't pick it again
  syncContext.synced[`${fileId}/content`] = true;
  return localDbSvc.loadSyncedContent(fileId)
    .then(() => localDbSvc.loadItem(`${fileId}/content`)
      .catch(() => {})) // Item may not exist if content has not been downloaded yet
    .then(() => {
      // Always re-read from the store: commits below change hashes
      const getFile = () => store.state.file.itemMap[fileId];
      const getContent = () => store.state.content.itemMap[`${fileId}/content`];
      const getSyncedContent = () => store.state.syncedContent.itemMap[`${fileId}/syncedContent`];
      const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];

      // A location is synced if the last hash we sent is the current content hash
      const isLocationSynced = (syncLocation) => {
        const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
        return syncHistoryItem && syncHistoryItem[LAST_SENT] === getContent().hash;
      };

      // Detect the pristine welcome file, which should not be synced implicitly
      const isWelcomeFile = () => {
        if (store.getters['data/syncDataByItemId'][`${fileId}/content`]) {
          // If file has already been synced, keep on syncing
          return false;
        }
        const file = getFile();
        const content = getContent();
        if (!file || !content) {
          return false;
        }
        const welcomeFileHashes = store.getters['data/localSettings'].welcomeFileHashes;
        const hash = utils.hash(content.text);
        const hasDiscussions = Object.keys(content.discussions).length;
        return file.name === 'Welcome file' && welcomeFileHashes[hash] && !hasDiscussions;
      };

      // Sync the next out-of-sync location; recurses until none is left
      const syncOneContentLocation = () => {
        const syncLocations = [
          ...store.getters['syncLocation/groupedByFileId'][fileId] || [],
        ];
        if (isWorkspaceSyncPossible()) {
          // The workspace provider acts as an implicit 'main' location
          syncLocations.unshift({ id: 'main', providerId: syncProvider.id, fileId });
        }
        let result;
        syncLocations.some((syncLocation) => {
          const provider = providerRegistry.providers[syncLocation.providerId];
          if (
            // Skip if it previously threw an error
            !fileSyncContext.errors[syncLocation.id] &&
            // Skip if it has previously been downloaded and has not changed since then
            (!fileSyncContext.downloaded[syncLocation.id] || !isLocationSynced(syncLocation)) &&
            // Skip welcome file if not synchronized explicitly
            (syncLocations.length > 1 || !isWelcomeFile())
          ) {
            const token = provider && provider.getToken(syncLocation);
            result = token && store.dispatch('queue/doWithLocation', {
              location: syncLocation,
              promise: provider.downloadContent(token, syncLocation)
                .then((serverContent = null) => {
                  fileSyncContext.downloaded[syncLocation.id] = true;

                  const syncedContent = getSyncedContent();
                  const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
                  // Compute the merged content (IIFE keeps temporaries scoped)
                  let mergedContent = (() => {
                    const clientContent = utils.deepCopy(getContent());
                    if (!clientContent) {
                      return utils.deepCopy(serverContent);
                    }
                    if (!serverContent) {
                      // Sync location has not been created yet
                      return clientContent;
                    }
                    if (serverContent.hash === clientContent.hash) {
                      // Server and client contents are synced
                      return clientContent;
                    }
                    if (syncedContent.historyData[serverContent.hash]) {
                      // Server content has not changed or has already been merged
                      return clientContent;
                    }
                    // Perform a merge with last merged content if any, or a simple fusion otherwise
                    let lastMergedContent;
                    serverContent.history.some((hash) => {
                      lastMergedContent = syncedContent.historyData[hash];
                      return lastMergedContent;
                    });
                    if (!lastMergedContent && syncHistoryItem) {
                      lastMergedContent = syncedContent.historyData[syncHistoryItem[LAST_MERGED]];
                    }
                    return diffUtils.mergeContent(serverContent, clientContent, lastMergedContent);
                  })();

                  if (!mergedContent) {
                    // Merge failed; flag the location and move on
                    fileSyncContext.errors[syncLocation.id] = true;
                    return null;
                  }

                  // Update or set content in store (hash: 0 forces a recompute)
                  store.commit('content/setItem', {
                    id: `${fileId}/content`,
                    text: utils.sanitizeText(mergedContent.text),
                    properties: utils.sanitizeText(mergedContent.properties),
                    discussions: mergedContent.discussions,
                    comments: mergedContent.comments,
                    hash: 0,
                  });

                  // Retrieve content with new `hash` and freeze it
                  mergedContent = utils.deepCopy(getContent());

                  // Make merged content history
                  const mergedContentHistory = serverContent ? serverContent.history.slice() : [];
                  let skipUpload = true;
                  if (mergedContentHistory[0] !== mergedContent.hash) {
                    // Put merged content hash at the beginning of history
                    mergedContentHistory.unshift(mergedContent.hash);
                    // Server content is either out of sync or its history is incomplete, do upload
                    skipUpload = false;
                  }
                  if (syncHistoryItem && syncHistoryItem[0] !== mergedContent.hash) {
                    // Clean up by removing the hash we've previously added
                    const idx = mergedContentHistory.indexOf(syncHistoryItem[LAST_SENT]);
                    if (idx !== -1) {
                      mergedContentHistory.splice(idx, 1);
                    }
                  }

                  // Store last sent if it's in the server history,
                  // and merged content which will be sent if different
                  const newSyncedContent = utils.deepCopy(syncedContent);
                  const newSyncHistoryItem = newSyncedContent.syncHistory[syncLocation.id] || [];
                  newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
                  if (serverContent && (serverContent.hash === newSyncHistoryItem[LAST_SENT] ||
                    serverContent.history.indexOf(newSyncHistoryItem[LAST_SENT]) !== -1)
                  ) {
                    // The server has accepted the content we previously sent
                    newSyncHistoryItem[LAST_MERGED] = newSyncHistoryItem[LAST_SENT];
                  }
                  newSyncHistoryItem[LAST_SENT] = mergedContent.hash;
                  newSyncedContent.historyData[mergedContent.hash] = mergedContent;

                  // Clean synced content from unused revisions
                  cleanSyncedContent(newSyncedContent);
                  // Store synced content
                  store.commit('syncedContent/patchItem', newSyncedContent);

                  if (skipUpload) {
                    // Server content and merged content are equal, skip content upload
                    return null;
                  }

                  // Prevent from sending new content too long after old content has been fetched
                  const syncStartTime = Date.now();
                  const ifNotTooLate = cb => (res) => {
                    // No time to refresh a token...
                    if (syncStartTime + 500 < Date.now()) {
                      throw new Error('TOO_LATE');
                    }
                    return cb(res);
                  };

                  // Upload merged content
                  return provider.uploadContent(token, {
                    ...mergedContent,
                    history: mergedContentHistory,
                  }, syncLocation, ifNotTooLate)
                    .then((syncLocationToStore) => {
                      // Replace sync location if modified
                      if (utils.serializeObject(syncLocation) !==
                        utils.serializeObject(syncLocationToStore)
                      ) {
                        store.commit('syncLocation/patchItem', syncLocationToStore);
                      }

                      // If content was just created, restart sync to create the file as well
                      if (provider === syncProvider &&
                        !store.getters['data/syncDataByItemId'][fileId]
                      ) {
                        syncContext.restart = true;
                      }
                    });
                })
                .catch((err) => {
                  if (store.state.offline) {
                    // Offline errors abort the whole sync
                    throw err;
                  }
                  // Other errors only disable this location for the run
                  console.error(err); // eslint-disable-line no-console
                  store.dispatch('notification/error', err);
                  fileSyncContext.errors[syncLocation.id] = true;
                }),
            })
              .then(() => syncOneContentLocation());
          }
          return result;
        });
        return result;
      };

      return syncOneContentLocation();
    })
    .then(
      // Unload contents in both success and failure cases, then rethrow
      () => localDbSvc.unloadContents(),
      err => localDbSvc.unloadContents()
        .then(() => {
          throw err;
        }))
    .catch((err) => {
      if (err && err.message === 'TOO_LATE') {
        // Restart sync
        return syncFile(fileId, syncContext);
      }
      throw err;
    });
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Sync a data item, typically settings, workspaces and templates.
 * Downloads the server version, merges it with the client version using
 * dataSyncData as the common ancestor marker, updates the store and uploads
 * the merged item when it differs from the server's.
 *
 * @param {string} dataId - ID of the data item to sync
 * @returns {Promise|null} null when the item is already in sync
 */
function syncDataItem(dataId) {
  // Item may live in the indexedDB map or in the localStorage map
  const getItem = () => store.state.data.itemMap[dataId]
    || store.state.data.lsItemMap[dataId];

  const item = getItem();
  const syncData = store.getters['data/syncDataByItemId'][dataId];
  // Sync if item hash and syncData hash are inconsistent
  if (syncData && item && item.hash === syncData.hash) {
    return null;
  }

  return syncProvider.downloadData(dataId)
    .then((serverItem = null) => {
      const dataSyncData = store.getters['data/dataSyncData'][dataId];
      // Compute the merged item (IIFE keeps temporaries scoped)
      let mergedItem = (() => {
        const clientItem = utils.deepCopy(getItem());
        if (!clientItem) {
          return serverItem;
        }
        if (!serverItem) {
          return clientItem;
        }
        if (!dataSyncData) {
          // Never synced before: server version wins
          return serverItem;
        }
        if (dataSyncData.hash !== serverItem.hash) {
          // Server version has changed
          if (dataSyncData.hash !== clientItem.hash && typeof clientItem.data === 'object') {
            // Client version has changed as well, merge data objects
            return {
              ...clientItem,
              data: diffUtils.mergeObjects(serverItem.data, clientItem.data),
            };
          }
          return serverItem;
        }
        return clientItem;
      })();

      if (!mergedItem) {
        return null;
      }

      // Update item in store
      store.commit('data/setItem', {
        id: dataId,
        ...mergedItem,
      });

      // Retrieve item with new `hash` and freeze it
      mergedItem = utils.deepCopy(getItem());

      return Promise.resolve()
        .then(() => {
          if (serverItem && serverItem.hash === mergedItem.hash) {
            // Server already has this version, skip upload
            return null;
          }
          return syncProvider.uploadData(mergedItem, dataId);
        })
        .then(() => {
          // Record the synced hash as the new common ancestor marker
          store.dispatch('data/patchDataSyncData', {
            [dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
          });
        });
    });
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Sync the whole workspace with the main provider and the current file
 * explicit locations: validates the token sub, applies remote changes,
 * saves/removes simple items, syncs data items and then syncs files one by
 * one. Restarts itself when requested or on TOO_LATE errors.
 *
 * @returns {Promise}
 */
function syncWorkspace() {
  const workspace = store.getters['workspace/currentWorkspace'];
  const syncContext = new SyncContext();

  return Promise.resolve()
    .then(() => {
      // Store the sub in the DB since it's not safely stored in the token
      const syncToken = store.getters['workspace/syncToken'];
      const localSettings = store.getters['data/localSettings'];
      if (!localSettings.syncSub) {
        store.dispatch('data/patchLocalSettings', {
          syncSub: syncToken.sub,
        });
      } else if (localSettings.syncSub !== syncToken.sub) {
        // Refuse to sync with a token belonging to another account
        throw new Error('Synchronization failed due to token inconsistency.');
      }
    })
    .then(() => syncProvider.getChanges())
    .then((changes) => {
      // Apply changes
      applyChanges(changes);
      syncProvider.setAppliedChanges(changes);

      // Prevent from sending items too long after changes have been retrieved
      const syncStartTime = Date.now();
      const ifNotTooLate = cb => (res) => {
        if (syncStartTime + restartSyncAfter < Date.now()) {
          throw new Error('TOO_LATE');
        }
        return cb(res);
      };

      // Called until no item to save
      const saveNextItem = ifNotTooLate(() => {
        const storeItemMap = {
          ...store.state.file.itemMap,
          ...store.state.folder.itemMap,
          ...store.state.syncLocation.itemMap,
          ...store.state.publishLocation.itemMap,
          // Deal with contents and data later
        };
        const syncDataByItemId = store.getters['data/syncDataByItemId'];
        let promise;
        // `some` stops at the first item that needs saving
        Object.entries(storeItemMap).some(([id, item]) => {
          const existingSyncData = syncDataByItemId[id];
          if ((!existingSyncData || existingSyncData.hash !== item.hash)
            // Add file/folder if parent has been added
            && (!storeItemMap[item.parentId] || syncDataByItemId[item.parentId])
            // Add file if content has been added
            && (item.type !== 'file' || syncDataByItemId[`${id}/content`])
          ) {
            promise = syncProvider.saveSimpleItem(
              // Use deepCopy to freeze objects
              utils.deepCopy(item),
              utils.deepCopy(existingSyncData),
              ifNotTooLate,
            )
              .then(resultSyncData => store.dispatch('data/patchSyncData', {
                [resultSyncData.id]: resultSyncData,
              }))
              .then(() => saveNextItem());
          }
          return promise;
        });
        return promise;
      });

      // Called until no item to remove
      const removeNextItem = ifNotTooLate(() => {
        const storeItemMap = {
          ...store.state.file.itemMap,
          ...store.state.folder.itemMap,
          ...store.state.syncLocation.itemMap,
          ...store.state.publishLocation.itemMap,
          ...store.state.content.itemMap,
        };
        const syncData = store.getters['data/syncData'];
        let promise;
        // `some` stops at the first item that needs removing
        Object.entries(syncData).some(([, existingSyncData]) => {
          if (!storeItemMap[existingSyncData.itemId] &&
            // We don't want to delete data items, especially on first sync
            existingSyncData.type !== 'data' &&
            // Remove content only if file has been removed
            (existingSyncData.type !== 'content' || !storeItemMap[existingSyncData.itemId.split('/')[0]])
          ) {
            // Use deepCopy to freeze objects
            const syncDataToRemove = utils.deepCopy(existingSyncData);
            promise = syncProvider
              .removeItem(syncDataToRemove, ifNotTooLate)
              .then(() => {
                const syncDataCopy = { ...store.getters['data/syncData'] };
                delete syncDataCopy[syncDataToRemove.id];
                store.dispatch('data/setSyncData', syncDataCopy);
              })
              .then(() => removeNextItem());
          }
          return promise;
        });
        return promise;
      });

      // Find one file whose content is out of sync, loaded or not
      const getOneFileIdToSync = () => {
        const contentIds = [...new Set([
          ...Object.keys(localDbSvc.hashMap.content),
          ...store.getters['file/items'].map(file => `${file.id}/content`),
        ])];
        let fileId;
        contentIds.some((contentId) => {
          // Get content hash from itemMap or from localDbSvc if not loaded
          const loadedContent = store.state.content.itemMap[contentId];
          const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
          const syncData = store.getters['data/syncDataByItemId'][contentId];
          if (
            // Sync if syncData does not exist and content syncing was not attempted yet
            (!syncData && !syncContext.synced[contentId]) ||
            // Or if content hash and syncData hash are inconsistent
            (syncData && hash !== syncData.hash)
          ) {
            [fileId] = contentId.split('/');
          }
          return fileId;
        });
        return fileId;
      };

      // Sync files one by one until none is out of sync
      const syncNextFile = () => {
        const fileId = getOneFileIdToSync();
        if (!fileId) {
          return null;
        }
        return syncFile(fileId, syncContext)
          .then(() => syncNextFile());
      };

      return Promise.resolve()
        .then(() => saveNextItem())
        .then(() => removeNextItem())
        // Sync settings only in the main workspace
        .then(() => workspace.id === 'main' && syncDataItem('settings'))
        // Sync workspaces only in the main workspace
        .then(() => workspace.id === 'main' && syncDataItem('workspaces'))
        .then(() => syncDataItem('templates'))
        .then(() => {
          const currentFileId = store.getters['file/current'].id;
          if (currentFileId) {
            // Sync current file first
            return syncFile(currentFileId, syncContext)
              .then(() => syncNextFile());
          }
          return syncNextFile();
        })
        .then(
          () => {
            if (syncContext.restart) {
              // Restart sync
              return syncWorkspace();
            }
            return null;
          },
          (err) => {
            if (err && err.message === 'TOO_LATE') {
              // Restart sync
              return syncWorkspace();
            }
            throw err;
          });
    });
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
/**
 * Enqueue a sync task, if possible.
 * The task polls every second until the user is active and we own the sync
 * window lock, then runs a workspace sync (or a current-file sync as a
 * fallback) while keeping the lock refreshed, and finally cleans old
 * trashed files.
 */
function requestSync() {
  store.dispatch('queue/enqueueSyncRequest', () => new Promise((resolve, reject) => {
    let intervalId;
    const attempt = () => {
      // Only start syncing when these conditions are met
      if (networkSvc.isUserActive() && isSyncWindow()) {
        clearInterval(intervalId);
        if (!isSyncPossible()) {
          // Cancel sync
          reject('Sync not possible.');
          return;
        }

        // Determine if we have to clean files
        const fileHashesToClean = {};
        if (getLastStoredSyncActivity() + utils.cleanTrashAfter < Date.now()) {
          // Last synchronization happened cleanTrashAfter ago (7 days)
          const syncDataByItemId = store.getters['data/syncDataByItemId'];
          store.getters['file/items'].forEach((file) => {
            // If file is in the trash and has not been modified since it was last synced
            const syncData = syncDataByItemId[file.id];
            if (syncData && file.parentId === 'trash' && file.hash === syncData.hash) {
              fileHashesToClean[file.id] = file.hash;
            }
          });
        }

        // Call setLastSyncActivity periodically to keep the lock
        intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
        setLastSyncActivity();
        // Stop refreshing the lock before settling the promise
        const cleaner = cb => (res) => {
          clearInterval(intervalId);
          cb(res);
        };

        Promise.resolve()
          .then(() => {
            if (isWorkspaceSyncPossible()) {
              return syncWorkspace();
            }
            if (hasCurrentFileSyncLocations()) {
              // Only sync current file if workspace sync is unavailable.
              // We could also sync files that are out-of-sync but it would
              // require to load all the syncedContent objects from the DB.
              return syncFile(store.getters['file/current'].id);
            }
            return null;
          })
          .then(() => {
            // Clean files
            Object.entries(fileHashesToClean).forEach(([fileId, fileHash]) => {
              const file = store.state.file.itemMap[fileId];
              // Only delete if the file was not modified during the sync
              if (file && file.hash === fileHash) {
                store.dispatch('deleteFile', fileId);
              }
            });
          })
          .then(cleaner(resolve), cleaner(reject));
      }
    };
    intervalId = utils.setInterval(() => attempt(), 1000);
    attempt();
  }));
}
|
|
|
|
|
2017-12-10 23:49:20 +00:00
|
|
|
export default {
  /**
   * Initialize the sync service: resolve the workspace sync provider,
   * initialize the workspace and the local DB, then start the periodic
   * auto-sync and content-unloading timers.
   *
   * @returns {Promise}
   */
  init() {
    // Load workspaces and tokens from localStorage
    localDbSvc.syncLocalStorage();

    // Try to find a suitable workspace sync provider
    syncProvider = providerRegistry.providers[utils.queryParams.providerId];
    if (!syncProvider || !syncProvider.initWorkspace) {
      // Fall back to the Google Drive app data provider
      syncProvider = googleDriveAppDataProvider;
    }

    return syncProvider.initWorkspace()
      .then(workspace => store.dispatch('workspace/setCurrentWorkspaceId', workspace.id))
      .then(() => localDbSvc.init())
      .then(() => {
        // Sync periodically
        utils.setInterval(() => {
          if (isSyncPossible() &&
            networkSvc.isUserActive() &&
            isSyncWindow() &&
            isAutoSyncReady()
          ) {
            requestSync();
          }
        }, 1000);

        // Unload contents from memory periodically
        utils.setInterval(() => {
          // Wait for sync and publish to finish
          if (store.state.queue.isEmpty) {
            localDbSvc.unloadContents();
          }
        }, 5000);
      });
  },
  isSyncPossible,
  requestSync,
  createSyncLocation,
};
|