// src/services/syncSvc.js — synchronization service
import localDbSvc from './localDbSvc';
import store from '../store';
import utils from './utils';
import diffUtils from './diffUtils';
import providerRegistry from './providers/providerRegistry';
import googleDriveAppDataProvider from './providers/googleDriveAppDataProvider';
// Consider the sync-lock holder inactive after this delay.
const inactivityThreshold = 3 * 1000; // 3 sec
// Abort and restart a workspace sync that runs longer than this.
const restartSyncAfter = 30 * 1000; // 30 sec
// Minimum delay between two automatic syncs (randomized to spread windows).
const autoSyncAfter = utils.randomize(60 * 1000); // 60 sec
// Resolved in init(); provider that hosts the current workspace.
let workspaceProvider;

/**
 * A lock in localStorage prevents multiple windows from syncing concurrently.
 */
const lastSyncActivityKey = `${utils.workspaceId}/lastSyncActivity`;
let lastSyncActivity;
const getLastStoredSyncActivity = () => {
  const stored = parseInt(localStorage[lastSyncActivityKey], 10);
  return stored || 0;
};
/**
 * Return true if workspace sync is possible, i.e. a login token exists.
 * Side effect: clears stale sync data when the token has been removed.
 */
const isWorkspaceSyncPossible = () => {
  const loginToken = store.getters['data/loginToken'];
  if (loginToken) {
    return true;
  }
  // Token was removed: drop any sync data left behind.
  const hasStaleSyncData = Object.keys(store.getters['data/syncData']).length > 0;
  if (hasStaleSyncData) {
    store.dispatch('data/setSyncData', {});
  }
  return false;
};
/**
 * Return true if the current file has at least one explicit sync location.
 */
const hasCurrentFileSyncLocations = () => store.getters['syncLocation/current'].length > 0;
/**
 * Return true if we are online and have something to sync
 * (either the workspace or explicit file sync locations).
 */
const isSyncPossible = () => {
  if (store.state.offline) {
    return false;
  }
  return isWorkspaceSyncPossible() || hasCurrentFileSyncLocations();
};
/**
 * Return true if this window is the sync window, i.e. it wrote the last
 * sync activity timestamp itself or the previous holder has gone inactive.
 */
function isSyncWindow() {
  const stored = getLastStoredSyncActivity();
  if (lastSyncActivity === stored) {
    // We hold the lock ourselves.
    return true;
  }
  // The lock holder stopped refreshing the timestamp; take over.
  return Date.now() > inactivityThreshold + stored;
}
/**
 * Return true if auto sync can start, i.e. the last sync activity
 * happened long enough ago.
 */
function isAutoSyncReady() {
  return Date.now() - getLastStoredSyncActivity() > autoSyncAfter;
}
/**
 * Refresh the lastSyncActivity timestamp, assuming we hold the lock.
 */
function setLastSyncActivity() {
  const now = Date.now();
  lastSyncActivity = now;
  localStorage[lastSyncActivityKey] = now;
}
/**
 * Clean a syncedContent item in place: drop sync history entries whose
 * location was removed, then prune cached revisions no longer referenced.
 */
function cleanSyncedContent(syncedContent) {
  const { syncHistory, historyData } = syncedContent;
  // Remove syncHistory entries for removed sync locations.
  // 'main' is the implicit workspace location and is always kept.
  Object.keys(syncHistory).forEach((locationId) => {
    if (locationId !== 'main' && !store.state.syncLocation.itemMap[locationId]) {
      delete syncHistory[locationId];
    }
  });
  // Collect every content hash still referenced by a sync location.
  const referencedHashes = new Set();
  Object.keys(syncHistory).forEach((locationId) => {
    syncHistory[locationId].forEach(hash => referencedHashes.add(hash));
  });
  // Drop cached revisions that nothing references anymore.
  Object.keys(historyData).forEach((key) => {
    const hash = parseInt(key, 10);
    if (!referencedHashes.has(hash)) {
      delete historyData[hash];
    }
  });
}
/**
 * Apply changes retrieved from the main provider and update sync data accordingly.
 * @param {Array} changes The changes to apply (each has fileId, removed, item, syncData).
 */
function applyChanges(changes) {
  const storeItemMap = { ...store.getters.allItemMap };
  const syncData = { ...store.getters['data/syncData'] };
  let syncDataChanged = false;
  changes.forEach((change) => {
    const existingSyncData = syncData[change.fileId];
    const existingItem = existingSyncData && storeItemMap[existingSyncData.itemId];
    if (change.removed) {
      if (!existingSyncData) {
        // Nothing known locally for this change; ignore it.
        return;
      }
      if (existingItem) {
        // Remove the object from the store.
        store.commit(`${existingItem.type}/deleteItem`, existingItem.id);
        delete storeItemMap[existingItem.id];
      }
      delete syncData[change.fileId];
      syncDataChanged = true;
      return;
    }
    if (!change.item || !change.item.hash) {
      return;
    }
    // The server revision is new if neither the sync data nor the store
    // item already carries this hash.
    const isNewRevision = !existingSyncData || (
      existingSyncData.hash !== change.item.hash &&
      (!existingItem || existingItem.hash !== change.item.hash)
    );
    if (isNewRevision && change.item.type !== 'content') {
      // Put the object in the store (contents are merged later).
      store.commit(`${change.item.type}/setItem`, change.item);
      storeItemMap[change.item.id] = change.item;
    }
    syncData[change.fileId] = change.syncData;
    syncDataChanged = true;
  });
  if (syncDataChanged) {
    store.dispatch('data/setSyncData', syncData);
  }
}
// Indexes into a syncHistory entry: [hash last sent, hash last merged].
const LAST_SENT = 0;
const LAST_MERGED = 1;
/**
 * Create a sync location by uploading the current file content to it.
 * Mutates syncLocation (sets its id and fileId), then enqueues the upload.
 */
function createSyncLocation(syncLocation) {
  syncLocation.id = utils.uid();
  const currentFile = store.getters['file/current'];
  const { id: fileId } = currentFile;
  syncLocation.fileId = fileId;
  // Deep copy freezes the content snapshot we are about to upload.
  const content = utils.deepCopy(store.getters['content/current']);
  store.dispatch('queue/enqueue', () => {
    const provider = providerRegistry.providers[syncLocation.providerId];
    const token = provider.getToken(syncLocation);
    const contentToUpload = {
      ...content,
      history: [content.hash],
    };
    return provider.uploadContent(token, contentToUpload, syncLocation)
      .then(syncLocationToStore => localDbSvc.loadSyncedContent(fileId)
        .then(() => {
          const newSyncedContent = utils.deepCopy(
            store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
          // Record the uploaded hash as the last one sent to this location.
          const newSyncHistoryItem = [];
          newSyncHistoryItem[LAST_SENT] = content.hash;
          newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
          newSyncedContent.historyData[content.hash] = content;
          store.commit('syncedContent/patchItem', newSyncedContent);
          store.commit('syncLocation/setItem', syncLocationToStore);
          store.dispatch('notification/info', `A new synchronized location was added to "${currentFile.name}".`);
        }));
  });
}
/**
 * State shared across one whole sync run.
 */
class SyncContext {
  constructor() {
    // Set to true when the sync has to be restarted from scratch.
    this.restart = false;
    // Content ids whose sync has already been attempted in this run.
    this.synced = {};
  }
}
/**
 * State shared across the sync of one file's locations.
 */
class FileSyncContext {
  constructor() {
    // Location ids whose server content has been downloaded.
    this.downloaded = {};
    // Location ids that threw an error during this file sync.
    this.errors = {};
  }
}
/**
 * Sync one file with all its sync locations.
 * The workspace ('main') location is tried first, then explicit locations.
 * For each location: download the server content, three-way merge it with the
 * client content, then upload the merged result unless the server is up to date.
 * @param {string} fileId Id of the file to sync.
 * @param {SyncContext} syncContext State shared across the whole sync run.
 * @returns {Promise} Resolved once every location has been processed.
 */
function syncFile(fileId, syncContext = new SyncContext()) {
  const fileSyncContext = new FileSyncContext();
  syncContext.synced[`${fileId}/content`] = true;
  return localDbSvc.loadSyncedContent(fileId)
    .then(() => localDbSvc.loadItem(`${fileId}/content`)
      .catch(() => {})) // Item may not exist if content has not been downloaded yet
    .then(() => {
      const getFile = () => store.state.file.itemMap[fileId];
      const getContent = () => store.state.content.itemMap[`${fileId}/content`];
      const getSyncedContent = () => store.state.syncedContent.itemMap[`${fileId}/syncedContent`];
      const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];

      // A location is synced if the last hash we sent is the current content hash.
      const isLocationSynced = (syncLocation) => {
        const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
        return syncHistoryItem && syncHistoryItem[LAST_SENT] === getContent().hash;
      };

      // True for a pristine welcome file that has never been synced.
      const isWelcomeFile = () => {
        if (store.getters['data/syncDataByItemId'][`${fileId}/content`]) {
          // If file has already been synced, keep on syncing
          return false;
        }
        const file = getFile();
        const content = getContent();
        if (!file || !content) {
          return false;
        }
        const welcomeFileHashes = store.getters['data/localSettings'].welcomeFileHashes;
        const hash = utils.hash(content.text);
        const hasDiscussions = Object.keys(content.discussions).length;
        return file.name === 'Welcome file' && welcomeFileHashes[hash] && !hasDiscussions;
      };

      // Recursively sync one location at a time until none is left to process.
      const syncOneContentLocation = () => {
        const syncLocations = [
          ...store.getters['syncLocation/groupedByFileId'][fileId] || [],
        ];
        if (isWorkspaceSyncPossible()) {
          // FIX: `mainProvider` was not defined in this module; the workspace
          // provider is resolved in init() and stored in `workspaceProvider`.
          syncLocations.unshift({ id: 'main', providerId: workspaceProvider.id, fileId });
        }
        let result;
        syncLocations.some((syncLocation) => {
          const provider = providerRegistry.providers[syncLocation.providerId];
          if (
            // Skip if it previously threw an error
            !fileSyncContext.errors[syncLocation.id] &&
            // Skip if it has previously been downloaded and has not changed since then
            (!fileSyncContext.downloaded[syncLocation.id] || !isLocationSynced(syncLocation)) &&
            // Skip welcome file if not synchronized explicitly
            (syncLocations.length > 1 || !isWelcomeFile())
          ) {
            const token = provider && provider.getToken(syncLocation);
            result = token && store.dispatch('queue/doWithLocation', {
              location: syncLocation,
              promise: provider.downloadContent(token, syncLocation)
                .then((serverContent = null) => {
                  fileSyncContext.downloaded[syncLocation.id] = true;
                  const syncedContent = getSyncedContent();
                  const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
                  // Three-way merge of server and client contents.
                  let mergedContent = (() => {
                    const clientContent = utils.deepCopy(getContent());
                    if (!clientContent) {
                      return utils.deepCopy(serverContent);
                    }
                    if (!serverContent) {
                      // Sync location has not been created yet
                      return clientContent;
                    }
                    if (serverContent.hash === clientContent.hash) {
                      // Server and client contents are synced
                      return clientContent;
                    }
                    if (syncedContent.historyData[serverContent.hash]) {
                      // Server content has not changed or has already been merged
                      return clientContent;
                    }
                    // Perform a merge with last merged content if any, or a simple fusion otherwise
                    let lastMergedContent;
                    serverContent.history.some((hash) => {
                      lastMergedContent = syncedContent.historyData[hash];
                      return lastMergedContent;
                    });
                    if (!lastMergedContent && syncHistoryItem) {
                      lastMergedContent = syncedContent.historyData[syncHistoryItem[LAST_MERGED]];
                    }
                    return diffUtils.mergeContent(serverContent, clientContent, lastMergedContent);
                  })();
                  if (!mergedContent) {
                    // Merge failed; remember the error and skip this location.
                    fileSyncContext.errors[syncLocation.id] = true;
                    return null;
                  }
                  // Update or set content in store
                  store.commit('content/setItem', {
                    id: `${fileId}/content`,
                    text: utils.sanitizeText(mergedContent.text),
                    properties: utils.sanitizeText(mergedContent.properties),
                    discussions: mergedContent.discussions,
                    comments: mergedContent.comments,
                    hash: 0,
                  });
                  // Retrieve content with new `hash` and freeze it
                  mergedContent = utils.deepCopy(getContent());
                  // Make merged content history
                  const mergedContentHistory = serverContent ? serverContent.history.slice() : [];
                  let skipUpload = true;
                  if (mergedContentHistory[0] !== mergedContent.hash) {
                    // Put merged content hash at the beginning of history
                    mergedContentHistory.unshift(mergedContent.hash);
                    // Server content is either out of sync or its history is incomplete, do upload
                    skipUpload = false;
                  }
                  // Consistency: use LAST_SENT instead of the magic index 0.
                  if (syncHistoryItem && syncHistoryItem[LAST_SENT] !== mergedContent.hash) {
                    // Clean up by removing the hash we've previously added
                    const idx = mergedContentHistory.indexOf(syncHistoryItem[LAST_SENT]);
                    if (idx !== -1) {
                      mergedContentHistory.splice(idx, 1);
                    }
                  }
                  // Store last sent if it's in the server history,
                  // and merged content which will be sent if different
                  const newSyncedContent = utils.deepCopy(syncedContent);
                  const newSyncHistoryItem = newSyncedContent.syncHistory[syncLocation.id] || [];
                  newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
                  if (serverContent &&
                    (serverContent.hash === newSyncHistoryItem[LAST_SENT] ||
                      serverContent.history.indexOf(newSyncHistoryItem[LAST_SENT]) !== -1)
                  ) {
                    // The server has accepted the content we previously sent
                    newSyncHistoryItem[LAST_MERGED] = newSyncHistoryItem[LAST_SENT];
                  }
                  newSyncHistoryItem[LAST_SENT] = mergedContent.hash;
                  newSyncedContent.historyData[mergedContent.hash] = mergedContent;
                  // Clean synced content from unused revisions
                  cleanSyncedContent(newSyncedContent);
                  // Store synced content
                  store.commit('syncedContent/patchItem', newSyncedContent);
                  if (skipUpload) {
                    // Server content and merged content are equal, skip content upload
                    return null;
                  }
                  // Prevent from sending new content too long after old content has been fetched
                  const syncStartTime = Date.now();
                  const ifNotTooLate = cb => (res) => {
                    // No time to refresh a token...
                    if (syncStartTime + 500 < Date.now()) {
                      throw new Error('TOO_LATE');
                    }
                    return cb(res);
                  };
                  // Upload merged content
                  return provider.uploadContent(token, {
                    ...mergedContent,
                    history: mergedContentHistory,
                  }, syncLocation, ifNotTooLate)
                    .then((syncLocationToStore) => {
                      // Replace sync location if modified
                      if (utils.serializeObject(syncLocation) !==
                        utils.serializeObject(syncLocationToStore)
                      ) {
                        store.commit('syncLocation/patchItem', syncLocationToStore);
                      }
                      // If content was just created, restart sync to create the file as well.
                      // FIX: `mainProvider` was undefined; compare against workspaceProvider.
                      if (provider === workspaceProvider &&
                        !store.getters['data/syncDataByItemId'][fileId]
                      ) {
                        syncContext.restart = true;
                      }
                    });
                })
                .catch((err) => {
                  if (store.state.offline) {
                    // Offline errors abort the whole sync.
                    throw err;
                  }
                  console.error(err); // eslint-disable-line no-console
                  store.dispatch('notification/error', err);
                  fileSyncContext.errors[syncLocation.id] = true;
                }),
            })
              .then(() => syncOneContentLocation());
          }
          return result;
        });
        return result;
      };
      return syncOneContentLocation();
    })
    .then(
      // Unload contents from memory whether the sync succeeded or not.
      () => localDbSvc.unloadContents(),
      err => localDbSvc.unloadContents()
        .then(() => {
          throw err;
        }))
    .catch((err) => {
      if (err && err.message === 'TOO_LATE') {
        // Restart sync
        return syncFile(fileId, syncContext);
      }
      throw err;
    });
}
/**
 * Sync a data item, typically settings or templates, with the workspace provider.
 * @param {string} dataId Id of the data item ('settings' or 'templates').
 * @returns {Promise|null} Null when already in sync or no token is available.
 */
function syncDataItem(dataId) {
  const item = store.state.data.itemMap[dataId];
  const syncData = store.getters['data/syncDataByItemId'][dataId];
  // Sync only if item hash and syncData hash are inconsistent
  if (syncData && item && item.hash === syncData.hash) {
    return null;
  }
  // FIX: `mainProvider` was not defined in this module; the workspace
  // provider resolved in init() is used instead.
  const token = workspaceProvider.getToken();
  return token && workspaceProvider.downloadData(token, dataId)
    .then((serverItem = null) => {
      const dataSyncData = store.getters['data/dataSyncData'][dataId];
      // Pick or merge the winning version of the item.
      let mergedItem = (() => {
        const clientItem = utils.deepCopy(store.state.data.itemMap[dataId]);
        if (!clientItem) {
          return serverItem;
        }
        if (!serverItem) {
          return clientItem;
        }
        if (!dataSyncData) {
          // Never synced before: server wins.
          return serverItem;
        }
        if (dataSyncData.hash !== serverItem.hash) {
          // Server version has changed
          if (dataSyncData.hash !== clientItem.hash && typeof clientItem.data === 'object') {
            // Client version has changed as well, merge data objects
            return {
              ...clientItem,
              data: diffUtils.mergeObjects(serverItem.data, clientItem.data),
            };
          }
          return serverItem;
        }
        return clientItem;
      })();
      if (!mergedItem) {
        return null;
      }
      // Update item in store
      store.commit('data/setItem', {
        id: dataId,
        ...mergedItem,
      });
      // Retrieve item with new `hash` and freeze it
      mergedItem = utils.deepCopy(store.state.data.itemMap[dataId]);
      return Promise.resolve()
        .then(() => {
          if (serverItem && serverItem.hash === mergedItem.hash) {
            // Server is already up to date, skip the upload.
            return null;
          }
          return workspaceProvider.uploadData(
            token,
            mergedItem,
            dataId,
          );
        })
        .then(() => {
          // Remember the provider revision we are now in sync with.
          store.dispatch('data/patchDataSyncData', {
            [dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
          });
        });
    });
}
/**
 * Sync the whole workspace with the workspace provider and the current file's
 * explicit sync locations: apply server changes, push local saves/removals,
 * sync settings and templates, then sync out-of-date file contents one by one.
 * @returns {Promise}
 */
function syncWorkspace() {
  const syncContext = new SyncContext();
  const mainToken = store.getters['data/loginToken'];
  // FIX: `mainProvider` was not defined in this module; the workspace
  // provider resolved in init() is used instead (here and below).
  return workspaceProvider.getChanges(mainToken)
    .then((changes) => {
      // Apply changes
      applyChanges(changes);
      workspaceProvider.setAppliedChanges(mainToken, changes);

      // Prevent from sending items too long after changes have been retrieved
      const syncStartTime = Date.now();
      const ifNotTooLate = cb => (res) => {
        if (syncStartTime + restartSyncAfter < Date.now()) {
          throw new Error('TOO_LATE');
        }
        return cb(res);
      };

      // Called recursively until there is no item left to save.
      const saveNextItem = ifNotTooLate(() => {
        const storeItemMap = {
          ...store.state.file.itemMap,
          ...store.state.folder.itemMap,
          ...store.state.syncLocation.itemMap,
          ...store.state.publishLocation.itemMap,
          // Deal with contents and data later
        };
        const syncDataByItemId = store.getters['data/syncDataByItemId'];
        let result;
        Object.entries(storeItemMap).some(([id, item]) => {
          const existingSyncData = syncDataByItemId[id];
          if ((!existingSyncData || existingSyncData.hash !== item.hash) &&
            // Add file only once its content has been uploaded
            (item.type !== 'file' || syncDataByItemId[`${id}/content`])
          ) {
            result = workspaceProvider.saveItem(
              mainToken,
              // Use deepCopy to freeze objects
              utils.deepCopy(item),
              utils.deepCopy(existingSyncData),
              ifNotTooLate,
            )
              .then(resultSyncData => store.dispatch('data/patchSyncData', {
                [resultSyncData.id]: resultSyncData,
              }))
              .then(() => saveNextItem());
          }
          return result;
        });
        return result;
      });

      // Called recursively until there is no item left to remove.
      const removeNextItem = ifNotTooLate(() => {
        const storeItemMap = {
          ...store.state.file.itemMap,
          ...store.state.folder.itemMap,
          ...store.state.syncLocation.itemMap,
          ...store.state.publishLocation.itemMap,
          ...store.state.content.itemMap,
          ...store.state.data.itemMap,
        };
        const syncData = store.getters['data/syncData'];
        let result;
        Object.entries(syncData).some(([, existingSyncData]) => {
          if (!storeItemMap[existingSyncData.itemId] &&
            // Remove content only if its file has been removed
            (existingSyncData.type !== 'content' || !storeItemMap[existingSyncData.itemId.split('/')[0]])
          ) {
            // Use deepCopy to freeze objects
            const syncDataToRemove = utils.deepCopy(existingSyncData);
            result = workspaceProvider
              .removeItem(mainToken, syncDataToRemove, ifNotTooLate)
              .then(() => {
                const syncDataCopy = { ...store.getters['data/syncData'] };
                delete syncDataCopy[syncDataToRemove.id];
                store.dispatch('data/setSyncData', syncDataCopy);
              })
              .then(() => removeNextItem());
          }
          return result;
        });
        return result;
      });

      // Find one file whose content is out of sync, if any.
      const getOneFileIdToSync = () => {
        const contentIds = [...new Set([
          ...Object.keys(localDbSvc.hashMap.content),
          ...store.getters['file/items'].map(file => `${file.id}/content`),
        ])];
        let fileId;
        contentIds.some((contentId) => {
          // Get content hash from itemMap or from localDbSvc if not loaded
          const loadedContent = store.state.content.itemMap[contentId];
          const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
          const syncData = store.getters['data/syncDataByItemId'][contentId];
          if (
            // Sync if syncData does not exist and content syncing was not attempted yet
            (!syncData && !syncContext.synced[contentId]) ||
            // Or if content hash and syncData hash are inconsistent
            (syncData && hash !== syncData.hash)
          ) {
            [fileId] = contentId.split('/');
          }
          return fileId;
        });
        return fileId;
      };

      // Called recursively until there is no file left to sync.
      const syncNextFile = () => {
        const fileId = getOneFileIdToSync();
        if (!fileId) {
          return null;
        }
        return syncFile(fileId, syncContext)
          .then(() => syncNextFile());
      };

      return Promise.resolve()
        .then(() => saveNextItem())
        .then(() => removeNextItem())
        .then(() => syncDataItem('settings'))
        .then(() => syncDataItem('templates'))
        .then(() => {
          const currentFileId = store.getters['file/current'].id;
          if (currentFileId) {
            // Sync current file first
            return syncFile(currentFileId, syncContext)
              .then(() => syncNextFile());
          }
          return syncNextFile();
        })
        .then(
          () => {
            if (syncContext.restart) {
              // Restart sync
              return syncWorkspace();
            }
            return null;
          },
          (err) => {
            if (err && err.message === 'TOO_LATE') {
              // Restart sync
              return syncWorkspace();
            }
            throw err;
          });
    });
}
/**
 * Enqueue a sync task, if possible. Waits until the user is active and this
 * window owns the sync lock, then runs a workspace or single-file sync and
 * optionally cleans stale files from the trash.
 */
function requestSync() {
  store.dispatch('queue/enqueueSyncRequest', () => new Promise((resolve, reject) => {
    let intervalId;
    const attempt = () => {
      // Only start syncing when these conditions are met
      if (!utils.isUserActive() || !isSyncWindow()) {
        return;
      }
      clearInterval(intervalId);
      if (!isSyncPossible()) {
        // Cancel sync
        reject('Sync not possible.');
        return;
      }

      // Determine which trashed files can be cleaned up.
      const fileHashesToClean = {};
      if (getLastStoredSyncActivity() + utils.cleanTrashAfter < Date.now()) {
        // Last synchronization happened long enough ago.
        const syncDataByItemId = store.getters['data/syncDataByItemId'];
        store.getters['file/items'].forEach((file) => {
          // Clean files in the trash that were not modified since last sync.
          const syncData = syncDataByItemId[file.id];
          if (syncData && file.parentId === 'trash' && file.hash === syncData.hash) {
            fileHashesToClean[file.id] = file.hash;
          }
        });
      }

      // Keep refreshing the sync lock while the sync runs.
      intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
      setLastSyncActivity();
      const stopAndForward = cb => (res) => {
        clearInterval(intervalId);
        cb(res);
      };

      Promise.resolve()
        .then(() => {
          if (isWorkspaceSyncPossible()) {
            return syncWorkspace();
          }
          if (hasCurrentFileSyncLocations()) {
            // Only sync current file if workspace sync is unavailable.
            // We also could sync files that are out-of-sync but it would
            // require to load the syncedContent objects of all files.
            return syncFile(store.getters['file/current'].id);
          }
          return null;
        })
        .then(() => {
          // Clean files that are still unchanged in the trash.
          Object.entries(fileHashesToClean).forEach(([fileId, fileHash]) => {
            const file = store.state.file.itemMap[fileId];
            if (file && file.hash === fileHash) {
              store.dispatch('deleteFile', fileId);
            }
          });
        })
        .then(stopAndForward(resolve), stopAndForward(reject));
    };
    intervalId = utils.setInterval(() => attempt(), 1000);
    attempt();
  }));
}
export default {
  /**
   * Resolve the workspace provider, initialize the workspace and the local
   * database, then start the periodic sync and memory-unload timers.
   */
  init() {
    // Load workspaces and tokens from localStorage
    localDbSvc.syncLocalStorage();

    // Try to find a suitable workspace provider, defaulting to Google Drive app data.
    workspaceProvider = providerRegistry.providers[utils.queryParams.providerId];
    if (!workspaceProvider || !workspaceProvider.initWorkspace) {
      workspaceProvider = googleDriveAppDataProvider;
    }

    return workspaceProvider.initWorkspace()
      .then(workspace => store.commit('workspace/setCurrentWorkspaceId', workspace.id))
      .then(() => localDbSvc.init())
      .then(() => {
        // Trigger a sync periodically when all conditions are met.
        utils.setInterval(() => {
          if (isSyncPossible()
            && utils.isUserActive()
            && isSyncWindow()
            && isAutoSyncReady()
          ) {
            requestSync();
          }
        }, 1000);

        // Unload contents from memory periodically.
        utils.setInterval(() => {
          // Wait for sync and publish to finish
          if (store.state.queue.isEmpty) {
            localDbSvc.unloadContents();
          }
        }, 5000);
      });
  },
  isSyncPossible,
  requestSync,
  createSyncLocation,
};