Stackedit/src/services/syncSvc.js

import localDbSvc from './localDbSvc';
import store from '../store';
import welcomeFile from '../data/welcomeFile.md';
import utils from './utils';
import diffUtils from './diffUtils';
import providerRegistry from './providers/providerRegistry';
import mainProvider from './providers/googleDriveAppDataProvider';

const lastSyncActivityKey = `${utils.workspaceId}/lastSyncActivity`;
let lastSyncActivity;
const getStoredLastSyncActivity = () => parseInt(localStorage[lastSyncActivityKey], 10) || 0;
const inactivityThreshold = 3 * 1000; // 3 sec
const restartSyncAfter = 30 * 1000; // 30 sec
const autoSyncAfter = utils.randomize(60 * 1000); // 60 sec
const isDataSyncPossible = () => !!store.getters['data/loginToken'];
const hasCurrentFileSyncLocations = () => !!store.getters['syncLocation/current'].length;
const isSyncPossible = () => !store.state.offline &&
(isDataSyncPossible() || hasCurrentFileSyncLocations());
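// The sync window is open if this tab performed the last sync activity,
// or if no sync activity has been reported for longer than the inactivity threshold.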
function isSyncWindow() {
const storedLastSyncActivity = getStoredLastSyncActivity();
return lastSyncActivity === storedLastSyncActivity ||
Date.now() > inactivityThreshold + storedLastSyncActivity;
}
function isAutoSyncReady() {
const storedLastSyncActivity = getStoredLastSyncActivity();
return Date.now() > autoSyncAfter + storedLastSyncActivity;
}
function setLastSyncActivity() {
const currentDate = Date.now();
lastSyncActivity = currentDate;
localStorage[lastSyncActivityKey] = currentDate;
}
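// Remove sync history entries whose sync location no longer exists,
// then drop cached content revisions that are no longer referenced.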
function cleanSyncedContent(syncedContent) {
// Clean syncHistory from removed syncLocations
Object.keys(syncedContent.syncHistory).forEach((syncLocationId) => {
if (syncLocationId !== 'main' && !store.state.syncLocation.itemMap[syncLocationId]) {
delete syncedContent.syncHistory[syncLocationId];
}
});
const allSyncLocationHashes = new Set([].concat(
...Object.keys(syncedContent.syncHistory).map(
id => syncedContent.syncHistory[id])));
// Clean historyData from unused contents
Object.keys(syncedContent.historyData).map(hash => parseInt(hash, 10)).forEach((hash) => {
if (!allSyncLocationHashes.has(hash)) {
delete syncedContent.historyData[hash];
}
});
}
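// Generic loader: load a file related item (content, syncedContent, contentState)
// from the local DB, or create an empty one in the store if it doesn't exist yet.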
const loader = type => fileId => localDbSvc.loadItem(`${fileId}/${type}`)
// Item does not exist, create it
.catch(() => store.commit(`${type}/setItem`, {
id: `${fileId}/${type}`,
}));
const loadContent = loader('content');
const loadSyncedContent = loader('syncedContent');
const loadContentState = loader('contentState');
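// Apply the changes returned by the main provider to the store items and to the sync data.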
function applyChanges(changes) {
const token = mainProvider.getToken();
const storeItemMap = { ...store.getters.allItemMap };
const syncData = { ...store.getters['data/syncData'] };
let syncDataChanged = false;
changes.forEach((change) => {
const existingSyncData = syncData[change.fileId];
const existingItem = existingSyncData && storeItemMap[existingSyncData.itemId];
if (change.removed && existingSyncData) {
if (existingItem) {
// Remove object from the store
store.commit(`${existingItem.type}/deleteItem`, existingItem.id);
delete storeItemMap[existingItem.id];
}
delete syncData[change.fileId];
syncDataChanged = true;
} else if (!change.removed && change.item && change.item.hash && (
// Ignore items that belong to another user (like settings)
!change.item.sub || change.item.sub === token.sub
)) {
if (!existingSyncData || (existingSyncData.hash !== change.item.hash && (
!existingItem || existingItem.hash !== change.item.hash
))) {
// Put object in the store
if (change.item.type !== 'content') { // Merge contents later
store.commit(`${change.item.type}/setItem`, change.item);
storeItemMap[change.item.id] = change.item;
}
}
syncData[change.fileId] = change.syncData;
syncDataChanged = true;
}
});
if (syncDataChanged) {
store.dispatch('data/setSyncData', syncData);
}
}
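// Indexes within a sync history item: hash of the content last sent to the location
// and hash of the content last merged with it.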
const LAST_SENT = 0;
const LAST_MERGED = 1;
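// Add a new sync location to the current file and upload its content there.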
function createSyncLocation(syncLocation) {
syncLocation.id = utils.uid();
const currentFile = store.getters['file/current'];
const fileId = currentFile.id;
syncLocation.fileId = fileId;
// Use deepCopy to freeze item
const content = utils.deepCopy(store.getters['content/current']);
store.dispatch('queue/enqueue',
() => {
const provider = providerRegistry.providers[syncLocation.providerId];
const token = provider.getToken(syncLocation);
return provider.uploadContent(token, {
...content,
history: [content.hash],
}, syncLocation)
.then(syncLocationToStore => loadSyncedContent(fileId)
.then(() => {
const newSyncedContent = utils.deepCopy(
store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
const newSyncHistoryItem = [];
newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
newSyncHistoryItem[LAST_SENT] = content.hash;
newSyncedContent.historyData[content.hash] = content;
store.commit('syncedContent/patchItem', newSyncedContent);
store.commit('syncLocation/setItem', syncLocationToStore);
store.dispatch('notification/info', `A new synchronized location was added to "${currentFile.name}".`);
}));
});
}
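// Sync a single file with all its sync locations: download each remote content,
// merge it with the local content and upload the result unless they already match.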
function syncFile(fileId) {
return loadSyncedContent(fileId)
.then(() => loadContent(fileId))
.then(() => {
const getContent = () => store.state.content.itemMap[`${fileId}/content`];
const getSyncedContent = () => store.state.syncedContent.itemMap[`${fileId}/syncedContent`];
const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];
const downloadedLocations = {};
const errorLocations = {};
const isLocationSynced = (syncLocation) => {
const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
return syncHistoryItem && syncHistoryItem[LAST_SENT] === getContent().hash;
};
const syncOneContentLocation = () => {
const syncLocations = [
...store.getters['syncLocation/groupedByFileId'][fileId] || [],
];
if (isDataSyncPossible()) {
syncLocations.unshift({ id: 'main', providerId: mainProvider.id, fileId });
}
let result;
syncLocations.some((syncLocation) => {
if (!errorLocations[syncLocation.id] &&
(!downloadedLocations[syncLocation.id] || !isLocationSynced(syncLocation))
) {
const provider = providerRegistry.providers[syncLocation.providerId];
const token = provider.getToken(syncLocation);
result = provider && token && store.dispatch('queue/doWithLocation', {
location: syncLocation,
promise: provider.downloadContent(token, syncLocation)
.then((serverContent = null) => {
downloadedLocations[syncLocation.id] = true;
const syncedContent = getSyncedContent();
const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
let mergedContent = (() => {
const clientContent = utils.deepCopy(getContent());
if (!serverContent) {
// Sync location has not been created yet
return clientContent;
}
if (serverContent.hash === clientContent.hash) {
// Server and client contents are synced
return clientContent;
}
if (syncedContent.historyData[serverContent.hash]) {
// Server content has not changed or has already been merged
return clientContent;
}
// Perform a merge with last merged content if any, or a simple fusion otherwise
let lastMergedContent;
serverContent.history.some((hash) => {
lastMergedContent = syncedContent.historyData[hash];
return lastMergedContent;
});
if (!lastMergedContent && syncHistoryItem) {
lastMergedContent = syncedContent.historyData[syncHistoryItem[LAST_MERGED]];
}
return diffUtils.mergeContent(serverContent, clientContent, lastMergedContent);
})();
// Update content in store
store.commit('content/patchItem', {
id: `${fileId}/content`,
...mergedContent,
});
// Retrieve content with new `hash` and freeze it
mergedContent = utils.deepCopy(getContent());
// Make merged content history
const mergedContentHistory = serverContent ? serverContent.history.slice() : [];
let skipUpload = true;
if (mergedContentHistory[0] !== mergedContent.hash) {
// Put merged content hash at the beginning of history
mergedContentHistory.unshift(mergedContent.hash);
// Server content is either out of sync or its history is incomplete, do upload
skipUpload = false;
}
if (syncHistoryItem && syncHistoryItem[0] !== mergedContent.hash) {
// Clean up by removing the hash we've previously added
const idx = mergedContentHistory.indexOf(syncHistoryItem[LAST_SENT]);
if (idx !== -1) {
mergedContentHistory.splice(idx, 1);
}
}
// Mark the last sent content as merged if the server has accepted it,
// then store the merged content, which will be sent if different
const newSyncedContent = utils.deepCopy(syncedContent);
const newSyncHistoryItem = newSyncedContent.syncHistory[syncLocation.id] || [];
newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
if (serverContent && (serverContent.hash === newSyncHistoryItem[LAST_SENT] ||
serverContent.history.indexOf(newSyncHistoryItem[LAST_SENT]) !== -1)
) {
// The server has accepted the content we previously sent
newSyncHistoryItem[LAST_MERGED] = newSyncHistoryItem[LAST_SENT];
}
newSyncHistoryItem[LAST_SENT] = mergedContent.hash;
newSyncedContent.historyData[mergedContent.hash] = mergedContent;
// Clean synced content from unused revisions
cleanSyncedContent(newSyncedContent);
// Store synced content
store.commit('syncedContent/patchItem', newSyncedContent);
if (skipUpload) {
// Server content and merged content are equal, skip content upload
return null;
}
// Prevent sending new content too long after the old content was fetched
const syncStartTime = Date.now();
const ifNotTooLate = cb => (res) => {
// No time to refresh a token...
if (syncStartTime + 500 < Date.now()) {
throw new Error('TOO_LATE');
}
return cb(res);
};
// Upload merged content
return provider.uploadContent(token, {
...mergedContent,
history: mergedContentHistory,
}, syncLocation, ifNotTooLate)
.then((syncLocationToStore) => {
// Replace sync location if modified
if (utils.serializeObject(syncLocation) !==
utils.serializeObject(syncLocationToStore)
) {
store.commit('syncLocation/patchItem', syncLocationToStore);
}
});
})
.catch((err) => {
if (store.state.offline) {
throw err;
}
console.error(err); // eslint-disable-line no-console
store.dispatch('notification/error', err);
errorLocations[syncLocation.id] = true;
}),
})
.then(() => syncOneContentLocation());
}
return result;
});
return result;
};
return syncOneContentLocation();
})
.then(
() => localDbSvc.unloadContents(),
err => localDbSvc.unloadContents()
.then(() => {
throw err;
}))
.catch((err) => {
if (err && err.message === 'TOO_LATE') {
// Restart sync
return syncFile(fileId);
}
throw err;
});
}
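// Sync a data item (settings, templates) with the main provider, merging objects on conflict.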
function syncDataItem(dataId) {
const item = store.state.data.itemMap[dataId];
const syncData = store.getters['data/syncDataByItemId'][dataId];
// Sync if item hash and syncData hash are inconsistent
if (syncData && item && item.hash === syncData.hash) {
return null;
}
const token = mainProvider.getToken();
return token && mainProvider.downloadData(token, dataId)
.then((serverItem = null) => {
const dataSyncData = store.getters['data/dataSyncData'][dataId];
let mergedItem = (() => {
const clientItem = utils.deepCopy(store.getters[`data/${dataId}`]);
if (!serverItem) {
return clientItem;
}
if (!dataSyncData) {
return serverItem;
}
if (dataSyncData.hash !== serverItem.hash) {
// Server version has changed
if (dataSyncData.hash !== clientItem.hash && typeof clientItem.data === 'object') {
// Client version has changed as well, merge data objects
return {
...clientItem,
data: diffUtils.mergeObjects(serverItem.data, clientItem.data),
};
}
return serverItem;
}
return clientItem;
})();
// Update item in store
store.commit('data/setItem', {
id: dataId,
...mergedItem,
});
// Retrieve item with new `hash` and freeze it
mergedItem = utils.deepCopy(store.state.data.itemMap[dataId]);
return Promise.resolve()
.then(() => {
if (serverItem && serverItem.hash === mergedItem.hash) {
return null;
}
return mainProvider.uploadData(
token,
dataId === 'settings' ? token.sub : undefined,
mergedItem,
dataId,
);
})
.then(() => {
store.dispatch('data/patchDataSyncData', {
[dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
});
});
});
}
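// Sync the whole workspace: apply remote changes, save/remove out-of-sync items,
// sync the data items, then sync the current file and any other out-of-sync files.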
function sync() {
const mainToken = store.getters['data/loginToken'];
return mainProvider.getChanges(mainToken)
.then((changes) => {
// Apply changes
applyChanges(changes);
mainProvider.setAppliedChanges(mainToken, changes);
// Prevent sending items too long after changes have been retrieved
const syncStartTime = Date.now();
const ifNotTooLate = cb => (res) => {
if (syncStartTime + restartSyncAfter < Date.now()) {
throw new Error('TOO_LATE');
}
return cb(res);
};
// Called recursively until there is no item left to save
const saveNextItem = ifNotTooLate(() => {
const storeItemMap = {
...store.state.file.itemMap,
...store.state.folder.itemMap,
...store.state.syncLocation.itemMap,
...store.state.publishLocation.itemMap,
// Deal with contents and data later
};
const syncDataByItemId = store.getters['data/syncDataByItemId'];
let result;
Object.keys(storeItemMap).some((id) => {
const item = storeItemMap[id];
const existingSyncData = syncDataByItemId[id];
if (!existingSyncData || existingSyncData.hash !== item.hash) {
result = mainProvider.saveItem(
mainToken,
// Use deepCopy to freeze objects
utils.deepCopy(item),
utils.deepCopy(existingSyncData),
ifNotTooLate,
)
.then(resultSyncData => store.dispatch('data/patchSyncData', {
[resultSyncData.id]: resultSyncData,
}))
.then(() => saveNextItem());
}
return result;
});
return result;
});
// Called recursively until there is no item left to remove
const removeNextItem = ifNotTooLate(() => {
const storeItemMap = {
...store.state.file.itemMap,
...store.state.folder.itemMap,
...store.state.syncLocation.itemMap,
...store.state.publishLocation.itemMap,
...store.state.content.itemMap,
...store.state.data.itemMap,
};
const syncData = store.getters['data/syncData'];
let result;
Object.keys(syncData).some((id) => {
const existingSyncData = syncData[id];
if (!storeItemMap[existingSyncData.itemId] &&
// Remove content only if file has been removed
(existingSyncData.type !== 'content' || !storeItemMap[existingSyncData.itemId.split('/')[0]])
) {
// Use deepCopy to freeze objects
const syncDataToRemove = utils.deepCopy(existingSyncData);
result = mainProvider
.removeItem(mainToken, syncDataToRemove, ifNotTooLate)
.then(() => {
const syncDataCopy = { ...store.getters['data/syncData'] };
delete syncDataCopy[syncDataToRemove.id];
store.dispatch('data/setSyncData', syncDataCopy);
})
.then(() => removeNextItem());
}
return result;
});
return result;
});
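// Return the ID of the first file whose content hash differs from its sync data hash, if any.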
const getOneFileIdToSync = () => {
const allContentIds = Object.keys(localDbSvc.hashMap.content);
let fileId;
allContentIds.some((contentId) => {
// Get content hash from itemMap or from localDbSvc if not loaded
const loadedContent = store.state.content.itemMap[contentId];
const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
const syncData = store.getters['data/syncDataByItemId'][contentId];
// Sync if item hash and syncData hash are inconsistent
if (!syncData || hash !== syncData.hash) {
[fileId] = contentId.split('/');
}
return fileId;
});
return fileId;
};
const syncNextFile = () => {
const fileId = getOneFileIdToSync();
return fileId && syncFile(fileId)
.then(() => syncNextFile());
};
return Promise.resolve()
.then(() => saveNextItem())
.then(() => removeNextItem())
.then(() => syncDataItem('settings'))
.then(() => syncDataItem('templates'))
.then(() => {
const currentFileId = store.getters['content/current'].id;
if (currentFileId) {
// Sync current file first
return syncFile(currentFileId)
.then(() => syncNextFile());
}
return syncNextFile();
})
.catch((err) => {
if (err && err.message === 'TOO_LATE') {
// Restart sync
return sync();
}
throw err;
});
});
}
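// Enqueue a sync request that waits for the user to be active and for the sync window
// to open, then syncs the workspace, or only the current file if data sync is not possible.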
function requestSync() {
store.dispatch('queue/enqueueSyncRequest', () => new Promise((resolve, reject) => {
let intervalId;
const attempt = () => {
// Only start syncing when these conditions are met
if (utils.isUserActive() && isSyncWindow()) {
clearInterval(intervalId);
if (!isSyncPossible()) {
// Cancel sync
reject('Sync not possible.');
return;
}
// Call setLastSyncActivity periodically
intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
setLastSyncActivity();
const cleaner = cb => (res) => {
clearInterval(intervalId);
cb(res);
};
Promise.resolve()
.then(() => {
if (isDataSyncPossible()) {
return sync();
}
if (hasCurrentFileSyncLocations()) {
// Only sync the current file if data sync is unavailable.
// We could also sync files that are out of sync, but that would
// require loading the syncedContent objects of all files.
return syncFile(store.getters['file/current'].id);
}
return null;
})
.then(cleaner(resolve), cleaner(reject));
}
};
intervalId = utils.setInterval(() => attempt(), 1000);
attempt();
}));
}
// Sync periodically
utils.setInterval(() => {
if (isSyncPossible() &&
utils.isUserActive() &&
isSyncWindow() &&
isAutoSyncReady()
) {
requestSync();
}
}, 1000);
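// Pass the object through if it has an ID, otherwise fall back to the callback result.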
const ifNoId = cb => (obj) => {
if (obj.id) {
return obj;
}
return cb();
};
// Load the DB on boot
localDbSvc.sync()
// Then watch the current file ID for changes
.then(() => store.watch(
() => store.getters['file/current'].id,
() => Promise.resolve(store.getters['file/current'])
// If current file has no ID, get the most recent file
.then(ifNoId(() => store.getters['file/lastOpened']))
// If still no ID, create a new file
.then(ifNoId(() => {
const id = utils.uid();
store.commit('content/setItem', {
id: `${id}/content`,
text: welcomeFile,
});
store.commit('file/setItem', {
id,
name: 'Welcome file',
});
return store.state.file.itemMap[id];
}))
.then((currentFile) => {
// Fix current file ID
if (store.getters['file/current'].id !== currentFile.id) {
store.commit('file/setCurrentId', currentFile.id);
// Wait for the next watch tick
return null;
}
// Set last opened
store.dispatch('data/setLastOpenedId', currentFile.id);
return Promise.resolve()
// Load contentState from DB
.then(() => loadContentState(currentFile.id))
// Load syncedContent from DB
.then(() => loadSyncedContent(currentFile.id))
// Load content from DB
.then(() => localDbSvc.loadItem(`${currentFile.id}/content`));
}),
{
immediate: true,
}));
// Sync local DB periodically
utils.setInterval(() => localDbSvc.sync(), 1000);
// Unload contents from memory periodically
utils.setInterval(() => {
// Wait for sync and publish to finish
if (store.state.queue.isEmpty) {
localDbSvc.unloadContents();
}
}, 5000);
export default {
isSyncPossible,
requestSync,
createSyncLocation,
};