import localDbSvc from './localDbSvc';
import store from '../store';
import utils from './utils';
import diffUtils from './diffUtils';
import networkSvc from './networkSvc';
import providerRegistry from './providers/common/providerRegistry';
import googleDriveAppDataProvider from './providers/googleDriveAppDataProvider';
import './providers/couchdbWorkspaceProvider';
import './providers/githubWorkspaceProvider';
import './providers/googleDriveWorkspaceProvider';
import tempFileSvc from './tempFileSvc';
import fileSvc from './fileSvc';

const minAutoSyncEvery = 60 * 1000; // 60 sec
const inactivityThreshold = 3 * 1000; // 3 sec
const restartSyncAfter = 30 * 1000; // 30 sec
const restartContentSyncAfter = 500; // Restart if an authorize window pops up
const maxContentHistory = 20;

const LAST_SEEN = 0;
const LAST_MERGED = 1;
const LAST_SENT = 2;
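
// Indices into a syncHistory entry. Each sync location gets a small array of
// content hashes, e.g. (illustrative shape):
//   syncedContent.syncHistory['main'] === [lastSeenHash, lastMergedHash, lastSentHash]
// See syncFile below for how the three slots are maintained.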

let actionProvider;
let workspaceProvider;

/**
 * Use a lock in localStorage to prevent concurrent synchronization from multiple windows.
 */
let lastSyncActivity;
const getLastStoredSyncActivity = () =>
  parseInt(localStorage.getItem(store.getters['workspace/lastSyncActivityKey']), 10) || 0;

/**
 * Return true if workspace sync is possible.
 */
const isWorkspaceSyncPossible = () => !!store.getters['workspace/syncToken'];

/**
 * Return true if the current file has at least one explicit sync location.
 */
const hasCurrentFileSyncLocations = () => !!store.getters['syncLocation/current'].length;

/**
 * Return true if we are online and have something to sync.
 */
const isSyncPossible = () => !store.state.offline &&
  (isWorkspaceSyncPossible() || hasCurrentFileSyncLocations());

/**
 * Return true if we are the main window, i.e. we have the lastSyncActivity lock.
 */
const isSyncWindow = () => {
  const storedLastSyncActivity = getLastStoredSyncActivity();
  return lastSyncActivity === storedLastSyncActivity ||
    Date.now() > inactivityThreshold + storedLastSyncActivity;
};
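
// In practice: this window keeps the lock as long as it was the last one to
// write lastSyncActivity, and any window may take the lock over once the
// current holder has been idle for more than inactivityThreshold (3 sec).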

/**
 * Return true if auto sync can start, i.e. if lastSyncActivity is old enough.
 */
const isAutoSyncReady = () => {
  let { autoSyncEvery } = store.getters['data/computedSettings'];
  if (autoSyncEvery < minAutoSyncEvery) {
    autoSyncEvery = minAutoSyncEvery;
  }
  return Date.now() > autoSyncEvery + getLastStoredSyncActivity();
};

/**
 * Update the lastSyncActivity, assuming we have the lock.
 */
const setLastSyncActivity = () => {
  const currentDate = Date.now();
  lastSyncActivity = currentDate;
  localStorage.setItem(store.getters['workspace/lastSyncActivityKey'], currentDate);
};

/**
 * Upgrade hashes if syncedContent is from an old version.
 */
const upgradeSyncedContent = (syncedContent) => {
  if (syncedContent.v) {
    return syncedContent;
  }
  const hashUpgrades = {};
  const historyData = {};
  const syncHistory = {};
  Object.entries(syncedContent.historyData).forEach(([hash, content]) => {
    const newContent = utils.addItemHash(content);
    historyData[newContent.hash] = newContent;
    hashUpgrades[hash] = newContent.hash;
  });
  Object.entries(syncedContent.syncHistory).forEach(([id, hashEntries]) => {
    syncHistory[id] = hashEntries.map(hash => hashUpgrades[hash]);
  });
  return {
    ...syncedContent,
    historyData,
    syncHistory,
    v: 1,
  };
};
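
// The upgrade above rewrites pre-v1 syncedContent: every revision in
// historyData gets a recomputed hash via utils.addItemHash, each syncHistory
// entry is remapped through the old-hash -> new-hash table, and `v: 1` marks
// the item so the upgrade runs only once.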

/**
 * Clean a syncedContent.
 */
const cleanSyncedContent = (syncedContent) => {
  // Clean syncHistory from removed syncLocations
  Object.keys(syncedContent.syncHistory).forEach((syncLocationId) => {
    if (syncLocationId !== 'main' && !store.state.syncLocation.itemsById[syncLocationId]) {
      delete syncedContent.syncHistory[syncLocationId];
    }
  });
  const allSyncLocationHashSet = new Set([]
    .concat(...Object.keys(syncedContent.syncHistory)
      .map(id => syncedContent.syncHistory[id])));
  // Clean historyData from unused contents
  Object.keys(syncedContent.historyData)
    .map(hash => parseInt(hash, 10))
    .forEach((hash) => {
      if (!allSyncLocationHashSet.has(hash)) {
        delete syncedContent.historyData[hash];
      }
    });
};
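
// In short, a revision is kept in historyData only while at least one sync
// location still references its hash in syncHistory.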

/**
 * Apply changes retrieved from the main provider. Update sync data accordingly.
 */
const applyChanges = (changes) => {
  const allItemsById = { ...store.getters.allItemsById };
  const syncDataById = { ...store.getters['data/syncDataById'] };
  let getExistingItem;
  if (workspaceProvider.isGit) {
    const { itemsByGitPath } = store.getters;
    getExistingItem = (existingSyncData) => {
      const items = existingSyncData && itemsByGitPath[existingSyncData.id];
      return items ? items[0] : null;
    };
  } else {
    getExistingItem = existingSyncData => existingSyncData && allItemsById[existingSyncData.itemId];
  }

  const idsToKeep = {};
  let saveSyncData = false;

  // Process each change
  changes.forEach((change) => {
    const existingSyncData = syncDataById[change.syncDataId];
    const existingItem = getExistingItem(existingSyncData);
    // If item was removed
    if (!change.item && existingSyncData) {
      if (syncDataById[change.syncDataId]) {
        delete syncDataById[change.syncDataId];
        saveSyncData = true;
      }
      if (existingItem) {
        // Remove object from the store
        store.commit(`${existingItem.type}/deleteItem`, existingItem.id);
        delete allItemsById[existingItem.id];
      }
    // If item was modified
    } else if (change.item && change.item.hash) {
      idsToKeep[change.item.id] = true;

      if ((existingSyncData || {}).hash !== change.syncData.hash) {
        syncDataById[change.syncDataId] = change.syncData;
        saveSyncData = true;
      }
      if (
        // If no sync data or existing one is different
        (existingSyncData || {}).hash !== change.item.hash
        // And no existing item or existing item is different
        && (existingItem || {}).hash !== change.item.hash
        // And item is not content nor data, which will be merged later
        && change.item.type !== 'content' && change.item.type !== 'data'
      ) {
        store.commit(`${change.item.type}/setItem`, change.item);
        allItemsById[change.item.id] = change.item;
      }
    }
  });

  if (saveSyncData) {
    store.dispatch('data/setSyncDataById', syncDataById);
    fileSvc.ensureUniquePaths(idsToKeep);
  }
};
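
// Note that content and data items are deliberately not written to the store
// here; they are merged later by syncFile and syncDataItem.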

/**
 * Create a sync location by uploading the current file content.
 */
const createSyncLocation = (syncLocation) => {
  syncLocation.id = utils.uid();
  const currentFile = store.getters['file/current'];
  const fileId = currentFile.id;
  syncLocation.fileId = fileId;
  // Use deepCopy to freeze item
  const content = utils.deepCopy(store.getters['content/current']);
  store.dispatch(
    'queue/enqueue',
    async () => {
      const provider = providerRegistry.providers[syncLocation.providerId];
      const token = provider.getToken(syncLocation);
      const syncLocationToStore = await provider.uploadContent(token, {
        ...content,
        history: [content.hash],
      }, syncLocation);
      await localDbSvc.loadSyncedContent(fileId);
      const newSyncedContent = utils.deepCopy(upgradeSyncedContent(store.state.syncedContent.itemsById[`${fileId}/syncedContent`]));
      const newSyncHistoryItem = [];
      newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
      newSyncHistoryItem[LAST_SEEN] = content.hash;
      newSyncHistoryItem[LAST_SENT] = content.hash;
      newSyncedContent.historyData[content.hash] = content;

      store.commit('syncedContent/patchItem', newSyncedContent);
      store.commit('syncLocation/setItem', syncLocationToStore);
      store.dispatch('notification/info', `A new synchronized location was added to "${currentFile.name}".`);
    },
  );
};
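
// createSyncLocation is also exposed below and called from init() when an
// action provider returns a new sync location to add.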

/**
 * Prevent sending new data too long after the old data was fetched.
 */
const tooLateChecker = (timeout) => {
  const tooLateAfter = Date.now() + timeout;
  return (cb) => {
    if (tooLateAfter < Date.now()) {
      throw new Error('TOO_LATE');
    }
    return cb();
  };
};
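
// Typical usage (see syncWorkspace below): create a checker once after
// fetching changes, then wrap every subsequent upload with it.
//   const ifNotTooLate = tooLateChecker(restartSyncAfter);
//   await ifNotTooLate(async () => { /* upload an item */ });
// Once the timeout has elapsed, the wrapper throws TOO_LATE and the caller
// restarts the sync instead of pushing stale data.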

/**
 * Return true if the file is in the temp folder or is a welcome file.
 */
const isTempFile = (fileId) => {
  const contentId = `${fileId}/content`;
  if (store.getters['data/syncDataByItemId'][contentId]) {
    // If file has already been synced, it's not a temp file
    return false;
  }
  const file = store.state.file.itemsById[fileId];
  const content = store.state.content.itemsById[contentId];
  if (!file || !content) {
    return false;
  }
  if (file.parentId === 'temp') {
    return true;
  }
  const locations = [
    ...store.getters['syncLocation/filteredGroupedByFileId'][fileId] || [],
    ...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
  ];
  if (locations.length) {
    // If file has sync/publish locations, it's not a temp file
    return false;
  }
  // Return true if it's a welcome file that has no discussion
  const { welcomeFileHashes } = store.getters['data/localSettings'];
  const hash = utils.hash(content.text);
  const hasDiscussions = Object.keys(content.discussions).length;
  return file.name === 'Welcome file' && welcomeFileHashes[hash] && !hasDiscussions;
};

/**
 * Patch sync data if it has changed in the result.
 */
const updateSyncData = (result) => {
  ['syncData', 'contentSyncData', 'fileSyncData'].forEach((field) => {
    const syncData = result[field];
    if (syncData) {
      const oldSyncData = store.getters['data/syncDataById'][syncData.id];
      if (utils.serializeObject(oldSyncData) !== utils.serializeObject(syncData)) {
        store.dispatch('data/patchSyncDataById', {
          [syncData.id]: syncData,
        });
      }
    }
  });
  return result;
};
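
// A SyncContext is threaded through one whole sync pass: `attempted` records
// the content IDs already tried so a file is not synced twice, and `restart`
// asks syncWorkspace to run one more pass (e.g. after a new file was created).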

class SyncContext {
  restart = false;
  attempted = {};
}

/**
 * Sync one file with all its locations.
 */
const syncFile = async (fileId, syncContext = new SyncContext()) => {
  const contentId = `${fileId}/content`;
  syncContext.attempted[contentId] = true;

  await localDbSvc.loadSyncedContent(fileId);
  try {
    await localDbSvc.loadItem(contentId);
  } catch (e) {
    // Item may not exist if content has not been downloaded yet
  }

  const getSyncedContent = () => upgradeSyncedContent(store.state.syncedContent.itemsById[`${fileId}/syncedContent`]);
  const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];

  try {
    if (isTempFile(fileId)) {
      return;
    }

    const syncLocations = [
      ...store.getters['syncLocation/filteredGroupedByFileId'][fileId] || [],
    ];
    if (isWorkspaceSyncPossible()) {
      syncLocations.unshift({ id: 'main', providerId: workspaceProvider.id, fileId });
    }

    await utils.awaitSequence(syncLocations, async (syncLocation) => {
      const provider = providerRegistry.providers[syncLocation.providerId];
      if (!provider) {
        return;
      }
      const token = provider.getToken(syncLocation);
      if (!token) {
        return;
      }

      const downloadContent = async () => {
        // On a simple provider, just call downloadContent
        if (syncLocation.id !== 'main') {
          return provider.downloadContent(token, syncLocation);
        }

        // On a workspace provider, call downloadWorkspaceContent
        const oldContentSyncData = store.getters['data/syncDataByItemId'][contentId];
        const oldFileSyncData = store.getters['data/syncDataByItemId'][fileId];
        if (!oldContentSyncData || !oldFileSyncData) {
          return null;
        }

        const { content } = updateSyncData(await provider.downloadWorkspaceContent({
          token,
          contentId,
          contentSyncData: oldContentSyncData,
          fileSyncData: oldFileSyncData,
        }));

        // Return the downloaded content
        return content;
      };

      const uploadContent = async (content, ifNotTooLate) => {
        // On a simple provider, just call uploadContent
        if (syncLocation.id !== 'main') {
          return provider.uploadContent(token, content, syncLocation, ifNotTooLate);
        }

        // On a workspace provider, call uploadWorkspaceContent
        const oldContentSyncData = store.getters['data/syncDataByItemId'][contentId];
        if (oldContentSyncData && oldContentSyncData.hash === content.hash) {
          return syncLocation;
        }
        const oldFileSyncData = store.getters['data/syncDataByItemId'][fileId];

        updateSyncData(await provider.uploadWorkspaceContent({
          token,
          content,
          // Use deepCopy to freeze item
          file: utils.deepCopy(store.state.file.itemsById[fileId]),
          contentSyncData: oldContentSyncData,
          fileSyncData: oldFileSyncData,
          ifNotTooLate,
        }));

        // Return syncLocation
        return syncLocation;
      };

      const doSyncLocation = async () => {
        const serverContent = await downloadContent(token, syncLocation);
        const syncedContent = getSyncedContent();
        const syncHistoryItem = getSyncHistoryItem(syncLocation.id);

        // Merge content
        let mergedContent;
        const clientContent = utils.deepCopy(store.state.content.itemsById[contentId]);
        if (!clientContent) {
          mergedContent = utils.deepCopy(serverContent || null);
        } else if (!serverContent // If sync location has not been created yet
          // Or server and client contents are synced
          || serverContent.hash === clientContent.hash
          // Or server content has not changed or has already been merged
          || syncedContent.historyData[serverContent.hash]
        ) {
          mergedContent = clientContent;
        } else {
          // Perform a three-way merge with the last merged content as the
          // common ancestor if any, or a simple fusion otherwise
          let lastMergedContent = utils.someResult(
            serverContent.history,
            hash => syncedContent.historyData[hash],
          );
          if (!lastMergedContent && syncHistoryItem) {
            lastMergedContent = syncedContent.historyData[syncHistoryItem[LAST_MERGED]];
          }
          mergedContent = diffUtils.mergeContent(serverContent, clientContent, lastMergedContent);
        }
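
        // Example of the ancestor lookup above (illustrative): if the server
        // history is [h3, h2, h1] and historyData still holds h2, then h2 is
        // used as the merge base passed to diffUtils.mergeContent.
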
        if (!mergedContent) {
          return;
        }

        // Update or set content in store
        store.commit('content/setItem', {
          id: contentId,
          text: utils.sanitizeText(mergedContent.text),
          properties: utils.sanitizeText(mergedContent.properties),
          discussions: mergedContent.discussions,
          comments: mergedContent.comments,
        });

        // Retrieve content with its new hash value and freeze it
        mergedContent = utils.deepCopy(store.state.content.itemsById[contentId]);

        // Make merged content history
        const mergedContentHistory = serverContent ? serverContent.history.slice() : [];
        let skipUpload = true;
        if (mergedContentHistory[0] !== mergedContent.hash) {
          // Put merged content hash at the beginning of history
          mergedContentHistory.unshift(mergedContent.hash);
          // Server content is either out of sync or its history is incomplete, do upload
          skipUpload = false;
        }
        if (syncHistoryItem
          && syncHistoryItem[LAST_SENT] != null
          && syncHistoryItem[LAST_SENT] !== mergedContent.hash
        ) {
          // Clean up by removing the hash we've previously added
          const idx = mergedContentHistory.lastIndexOf(syncHistoryItem[LAST_SENT]);
          if (idx !== -1) {
            mergedContentHistory.splice(idx, 1);
          }
        }

        // Update synced content
        const newSyncedContent = utils.deepCopy(syncedContent);
        const newSyncHistoryItem = newSyncedContent.syncHistory[syncLocation.id] || [];
        newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
        if (serverContent &&
          (serverContent.hash === newSyncHistoryItem[LAST_SEEN] ||
          serverContent.history.indexOf(newSyncHistoryItem[LAST_SEEN]) !== -1)
        ) {
          // That's the 2nd time we've seen this content, trust it for future merges
          newSyncHistoryItem[LAST_MERGED] = newSyncHistoryItem[LAST_SEEN];
        }
        newSyncHistoryItem[LAST_MERGED] = newSyncHistoryItem[LAST_MERGED] || null;
        newSyncHistoryItem[LAST_SEEN] = mergedContent.hash;
        newSyncHistoryItem[LAST_SENT] = skipUpload ? null : mergedContent.hash;
        newSyncedContent.historyData[mergedContent.hash] = mergedContent;

        // Clean synced content from unused revisions
        cleanSyncedContent(newSyncedContent);
        // Store synced content
        store.commit('syncedContent/patchItem', newSyncedContent);

        if (skipUpload) {
          // Server content and merged content are equal, skip content upload
          return;
        }

        // Upload merged content
        const item = {
          ...mergedContent,
          history: mergedContentHistory.slice(0, maxContentHistory),
        };
        const syncLocationToStore = await uploadContent(
          item,
          tooLateChecker(restartContentSyncAfter),
        );

        // Replace sync location if modified
        if (utils.serializeObject(syncLocation) !==
          utils.serializeObject(syncLocationToStore)
        ) {
          store.commit('syncLocation/patchItem', syncLocationToStore);
        }

        // If content was just created, restart sync to create the file as well
        if (provider === workspaceProvider &&
          !store.getters['data/syncDataByItemId'][fileId]
        ) {
          syncContext.restart = true;
        }
      };

      await store.dispatch('queue/doWithLocation', {
        location: syncLocation,
        action: async () => {
          try {
            await doSyncLocation();
          } catch (err) {
            if (store.state.offline || (err && err.message === 'TOO_LATE')) {
              throw err;
            }
            console.error(err); // eslint-disable-line no-console
            store.dispatch('notification/error', err);
          }
        },
      });
    });
  } catch (err) {
    if (err && err.message === 'TOO_LATE') {
      // Restart sync
      await syncFile(fileId, syncContext);
    }
    throw err;
  } finally {
    await localDbSvc.unloadContents();
  }
};
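
// Bookkeeping summary for syncFile: LAST_SEEN is the hash last observed at the
// location, LAST_SENT the hash last uploaded there, and LAST_MERGED the most
// recent hash both sides are known to share, used as the next merge base.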

/**
 * Sync a data item, typically settings, workspaces and templates.
 */
const syncDataItem = async (dataId) => {
  const getItem = () => store.state.data.itemsById[dataId]
    || store.state.data.lsItemsById[dataId];

  const oldItem = getItem();
  const oldSyncData = store.getters['data/syncDataByItemId'][dataId];
  // Sync only if item hash and syncData hash are out of sync
  if (oldSyncData && oldItem && oldItem.hash === oldSyncData.hash) {
    return;
  }

  const token = workspaceProvider.getToken();
  const { item } = updateSyncData(await workspaceProvider.downloadWorkspaceData({
    token,
    syncData: oldSyncData,
  }));

  const serverItem = item;
  const dataSyncData = store.getters['data/dataSyncDataById'][dataId];
  let mergedItem = (() => {
    const clientItem = utils.deepCopy(getItem());
    if (!clientItem) {
      return serverItem;
    }
    if (!serverItem) {
      return clientItem;
    }
    if (!dataSyncData) {
      return serverItem;
    }
    if (dataSyncData.hash !== serverItem.hash) {
      // Server version has changed
      if (dataSyncData.hash !== clientItem.hash && typeof clientItem.data === 'object') {
        // Client version has changed as well, merge data objects
        return {
          ...clientItem,
          data: diffUtils.mergeObjects(serverItem.data, clientItem.data),
        };
      }
      return serverItem;
    }
    return clientItem;
  })();

  if (!mergedItem) {
    return;
  }

  // Update item in store
  store.commit('data/setItem', {
    id: dataId,
    ...mergedItem,
  });

  // Retrieve item with new `hash` and freeze it
  mergedItem = utils.deepCopy(getItem());

  if (serverItem && serverItem.hash === mergedItem.hash) {
    return;
  }

  // Upload merged data item
  updateSyncData(await workspaceProvider.uploadWorkspaceData({
    token,
    item: mergedItem,
    syncData: store.getters['data/syncDataByItemId'][dataId],
    ifNotTooLate: tooLateChecker(restartContentSyncAfter),
  }));

  // Update data sync data
  store.dispatch('data/patchDataSyncDataById', {
    [dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
  });
};
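
// Merge decision in syncDataItem, summarized: no client item -> server wins;
// no server item -> client wins; never synced before -> server wins; both
// sides changed -> merge the two data objects; otherwise the changed side wins.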

/**
 * Sync the whole workspace with the main provider and the current file explicit locations.
 */
const syncWorkspace = async () => {
  try {
    const workspace = store.getters['workspace/currentWorkspace'];
    const syncContext = new SyncContext();

    // Store the sub in the DB since it's not safely stored in the token
    const syncToken = store.getters['workspace/syncToken'];
    const localSettings = store.getters['data/localSettings'];
    if (!localSettings.syncSub) {
      store.dispatch('data/patchLocalSettings', {
        syncSub: syncToken.sub,
      });
    } else if (localSettings.syncSub !== syncToken.sub) {
      throw new Error('Synchronization failed due to token inconsistency.');
    }

    const changes = await workspaceProvider.getChanges();

    // Apply changes
    applyChanges(workspaceProvider.prepareChanges(changes));
    workspaceProvider.onChangesApplied();

    // Prevent sending items too long after changes have been retrieved
    const ifNotTooLate = tooLateChecker(restartSyncAfter);

    // Called until no item to save
    const saveNextItem = () => ifNotTooLate(async () => {
      const storeItemMap = {
        ...store.state.file.itemsById,
        ...store.state.folder.itemsById,
        ...store.state.syncLocation.itemsById,
        ...store.state.publishLocation.itemsById,
        // Deal with contents and data later
      };

      const syncDataByItemId = store.getters['data/syncDataByItemId'];
      const [changedItem, syncDataToUpdate] = utils.someResult(
        Object.entries(storeItemMap),
        ([id, item]) => {
          const syncData = syncDataByItemId[id];
          if ((!syncData || syncData.hash !== item.hash)
            // Add file/folder only if its parent has been added
            && (!storeItemMap[item.parentId] || syncDataByItemId[item.parentId])
            // Add file only if its content has been added
            && (item.type !== 'file' || syncDataByItemId[`${id}/content`])
          ) {
            return [item, syncData];
          }
          return null;
        },
      ) || [];

      if (changedItem) {
        const resultSyncData = await workspaceProvider
          .saveWorkspaceItem({
            // Use deepCopy to freeze objects
            item: utils.deepCopy(changedItem),
            syncData: utils.deepCopy(syncDataToUpdate),
            ifNotTooLate,
          });
        store.dispatch('data/patchSyncDataById', {
          [resultSyncData.id]: resultSyncData,
        });
        await saveNextItem();
      }
    });
    await saveNextItem();

    // Called until no item to remove
    const removeNextItem = () => ifNotTooLate(async () => {
      let getItem;
      let getFileItem;
      if (workspaceProvider.isGit) {
        const { itemsByGitPath } = store.getters;
        getItem = syncData => itemsByGitPath[syncData.id];
        getFileItem = syncData => itemsByGitPath[syncData.id.slice(1)]; // Remove leading /
      } else {
        const { allItemsById } = store.getters;
        getItem = syncData => allItemsById[syncData.itemId];
        getFileItem = syncData => allItemsById[syncData.itemId.split('/')[0]];
      }

      const syncDataById = store.getters['data/syncDataById'];
      const syncDataToRemove = utils.deepCopy(utils.someResult(
        Object.values(syncDataById),
        (syncData) => {
          if (!getItem(syncData)
            // We don't want to delete data items, especially on first sync
            && syncData.type !== 'data'
            // Remove content only if file has been removed
            && (syncData.type !== 'content'
              || !getFileItem(syncData))
          ) {
            return syncData;
          }
          return null;
        },
      ));

      if (syncDataToRemove) {
        await workspaceProvider.removeWorkspaceItem({
          syncData: syncDataToRemove,
          ifNotTooLate,
        });
        const syncDataCopy = { ...store.getters['data/syncDataById'] };
        delete syncDataCopy[syncDataToRemove.id];
        store.dispatch('data/setSyncDataById', syncDataCopy);
        await removeNextItem();
      }
    });
    await removeNextItem();

    // Sync settings and workspaces only in the main workspace
    if (workspace.id === 'main') {
      await syncDataItem('settings');
      await syncDataItem('workspaces');
    }
    await syncDataItem('templates');

    const getOneFileIdToSync = () => {
      let getSyncData;
      if (workspaceProvider.isGit) {
        const { gitPathsByItemId } = store.getters;
        const syncDataById = store.getters['data/syncDataById'];
        // Use the file git path, as the content may not exist or may not be loaded
        getSyncData = fileId => syncDataById[`/${gitPathsByItemId[fileId]}`];
      } else {
        const syncDataByItemId = store.getters['data/syncDataByItemId'];
        getSyncData = (fileId, contentId) => syncDataByItemId[contentId];
      }

      // Collect all [fileId, contentId] pairs
      const ids = [
        ...Object.keys(localDbSvc.hashMap.content)
          .map(contentId => [contentId.split('/')[0], contentId]),
        ...store.getters['file/items']
          .map(file => [file.id, `${file.id}/content`]),
      ];

      // Find the first content out of sync
      const contentMap = store.state.content.itemsById;
      return utils.someResult(ids, ([fileId, contentId]) => {
        // Get the content hash from itemsById, or from localDbSvc if not loaded
        const loadedContent = contentMap[contentId];
        const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
        const syncData = getSyncData(fileId, contentId);
        if (
          // Sync if content syncing was not attempted yet
          !syncContext.attempted[contentId] &&
          // And if syncData does not exist or if content hash and syncData hash are inconsistent
          (!syncData || syncData.hash !== hash)
        ) {
          return fileId;
        }
        return null;
      });
    };

    const syncNextFile = async () => {
      const fileId = getOneFileIdToSync();
      if (fileId) {
        await syncFile(fileId, syncContext);
        await syncNextFile();
      }
    };

    const currentFileId = store.getters['file/current'].id;
    if (currentFileId) {
      // Sync the current file first
      await syncFile(currentFileId, syncContext);
    }
    await syncNextFile();

    if (syncContext.restart) {
      // Restart sync
      await syncWorkspace();
    }
  } catch (err) {
    if (err && err.message === 'TOO_LATE') {
      // Restart sync
      await syncWorkspace();
    } else {
      throw err;
    }
  } finally {
    if (workspaceProvider.onSyncEnd) {
      workspaceProvider.onSyncEnd();
    }
  }
};
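
// A syncWorkspace pass therefore: pulls and applies remote changes, pushes
// changed items, removes deleted items, syncs data items (settings,
// workspaces, templates), then syncs file contents until none is out of sync.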

/**
 * Enqueue a sync task, if possible.
 */
const requestSync = () => {
  // No sync in light mode
  if (store.state.light) {
    return;
  }

  store.dispatch('queue/enqueueSyncRequest', async () => {
    let intervalId;
    const attempt = async () => {
      // Only start syncing when these conditions are met
      if (networkSvc.isUserActive() && isSyncWindow()) {
        clearInterval(intervalId);
        if (!isSyncPossible()) {
          // Cancel sync
          throw new Error('Sync not possible.');
        }

        // Determine if we have to clean files
        const fileHashesToClean = {};
        if (getLastStoredSyncActivity() + utils.cleanTrashAfter < Date.now()) {
          // Last synchronization happened more than 7 days ago
          const syncDataByItemId = store.getters['data/syncDataByItemId'];
          store.getters['file/items'].forEach((file) => {
            // If file is in the trash and has not been modified since it was last synced
            const syncData = syncDataByItemId[file.id];
            if (syncData && file.parentId === 'trash' && file.hash === syncData.hash) {
              fileHashesToClean[file.id] = file.hash;
            }
          });
        }

        // Call setLastSyncActivity periodically
        intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
        setLastSyncActivity();

        try {
          if (isWorkspaceSyncPossible()) {
            await syncWorkspace();
          } else if (hasCurrentFileSyncLocations()) {
            // Only sync the current file if workspace sync is unavailable.
            // We could sync all out-of-sync files, but that would require
            // loading all the syncedContent objects from the DB.
            await syncFile(store.getters['file/current'].id);
          }

          // Clean files
          Object.entries(fileHashesToClean).forEach(([fileId, fileHash]) => {
            const file = store.state.file.itemsById[fileId];
            if (file && file.hash === fileHash) {
              fileSvc.deleteFile(fileId);
            }
          });
        } finally {
          clearInterval(intervalId);
        }
      }
    };

    intervalId = utils.setInterval(() => attempt(), 1000);
    return attempt();
  });
};
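
// requestSync retries attempt() every second until the user is active and this
// window holds the sync lock; the retry interval is cleared as soon as a sync
// actually starts, and the heartbeat interval is cleared in the finally block.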

export default {
  async init() {
    // Load workspaces and tokens from localStorage
    localDbSvc.syncLocalStorage();

    // Try to find a suitable action provider
    actionProvider = providerRegistry.providers[utils.queryParams.providerId];
    if (actionProvider && actionProvider.initAction) {
      await actionProvider.initAction();
    }

    // Try to find a suitable workspace sync provider
    workspaceProvider = providerRegistry.providers[utils.queryParams.providerId];
    if (!workspaceProvider || !workspaceProvider.initWorkspace) {
      workspaceProvider = googleDriveAppDataProvider;
    }
    const workspace = await workspaceProvider.initWorkspace();
    store.dispatch('workspace/setCurrentWorkspaceId', workspace.id);
    await localDbSvc.init();

    // Try to find a suitable action provider again
    actionProvider = providerRegistry.providers[utils.queryParams.providerId] || actionProvider;
    if (actionProvider && actionProvider.performAction) {
      const newSyncLocation = await actionProvider.performAction();
      if (newSyncLocation) {
        this.createSyncLocation(newSyncLocation);
      }
    }

    await tempFileSvc.init();

    if (!store.state.light) {
      // Sync periodically
      utils.setInterval(() => {
        if (isSyncPossible()
          && networkSvc.isUserActive()
          && isSyncWindow()
          && isAutoSyncReady()
        ) {
          requestSync();
        }
      }, 1000);

      // Unload contents from memory periodically
      utils.setInterval(() => {
        // Wait for sync and publish to finish
        if (store.state.queue.isEmpty) {
          localDbSvc.unloadContents();
        }
      }, 5000);
    }
  },
  isSyncPossible,
  requestSync,
  createSyncLocation,
};