Added gdrive app data provider

parent 0280df2bbb
commit cc2f9fa204
@@ -17,6 +17,12 @@
<div class="navigation-bar__title navigation-bar__title--fake text-input"></div>
<div class="navigation-bar__title navigation-bar__title--text text-input" v-bind:style="{maxWidth: styles.titleMaxWidth + 'px'}">{{title}}</div>
<input class="navigation-bar__title navigation-bar__title--input text-input" :class="{'navigation-bar__title--focus': titleFocus, 'navigation-bar__title--scrolling': titleScrolling}" v-bind:style="{width: titleWidth + 'px'}" @focus="editTitle(true)" @blur="editTitle(false)" @keyup.enter="submitTitle()" @keyup.esc="submitTitle(true)" v-on:mouseenter="titleHover = true" v-on:mouseleave="titleHover = false" v-model="title">
<button v-if="!offline && isSyncPossible" class="navigation-bar__button button" :disabled="isSyncRequested" @click="requestSync">
<icon-sync></icon-sync>
</button>
<button v-if="offline && isSyncPossible" class="navigation-bar__button navigation-bar__button--sync-off button" disabled="disabled">
<icon-sync-off></icon-sync-off>
</button>
</div>
<div class="navigation-bar__inner navigation-bar__inner--edit-buttons">
<button class="navigation-bar__button button" @click="pagedownClick('bold')">
@@ -60,8 +66,9 @@
</template>

<script>
import { mapGetters, mapActions } from 'vuex';
import { mapState, mapGetters, mapActions } from 'vuex';
import editorSvc from '../services/editorSvc';
import syncSvc from '../services/syncSvc';
import animationSvc from '../services/animationSvc';

export default {
@@ -72,9 +79,19 @@ export default {
titleHover: false,
}),
computed: {
...mapState([
'offline',
]),
...mapState('queue', [
'isSyncRequested',
]),
...mapGetters('layout', [
'styles',
]),
isSyncPossible() {
return this.$store.getters['data/loginToken'] ||
this.$store.getters['syncLocation/current'].length;
},
showSpinner() {
return !this.$store.state.queue.isEmpty;
},
@@ -112,6 +129,11 @@ export default {
'toggleExplorer',
'toggleSideBar',
]),
requestSync() {
if (!this.isSyncRequested) {
syncSvc.requestSync();
}
},
pagedownClick(name) {
editorSvc.pagedownEditor.uiManager.doClick(name);
},
@@ -203,6 +225,24 @@ export default {
font-size: 22px;
}

.navigation-bar__button[disabled] {
&,
&:active,
&:focus,
&:hover {
color: $navbar-color;
}
}

.navigation-bar__button--sync-off[disabled] {
&,
&:active,
&:focus,
&:hover {
color: #f20;
}
}

.navigation-bar__title--input,
.navigation-bar__button {
&:active,
@@ -15,14 +15,15 @@
<div v-if="panel === 'menu'" class="side-bar__panel side-bar__panel--menu">
<side-bar-item v-if="!loginToken" @click.native="signin">
<icon-login slot="icon"></icon-login>
<div>Sign in</div>
<div>Sign in with Google</div>
<span>Have all your files and settings backed up and synced.</span>
</side-bar-item>
<!-- <side-bar-item @click.native="signin">
<icon-login slot="icon"></icon-login>
<div>Sign in on CouchDB</div>
<span>Save and collaborate on a CouchDB hosted by you.</span>
</side-bar-item> -->
<icon-login slot="icon"></icon-login>
<div>Sign in on CouchDB</div>
<span>Save and collaborate on a CouchDB hosted by you.</span>
</side-bar-item> -->
<hr v-if="!loginToken">
<side-bar-item @click.native="panel = 'toc'">
<icon-toc slot="icon"></icon-toc>
Table of contents
@ -49,7 +50,7 @@ import Toc from './Toc';
|
||||
import SideBarItem from './SideBarItem';
|
||||
import markdownSample from '../data/markdownSample.md';
|
||||
import markdownConversionSvc from '../services/markdownConversionSvc';
|
||||
import googleHelper from '../services/helpers/googleHelper';
|
||||
import googleHelper from '../services/providers/helpers/googleHelper';
|
||||
import syncSvc from '../services/syncSvc';
|
||||
|
||||
const panelNames = {
|
||||
@ -100,6 +101,10 @@ export default {
|
||||
.side-bar {
|
||||
overflow: hidden;
|
||||
height: 100%;
|
||||
|
||||
hr {
|
||||
margin: 5px 10px;
|
||||
}
|
||||
}
|
||||
|
||||
.side-bar__inner {
|
||||
|
@ -59,7 +59,7 @@ export default {
|
||||
color: rgba(0, 0, 0, 0.75);
|
||||
cursor: pointer;
|
||||
font-size: 10px;
|
||||
padding: 10px 20px;
|
||||
padding: 5px 20px 40px;
|
||||
white-space: nowrap;
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
@ -73,8 +73,8 @@ export default {
|
||||
|
||||
.cl-toc-section {
|
||||
* {
|
||||
margin: 0.25em 0;
|
||||
padding: 0.25em 0;
|
||||
margin: 0.2em 0;
|
||||
padding: 0.2em 0;
|
||||
}
|
||||
|
||||
h2 {
|
||||
|
@ -83,6 +83,17 @@ textarea {
|
||||
background-color: rgba(0, 0, 0, 0.1);
|
||||
outline: 0;
|
||||
}
|
||||
|
||||
&[disabled] {
|
||||
&,
|
||||
&:active,
|
||||
&:focus,
|
||||
&:hover {
|
||||
opacity: 0.5;
|
||||
background-color: transparent;
|
||||
cursor: inherit;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.flex {
|
||||
|
@@ -5,6 +5,5 @@ export default () => ({
properties: '\n',
discussions: {},
comments: {},
syncLocations: [],
updated: 0,
hash: 0,
});

@@ -4,5 +4,5 @@ export default () => ({
selectionStart: 0,
selectionEnd: 0,
scrollPosition: null,
updated: 0,
hash: 0,
});

@@ -3,4 +3,5 @@ export default () => ({
type: 'file',
name: '',
parentId: null,
hash: 0,
});

@@ -3,4 +3,5 @@ export default () => ({
type: 'folder',
name: '',
parentId: null,
hash: 0,
});

@@ -1,7 +0,0 @@
export default () => ({
id: null,
type: 'syncContent',
historyData: {},
syncLocationData: {},
updated: 0,
});

src/data/emptySyncLocation.js (new file, 6 lines)
@@ -0,0 +1,6 @@
export default () => ({
id: null,
type: 'syncLocation',
fileId: null,
hash: 0,
});

src/data/emptySyncedContent.js (new file, 7 lines)
@@ -0,0 +1,7 @@
export default () => ({
id: null,
type: 'syncedContent',
historyData: {},
syncHistory: {},
hash: 0,
});
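Note: a minimal sketch of how these new empty-item factories are meant to be used. Store items are keyed as `<fileId>/<type>` (the convention `localDbSvc.loadItem` relies on later in this diff); the `fileId` value and import paths below are illustrative.

```js
import emptySyncLocation from './emptySyncLocation';
import emptySyncedContent from './emptySyncedContent';

const fileId = 'abc123'; // hypothetical file id
const syncedContent = {
  ...emptySyncedContent(),
  id: `${fileId}/syncedContent`,
};
// -> { id: 'abc123/syncedContent', type: 'syncedContent', historyData: {}, syncHistory: {}, hash: 0 }

const syncLocation = {
  ...emptySyncLocation(),
  fileId,
};
// -> { id: null, type: 'syncLocation', fileId: 'abc123', hash: 0 }
```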
|
@ -110,7 +110,7 @@ extensionSvc.onInitConverter(0, (markdown, options) => {
|
||||
const anchorHash = {};
|
||||
let headingOpenToken;
|
||||
let headingContent;
|
||||
state.tokens.cl_each((token) => {
|
||||
state.tokens.forEach((token) => {
|
||||
if (token.type === 'heading_open') {
|
||||
headingContent = '';
|
||||
headingOpenToken = token;
|
||||
@ -148,7 +148,7 @@ extensionSvc.onInitConverter(0, (markdown, options) => {
|
||||
];
|
||||
headingOpenToken = undefined;
|
||||
} else if (headingOpenToken) {
|
||||
headingContent += token.children.cl_reduce((result, child) => {
|
||||
headingContent += token.children.reduce((result, child) => {
|
||||
if (child.type !== 'footnote_ref') {
|
||||
return result + child.content;
|
||||
}
|
||||
|
src/icons/Sync.vue (new file, 5 lines)
@ -0,0 +1,5 @@
|
||||
<template>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="icon" width="100%" height="100%" viewBox="0 0 24.00 24.00">
|
||||
<path d="M12,18C8.69,18 6,15.31 6,12C6,11 6.25,10.03 6.7,9.2L5.24,7.74C4.46,8.97 4,10.43 4,12C4,16.42 7.58,20 12,20V23L16,19L12,15M12,4V1L8,5L12,9V6C15.31,6 18,8.69 18,12C18,13 17.75,13.97 17.3,14.8L18.76,16.26C19.54,15.03 20,13.57 20,12C20,7.58 16.42,4 12,4Z" />
|
||||
</svg>
|
||||
</template>
|
src/icons/SyncOff.vue (new file, 5 lines)
@ -0,0 +1,5 @@
|
||||
<template>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="icon" width="100%" height="100%" viewBox="0 0 24.00 24.00">
|
||||
<path d="M20,4H14V10L16.24,7.76C17.32,8.85 18,10.34 18,12C18,13 17.75,13.94 17.32,14.77L18.78,16.23C19.55,15 20,13.56 20,12C20,9.79 19.09,7.8 17.64,6.36L20,4M2.86,5.41L5.22,7.77C4.45,9 4,10.44 4,12C4,14.21 4.91,16.2 6.36,17.64L4,20H10V14L7.76,16.24C6.68,15.15 6,13.66 6,12C6,11 6.25,10.06 6.68,9.23L14.76,17.31C14.5,17.44 14.26,17.56 14,17.65V19.74C14.79,19.53 15.54,19.2 16.22,18.78L18.58,21.14L19.85,19.87L4.14,4.14L2.86,5.41M10,6.35V4.26C9.2,4.47 8.45,4.8 7.77,5.22L9.23,6.68C9.5,6.56 9.73,6.44 10,6.35Z" />
|
||||
</svg>
|
||||
</template>
|
@ -28,6 +28,8 @@ import ArrowLeft from './ArrowLeft';
|
||||
import HelpCircle from './HelpCircle';
|
||||
import Toc from './Toc';
|
||||
import Login from './Login';
|
||||
import Sync from './Sync';
|
||||
import SyncOff from './SyncOff';
|
||||
|
||||
Vue.component('iconFormatBold', FormatBold);
|
||||
Vue.component('iconFormatItalic', FormatItalic);
|
||||
@ -58,3 +60,5 @@ Vue.component('iconArrowLeft', ArrowLeft);
|
||||
Vue.component('iconHelpCircle', HelpCircle);
|
||||
Vue.component('iconToc', Toc);
|
||||
Vue.component('iconLogin', Login);
|
||||
Vue.component('iconSync', Sync);
|
||||
Vue.component('iconSyncOff', SyncOff);
|
||||
|
@ -2,13 +2,12 @@ import DiffMatchPatch from 'diff-match-patch';
|
||||
import utils from './utils';
|
||||
|
||||
const diffMatchPatch = new DiffMatchPatch();
|
||||
const diffMatchPatchStrict = new DiffMatchPatch();
|
||||
diffMatchPatchStrict.Match_Threshold = 0;
|
||||
diffMatchPatchStrict.Patch_DeleteThreshold = 0;
|
||||
const diffMatchPatchPermissive = new DiffMatchPatch();
|
||||
diffMatchPatchPermissive.Match_Distance = 999999999;
|
||||
diffMatchPatch.Match_Distance = 10000;
|
||||
|
||||
function makePatchableText(content, markerKeys, markerIdxMap) {
|
||||
if (!content || !content.discussions) {
|
||||
return null;
|
||||
}
|
||||
const markers = [];
|
||||
// Sort keys to have predictable marker positions in case of same offset
|
||||
const discussionKeys = Object.keys(content.discussions).sort();
|
||||
@ -58,6 +57,9 @@ function makePatchableText(content, markerKeys, markerIdxMap) {
|
||||
}
|
||||
|
||||
function stripDiscussionOffsets(objectMap) {
|
||||
if (objectMap == null) {
|
||||
return objectMap;
|
||||
}
|
||||
const result = {};
|
||||
Object.keys(objectMap).forEach((id) => {
|
||||
result[id] = {
|
||||
@ -85,72 +87,58 @@ function restoreDiscussionOffsets(content, markerKeys) {
|
||||
}
|
||||
}
|
||||
|
||||
function serializeObject(obj) {
|
||||
if (!obj) {
|
||||
return obj;
|
||||
function mergeText(serverText, clientText, lastMergedText) {
|
||||
const serverClientDiffs = diffMatchPatch.diff_main(serverText, clientText);
|
||||
diffMatchPatch.diff_cleanupSemantic(serverClientDiffs);
|
||||
// Fusion text is a mix of both server and client contents
|
||||
const fusionText = serverClientDiffs.map(diff => diff[1]).join('');
|
||||
if (!lastMergedText) {
|
||||
return fusionText;
|
||||
}
|
||||
return JSON.stringify(obj, (key, value) => {
|
||||
if (Object.prototype.toString.call(value) !== '[object Object]') {
|
||||
return value;
|
||||
}
|
||||
return Object.keys(value).sort().reduce((sorted, valueKey) => {
|
||||
sorted[valueKey] = value[valueKey];
|
||||
return sorted;
|
||||
}, {});
|
||||
});
|
||||
// Let's try to find out what text has to be removed from fusion
|
||||
const intersectionText = serverClientDiffs
|
||||
// Keep only equalities
|
||||
.filter(diff => diff[0] === DiffMatchPatch.DIFF_EQUAL)
|
||||
.map(diff => diff[1]).join('');
|
||||
const lastMergedTextDiffs = diffMatchPatch.diff_main(lastMergedText, intersectionText)
|
||||
// Keep only equalities and deletions
|
||||
.filter(diff => diff[0] !== DiffMatchPatch.DIFF_INSERT);
|
||||
diffMatchPatch.diff_cleanupSemantic(serverClientDiffs);
|
||||
// Make a patch with deletions only
|
||||
const patches = diffMatchPatch.patch_make(lastMergedText, lastMergedTextDiffs);
|
||||
// Apply patch to fusion text
|
||||
return diffMatchPatch.patch_apply(patches, fusionText)[0];
|
||||
}
|
||||
|
||||
function mergeText(oldText, newText, serverText) {
|
||||
let diffs = diffMatchPatch.diff_main(oldText, newText);
|
||||
diffMatchPatch.diff_cleanupSemantic(diffs);
|
||||
const patches = diffMatchPatch.patch_make(oldText, diffs);
|
||||
const patchResult = diffMatchPatch.patch_apply(patches, serverText);
|
||||
if (!patchResult[1].some(changeApplied => !changeApplied)) {
|
||||
return patchResult[0];
|
||||
function mergeValues(serverValue, clientValue, lastMergedValue) {
|
||||
if (!lastMergedValue) {
|
||||
return serverValue || clientValue; // Take the server value in priority
|
||||
}
|
||||
|
||||
diffs = diffMatchPatchStrict.diff_main(patchResult[0], newText);
|
||||
diffMatchPatch.diff_cleanupSemantic(diffs);
|
||||
return diffs.map(diff => diff[1]).join('');
|
||||
}
|
||||
|
||||
function quickPatch(oldStr, newStr, destStr, strict) {
|
||||
const dmp = strict ? diffMatchPatchStrict : diffMatchPatch;
|
||||
const diffs = dmp.diff_main(oldStr, newStr);
|
||||
const patches = dmp.patch_make(oldStr, diffs);
|
||||
const patchResult = dmp.patch_apply(patches, destStr);
|
||||
return patchResult[0];
|
||||
}
|
||||
|
||||
function mergeValue(oldValue, newValue, serverValue) {
|
||||
if (!oldValue) {
|
||||
return serverValue; // There might be conflict, keep the server value
|
||||
}
|
||||
const newSerializedValue = serializeObject(newValue);
|
||||
const serverSerializedValue = serializeObject(serverValue);
|
||||
const newSerializedValue = utils.serializeObject(clientValue);
|
||||
const serverSerializedValue = utils.serializeObject(serverValue);
|
||||
if (newSerializedValue === serverSerializedValue) {
|
||||
return serverValue; // no conflict
|
||||
}
|
||||
const oldSerializedValue = serializeObject(oldValue);
|
||||
const oldSerializedValue = utils.serializeObject(lastMergedValue);
|
||||
if (oldSerializedValue !== newSerializedValue && !serverValue) {
|
||||
return newValue; // Removed on server but changed on client
|
||||
return clientValue; // Removed on server but changed on client
|
||||
}
|
||||
if (oldSerializedValue !== serverSerializedValue && !newValue) {
|
||||
if (oldSerializedValue !== serverSerializedValue && !clientValue) {
|
||||
return serverValue; // Removed on client but changed on server
|
||||
}
|
||||
if (oldSerializedValue !== newSerializedValue && oldSerializedValue === serverSerializedValue) {
|
||||
return newValue; // Take the client value
|
||||
return clientValue; // Take the client value
|
||||
}
|
||||
return serverValue; // Take the server value otherwise
|
||||
return serverValue; // Take the server value
|
||||
}
|
||||
|
||||
function mergeObjects(oldObject, newObject, serverObject) {
|
||||
function mergeObjects(serverObject, clientObject, lastMergedObject = {}) {
|
||||
const mergedObject = {};
|
||||
Object.keys({
|
||||
...newObject,
|
||||
...clientObject,
|
||||
...serverObject,
|
||||
}).forEach((key) => {
|
||||
const mergedValue = mergeValue(oldObject[key], newObject[key], serverObject[key]);
|
||||
const mergedValue = mergeValues(serverObject[key], clientObject[key], lastMergedObject[key]);
|
||||
if (mergedValue != null) {
|
||||
mergedObject[key] = mergedValue;
|
||||
}
|
||||
@ -158,47 +146,41 @@ function mergeObjects(oldObject, newObject, serverObject) {
|
||||
return utils.deepCopy(mergedObject);
|
||||
}
|
||||
|
||||
function mergeContent(oldContent, newContent, serverContent) {
|
||||
function mergeContent(serverContent, clientContent, lastMergedContent = {}) {
|
||||
const markerKeys = [];
|
||||
const markerIdxMap = Object.create(null);
|
||||
const oldText = makePatchableText(oldContent, markerKeys, markerIdxMap);
|
||||
const lastMergedText = makePatchableText(lastMergedContent, markerKeys, markerIdxMap);
|
||||
const serverText = makePatchableText(serverContent, markerKeys, markerIdxMap);
|
||||
const localText = makePatchableText(newContent, markerKeys, markerIdxMap);
|
||||
const isServerTextChanges = oldText !== serverText;
|
||||
const isTextSynchronized = serverText === localText;
|
||||
const clientText = makePatchableText(clientContent, markerKeys, markerIdxMap);
|
||||
const isServerTextChanges = lastMergedText !== serverText;
|
||||
const isTextSynchronized = serverText === clientText;
|
||||
|
||||
const result = {
|
||||
text: isTextSynchronized || !isServerTextChanges
|
||||
? localText
|
||||
: mergeText(oldText, serverText, localText),
|
||||
properties: mergeValue(
|
||||
oldContent.properties,
|
||||
newContent.properties,
|
||||
? clientText
|
||||
: mergeText(serverText, clientText, lastMergedText),
|
||||
properties: mergeValues(
|
||||
serverContent.properties,
|
||||
clientContent.properties,
|
||||
lastMergedContent.properties,
|
||||
),
|
||||
discussions: mergeObjects(
|
||||
stripDiscussionOffsets(oldContent.discussions),
|
||||
stripDiscussionOffsets(newContent.discussions),
|
||||
stripDiscussionOffsets(serverContent.discussions),
|
||||
stripDiscussionOffsets(clientContent.discussions),
|
||||
stripDiscussionOffsets(lastMergedContent.discussions),
|
||||
),
|
||||
comments: mergeObjects(
|
||||
oldContent.comments,
|
||||
newContent.comments,
|
||||
serverContent.comments,
|
||||
clientContent.comments,
|
||||
lastMergedContent.comments,
|
||||
),
|
||||
};
|
||||
restoreDiscussionOffsets(result, markerKeys);
|
||||
return result
|
||||
return result;
|
||||
}
|
||||
|
||||
export default {
|
||||
serializeObject,
|
||||
makePatchableText,
|
||||
restoreDiscussionOffsets,
|
||||
applyContentChanges,
|
||||
getTextPatches,
|
||||
getObjectPatches,
|
||||
quickPatch,
|
||||
mergeObjects,
|
||||
mergeContent,
|
||||
};
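Note: a minimal sketch of the reworked three-way merge entry point. The argument order is now server first, then client, then the last merged revision; the contents below are hand-made examples and the exact merged text depends on diff-match-patch's semantic cleanup.

```js
import diffUtils from './diffUtils'; // path relative to src/services/

const lastMerged = { text: 'Hello world\n', properties: '\n', discussions: {}, comments: {} };
const server = { text: 'Hello world!\n', properties: '\n', discussions: {}, comments: {} };
const client = { text: 'Hello brave world\n', properties: '\n', discussions: {}, comments: {} };

const merged = diffUtils.mergeContent(server, client, lastMerged);
// Both the server edit ("!") and the client edit ("brave") are expected to survive,
// e.g. merged.text === 'Hello brave world!\n'
```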
|
||||
|
@ -1,6 +1,7 @@
|
||||
import DiffMatchPatch from 'diff-match-patch';
|
||||
import cledit from '../libs/cledit';
|
||||
import clDiffUtils from '../libs/cldiffutils';
|
||||
import utils from './utils';
|
||||
import diffUtils from './diffUtils';
|
||||
import store from '../store';
|
||||
|
||||
let clEditor;
|
||||
@ -34,26 +35,35 @@ function getDiscussionMarkers(discussion, discussionId, onMarker) {
|
||||
getMarker('offset1');
|
||||
}
|
||||
|
||||
function syncDiscussionMarkers() {
|
||||
const content = store.getters['content/current'];
|
||||
Object.keys(discussionMarkers)
|
||||
.forEach((markerKey) => {
|
||||
const marker = discussionMarkers[markerKey];
|
||||
// Remove marker if discussion was removed
|
||||
const discussion = content.discussions[marker.discussionId];
|
||||
if (!discussion || discussion[marker.offsetName] === undefined) {
|
||||
clEditor.removeMarker(marker);
|
||||
delete discussionMarkers[markerKey];
|
||||
}
|
||||
});
|
||||
function syncDiscussionMarkers(content, writeOffsets) {
|
||||
Object.keys(discussionMarkers).forEach((markerKey) => {
|
||||
const marker = discussionMarkers[markerKey];
|
||||
// Remove marker if discussion was removed
|
||||
const discussion = content.discussions[marker.discussionId];
|
||||
if (!discussion || discussion[marker.offsetName] === undefined) {
|
||||
clEditor.removeMarker(marker);
|
||||
delete discussionMarkers[markerKey];
|
||||
}
|
||||
});
|
||||
|
||||
Object.keys(content.discussions)
|
||||
.forEach((discussionId) => {
|
||||
const discussion = content.discussions[discussionId];
|
||||
getDiscussionMarkers(discussion, discussionId, (marker) => {
|
||||
Object.keys(content.discussions).forEach((discussionId) => {
|
||||
const discussion = content.discussions[discussionId];
|
||||
getDiscussionMarkers(discussion, discussionId, writeOffsets
|
||||
? (marker) => {
|
||||
discussion[marker.offsetName] = marker.offset;
|
||||
}
|
||||
: (marker) => {
|
||||
marker.offset = discussion[marker.offsetName];
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function removeDiscussionMarkers() {
|
||||
Object.keys(discussionMarkers).forEach((markerKey) => {
|
||||
const marker = discussionMarkers[markerKey];
|
||||
clEditor.removeMarker(marker);
|
||||
delete discussionMarkers[markerKey];
|
||||
});
|
||||
}
|
||||
|
||||
const diffMatchPatch = new DiffMatchPatch();
|
||||
@ -91,8 +101,6 @@ function reversePatches(patches) {
|
||||
|
||||
export default {
|
||||
clEditor: null,
|
||||
lastChange: 0,
|
||||
lastExternalChange: 0,
|
||||
createClEditor(editorElt) {
|
||||
this.clEditor = cledit(editorElt, editorElt.parentNode);
|
||||
clEditor = this.clEditor;
|
||||
@ -100,42 +108,38 @@ export default {
|
||||
markerIdxMap = Object.create(null);
|
||||
discussionMarkers = {};
|
||||
clEditor.on('contentChanged', (text) => {
|
||||
store.dispatch('content/patchCurrent', { text });
|
||||
syncDiscussionMarkers();
|
||||
const content = store.getters['content/current'];
|
||||
const oldContent = store.getters['content/current'];
|
||||
const newContent = {
|
||||
...oldContent,
|
||||
discussions: utils.deepCopy(oldContent.discussions),
|
||||
text,
|
||||
};
|
||||
syncDiscussionMarkers(newContent, true);
|
||||
if (!isChangePatch) {
|
||||
previousPatchableText = currentPatchableText;
|
||||
currentPatchableText = clDiffUtils.makePatchableText(content, markerKeys, markerIdxMap);
|
||||
currentPatchableText = diffUtils.makePatchableText(newContent, markerKeys, markerIdxMap);
|
||||
} else {
|
||||
// Take a chance to restore discussion offsets on undo/redo
|
||||
content.text = currentPatchableText;
|
||||
clDiffUtils.restoreDiscussionOffsets(content, markerKeys);
|
||||
content.discussions.cl_each((discussion, discussionId) => {
|
||||
getDiscussionMarkers(discussion, discussionId, (marker) => {
|
||||
marker.offset = discussion[marker.offsetName];
|
||||
});
|
||||
});
|
||||
diffUtils.restoreDiscussionOffsets(newContent, markerKeys);
|
||||
syncDiscussionMarkers(newContent, false);
|
||||
}
|
||||
store.dispatch('content/patchCurrent', newContent);
|
||||
isChangePatch = false;
|
||||
this.lastChange = Date.now();
|
||||
});
|
||||
clEditor.addMarker(newDiscussionMarker0);
|
||||
clEditor.addMarker(newDiscussionMarker1);
|
||||
},
|
||||
initClEditor(opts, reinit) {
|
||||
initClEditor(opts) {
|
||||
const content = store.getters['content/current'];
|
||||
const contentState = store.getters['contentState/current'];
|
||||
if (content) {
|
||||
const options = Object.assign({}, opts);
|
||||
|
||||
if (contentId !== content.id) {
|
||||
contentId = content.id;
|
||||
currentPatchableText = clDiffUtils.makePatchableText(content, markerKeys, markerIdxMap);
|
||||
currentPatchableText = diffUtils.makePatchableText(content, markerKeys, markerIdxMap);
|
||||
previousPatchableText = currentPatchableText;
|
||||
syncDiscussionMarkers();
|
||||
}
|
||||
|
||||
if (reinit) {
|
||||
syncDiscussionMarkers(content, false);
|
||||
const contentState = store.getters['contentState/current'];
|
||||
options.content = content.text;
|
||||
options.selectionStart = contentState.selectionStart;
|
||||
options.selectionEnd = contentState.selectionEnd;
|
||||
@ -143,21 +147,21 @@ export default {
|
||||
|
||||
options.patchHandler = {
|
||||
makePatches,
|
||||
applyPatches: patches => applyPatches(patches),
|
||||
applyPatches,
|
||||
reversePatches,
|
||||
};
|
||||
clEditor.init(options);
|
||||
}
|
||||
},
|
||||
applyContent(isExternal) {
|
||||
if (!clEditor) {
|
||||
return null;
|
||||
applyContent() {
|
||||
if (clEditor) {
|
||||
const content = store.getters['content/current'];
|
||||
if (clEditor.setContent(content.text, true).range) {
|
||||
// Marker will be recreated on contentChange
|
||||
removeDiscussionMarkers();
|
||||
} else {
|
||||
syncDiscussionMarkers(content, false);
|
||||
}
|
||||
}
|
||||
if (isExternal) {
|
||||
this.lastExternalChange = Date.now();
|
||||
}
|
||||
syncDiscussionMarkers();
|
||||
const content = store.getters['content/current'];
|
||||
return clEditor.setContent(content.text, isExternal);
|
||||
},
|
||||
};
|
||||
|
@ -29,7 +29,6 @@ const allowDebounce = (action, wait) => {
|
||||
|
||||
const diffMatchPatch = new DiffMatchPatch();
|
||||
let lastContentId = null;
|
||||
let reinitClEditor = true;
|
||||
let instantPreview = true;
|
||||
let tokens;
|
||||
const anchorHash = {};
|
||||
@ -185,12 +184,11 @@ const editorSvc = Object.assign(new Vue(), { // Use a vue instance as an event b
|
||||
return 0.15;
|
||||
},
|
||||
};
|
||||
editorEngineSvc.initClEditor(options, reinitClEditor);
|
||||
editorEngineSvc.initClEditor(options);
|
||||
editorEngineSvc.clEditor.toggleEditable(true);
|
||||
const contentId = store.getters['content/current'].id;
|
||||
// Switch off the editor when no content is loaded
|
||||
editorEngineSvc.clEditor.toggleEditable(!!contentId);
|
||||
reinitClEditor = false;
|
||||
this.restoreScrollPosition();
|
||||
},
|
||||
|
||||
@ -573,30 +571,26 @@ const editorSvc = Object.assign(new Vue(), { // Use a vue instance as an event b
|
||||
|
||||
const debouncedRefreshPreview = debounce(refreshPreview, 20);
|
||||
|
||||
let newSectionList;
|
||||
let newSelectionRange;
|
||||
const onEditorChanged = () => {
|
||||
if (this.sectionList !== newSectionList) {
|
||||
this.sectionList = newSectionList;
|
||||
this.$emit('sectionList', this.sectionList);
|
||||
if (instantPreview) {
|
||||
refreshPreview();
|
||||
} else {
|
||||
debouncedRefreshPreview();
|
||||
const onEditorChanged =
|
||||
(sectionList = this.sectionList, selectionRange = this.selectionRange) => {
|
||||
if (this.sectionList !== sectionList) {
|
||||
this.sectionList = sectionList;
|
||||
this.$emit('sectionList', this.sectionList);
|
||||
if (instantPreview) {
|
||||
refreshPreview();
|
||||
} else {
|
||||
debouncedRefreshPreview();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (this.selectionRange !== newSelectionRange) {
|
||||
this.selectionRange = newSelectionRange;
|
||||
this.$emit('selectionRange', this.selectionRange);
|
||||
}
|
||||
this.saveContentState();
|
||||
};
|
||||
const debouncedEditorChanged = debounce(onEditorChanged, 10);
|
||||
if (this.selectionRange !== selectionRange) {
|
||||
this.selectionRange = selectionRange;
|
||||
this.$emit('selectionRange', this.selectionRange);
|
||||
}
|
||||
this.saveContentState();
|
||||
};
|
||||
|
||||
editorEngineSvc.clEditor.selectionMgr.on('selectionChanged', (start, end, selectionRange) => {
|
||||
newSelectionRange = selectionRange;
|
||||
debouncedEditorChanged();
|
||||
});
|
||||
editorEngineSvc.clEditor.selectionMgr.on('selectionChanged',
|
||||
(start, end, selectionRange) => onEditorChanged(undefined, selectionRange));
|
||||
|
||||
/* -----------------------------
|
||||
* Inline images
|
||||
@ -684,14 +678,8 @@ const editorSvc = Object.assign(new Vue(), { // Use a vue instance as an event b
|
||||
triggerImgCacheGc();
|
||||
});
|
||||
|
||||
editorEngineSvc.clEditor.on('contentChanged', (content, diffs, sectionList) => {
|
||||
newSectionList = sectionList;
|
||||
if (instantPreview) {
|
||||
onEditorChanged();
|
||||
} else {
|
||||
debouncedEditorChanged();
|
||||
}
|
||||
});
|
||||
editorEngineSvc.clEditor.on('contentChanged',
|
||||
(content, diffs, sectionList) => onEditorChanged(sectionList));
|
||||
|
||||
this.$emit('inited');
|
||||
|
||||
@ -716,20 +704,20 @@ const editorSvc = Object.assign(new Vue(), { // Use a vue instance as an event b
|
||||
// }
|
||||
// })
|
||||
|
||||
// Watch file content properties changes
|
||||
// Watch file content changes
|
||||
store.watch(
|
||||
() => store.getters['content/current'].properties,
|
||||
(properties) => {
|
||||
// Track ID changes at the same time
|
||||
const contentId = store.getters['content/current'].id;
|
||||
() => store.getters['content/current'].hash,
|
||||
() => {
|
||||
const content = store.getters['content/current'];
|
||||
// Track ID changes
|
||||
let initClEditor = false;
|
||||
if (contentId !== lastContentId) {
|
||||
reinitClEditor = true;
|
||||
if (content.id !== lastContentId) {
|
||||
instantPreview = true;
|
||||
lastContentId = contentId;
|
||||
lastContentId = content.id;
|
||||
initClEditor = true;
|
||||
}
|
||||
const options = extensionSvc.getOptions(properties, true);
|
||||
// Track properties changes
|
||||
const options = extensionSvc.getOptions(content.properties, true);
|
||||
if (JSON.stringify(options) !== JSON.stringify(editorSvc.options)) {
|
||||
editorSvc.options = options;
|
||||
editorSvc.initPrism();
|
||||
@ -739,6 +727,8 @@ const editorSvc = Object.assign(new Vue(), { // Use a vue instance as an event b
|
||||
if (initClEditor) {
|
||||
editorSvc.initClEditor();
|
||||
}
|
||||
// Apply possible text and discussion changes
|
||||
editorEngineSvc.applyContent();
|
||||
}, {
|
||||
immediate: true,
|
||||
});
|
||||
|
@ -1,16 +1,15 @@
|
||||
import 'babel-polyfill';
|
||||
import 'indexeddbshim';
|
||||
import 'indexeddbshim/dist/indexeddbshim';
|
||||
import utils from './utils';
|
||||
import store from '../store';
|
||||
|
||||
let indexedDB = window.indexedDB;
|
||||
const indexedDB = window.indexedDB;
|
||||
const localStorage = window.localStorage;
|
||||
const dbVersion = 1;
|
||||
const dbStoreName = 'objects';
|
||||
|
||||
// Use the shim on Safari or when indexedDB is not available
|
||||
if (window.shimIndexedDB && (!indexedDB || (navigator.userAgent.indexOf('Chrome') === -1 && navigator.userAgent.indexOf('Safari') !== -1))) {
|
||||
indexedDB = window.shimIndexedDB;
|
||||
if (!indexedDB) {
|
||||
throw new Error('Your browser is not supported. Please upgrade to the latest version.');
|
||||
}
|
||||
|
||||
const deleteMarkerMaxAge = 1000;
|
||||
@ -81,20 +80,20 @@ class Connection {
|
||||
}
|
||||
}
|
||||
|
||||
const updatedMap = {};
|
||||
const hashMap = {};
|
||||
utils.types.forEach((type) => {
|
||||
updatedMap[type] = Object.create(null);
|
||||
hashMap[type] = Object.create(null);
|
||||
});
|
||||
|
||||
const contentTypes = {
|
||||
content: true,
|
||||
contentState: true,
|
||||
syncContent: true,
|
||||
syncedContent: true,
|
||||
};
|
||||
|
||||
export default {
|
||||
lastTx: 0,
|
||||
updatedMap,
|
||||
hashMap,
|
||||
connection: new Connection(),
|
||||
|
||||
/**
|
||||
@ -145,7 +144,7 @@ export default {
|
||||
changes.forEach((item) => {
|
||||
this.readDbItem(item, storeItemMap);
|
||||
// If item is an old delete marker, remove it from the DB
|
||||
if (!item.updated && lastTx - item.tx > deleteMarkerMaxAge) {
|
||||
if (!item.hash && lastTx - item.tx > deleteMarkerMaxAge) {
|
||||
dbStore.delete(item.id);
|
||||
}
|
||||
});
|
||||
@ -163,7 +162,7 @@ export default {
|
||||
const incrementedTx = this.lastTx + 1;
|
||||
|
||||
// Remove deleted store items
|
||||
Object.keys(this.updatedMap).forEach((type) => {
|
||||
Object.keys(this.hashMap).forEach((type) => {
|
||||
// Remove this type only if file is deleted
|
||||
let checker = cb => id => !storeItemMap[id] && cb(id);
|
||||
if (contentTypes[type]) {
|
||||
@ -177,14 +176,14 @@ export default {
|
||||
}
|
||||
};
|
||||
}
|
||||
Object.keys(this.updatedMap[type]).forEach(checker((id) => {
|
||||
Object.keys(this.hashMap[type]).forEach(checker((id) => {
|
||||
// Put a delete marker to notify other tabs
|
||||
dbStore.put({
|
||||
id,
|
||||
type,
|
||||
tx: incrementedTx,
|
||||
});
|
||||
delete this.updatedMap[type][id];
|
||||
delete this.hashMap[type][id];
|
||||
this.lastTx = incrementedTx; // No need to read what we just wrote
|
||||
}));
|
||||
});
|
||||
@ -193,13 +192,13 @@ export default {
|
||||
Object.keys(storeItemMap).forEach((id) => {
|
||||
const storeItem = storeItemMap[id];
|
||||
// Store object has changed
|
||||
if (this.updatedMap[storeItem.type][storeItem.id] !== storeItem.updated) {
|
||||
if (this.hashMap[storeItem.type][storeItem.id] !== storeItem.hash) {
|
||||
const item = {
|
||||
...storeItem,
|
||||
tx: incrementedTx,
|
||||
};
|
||||
dbStore.put(item);
|
||||
this.updatedMap[item.type][item.id] = item.updated;
|
||||
this.hashMap[item.type][item.id] = item.hash;
|
||||
this.lastTx = incrementedTx; // No need to read what we just wrote
|
||||
}
|
||||
});
|
||||
@ -210,20 +209,21 @@ export default {
|
||||
*/
|
||||
readDbItem(dbItem, storeItemMap) {
|
||||
const existingStoreItem = storeItemMap[dbItem.id];
|
||||
if (!dbItem.updated) {
|
||||
if (!dbItem.hash) {
|
||||
// DB item is a delete marker
|
||||
delete this.updatedMap[dbItem.type][dbItem.id];
|
||||
delete this.hashMap[dbItem.type][dbItem.id];
|
||||
if (existingStoreItem) {
|
||||
// Remove item from the store
|
||||
store.commit(`${existingStoreItem.type}/deleteItem`, existingStoreItem.id);
|
||||
delete storeItemMap[existingStoreItem.id];
|
||||
}
|
||||
} else if (this.updatedMap[dbItem.type][dbItem.id] !== dbItem.updated) {
|
||||
} else if (this.hashMap[dbItem.type][dbItem.id] !== dbItem.hash) {
|
||||
// DB item is different from the corresponding store item
|
||||
this.updatedMap[dbItem.type][dbItem.id] = dbItem.updated;
|
||||
this.hashMap[dbItem.type][dbItem.id] = dbItem.hash;
|
||||
// Update content only if it exists in the store
|
||||
if (existingStoreItem || !contentTypes[dbItem.type]) {
|
||||
// Put item in the store
|
||||
dbItem.tx = undefined;
|
||||
store.commit(`${dbItem.type}/setItem`, dbItem);
|
||||
storeItemMap[dbItem.id] = dbItem;
|
||||
}
|
||||
@ -248,13 +248,14 @@ export default {
|
||||
const request = dbStore.get(id);
|
||||
request.onsuccess = () => {
|
||||
const dbItem = request.result;
|
||||
if (!dbItem || !dbItem.updated) {
|
||||
if (!dbItem || !dbItem.hash) {
|
||||
onError();
|
||||
} else {
|
||||
this.updatedMap[dbItem.type][dbItem.id] = dbItem.updated;
|
||||
this.hashMap[dbItem.type][dbItem.id] = dbItem.hash;
|
||||
// Put item in the store
|
||||
dbItem.tx = undefined;
|
||||
store.commit(`${dbItem.type}/setItem`, dbItem);
|
||||
resolve(dbItem);
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
}, () => onError());
|
||||
@ -265,15 +266,12 @@ export default {
|
||||
* Unload from the store contents that haven't been opened recently
|
||||
*/
|
||||
unloadContents() {
|
||||
const lastOpenedFileIds = store.getters['data/lastOpenedIds']
|
||||
.slice(0, 10).reduce((result, id) => {
|
||||
result[id] = true;
|
||||
return result;
|
||||
}, {});
|
||||
// Keep only last opened files in memory
|
||||
const lastOpenedFileIds = new Set(store.getters['data/lastOpenedIds']);
|
||||
Object.keys(contentTypes).forEach((type) => {
|
||||
store.getters(`${type}/items`).forEach((item) => {
|
||||
store.getters[`${type}/items`].forEach((item) => {
|
||||
const [fileId] = item.id.split('/');
|
||||
if (!lastOpenedFileIds[fileId]) {
|
||||
if (!lastOpenedFileIds.has(fileId)) {
|
||||
// Remove item from the store
|
||||
store.commit(`${type}/deleteItem`, item.id);
|
||||
}
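Note: across the localDbSvc changes above, change detection and the cross-tab delete markers are now keyed on `hash` instead of `updated`. A condensed, self-contained illustration of that protocol (plain objects only, no real IndexedDB):

```js
const hashMap = { content: Object.create(null) };

// A normal record carries a hash; it is pushed to the store only when the hash moved
const dbItem = { id: 'abc123/content', type: 'content', hash: 42, tx: 7 };
if (hashMap[dbItem.type][dbItem.id] !== dbItem.hash) {
  hashMap[dbItem.type][dbItem.id] = dbItem.hash; // remember what we have seen
  // store.commit(`${dbItem.type}/setItem`, dbItem);
}

// A delete marker is simply a record without a hash; readers drop the item from their store
const deleteMarker = { id: 'abc123/content', type: 'content', tx: 8 };
if (!deleteMarker.hash) {
  delete hashMap[deleteMarker.type][deleteMarker.id];
  // store.commit(`${deleteMarker.type}/deleteItem`, deleteMarker.id);
}
```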
|
||||
|
@ -93,7 +93,7 @@ const deflistBlockTypeMap = createFlagMap([
|
||||
|
||||
function hashArray(arr, valueHash, valueArray) {
|
||||
const hash = [];
|
||||
arr.cl_each((str) => {
|
||||
arr.forEach((str) => {
|
||||
let strHash = valueHash[str];
|
||||
if (strHash === undefined) {
|
||||
strHash = valueArray.length;
|
||||
|
@ -1,24 +1,108 @@
|
||||
import store from '../../store';
|
||||
import googleHelper from '../helpers/googleHelper';
|
||||
import googleHelper from './helpers/googleHelper';
|
||||
|
||||
export default {
|
||||
downloadContent(token, fileId) {
|
||||
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
|
||||
getChanges(token) {
|
||||
return googleHelper.getChanges(token)
|
||||
.then((result) => {
|
||||
const changes = result.changes.filter((change) => {
|
||||
if (change.file) {
|
||||
try {
|
||||
change.item = JSON.parse(change.file.name);
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
// Build sync data
|
||||
change.syncData = {
|
||||
id: change.fileId,
|
||||
itemId: change.item.id,
|
||||
type: change.item.type,
|
||||
hash: change.item.hash,
|
||||
};
|
||||
change.file = undefined;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
changes.nextPageToken = result.nextPageToken;
|
||||
return changes;
|
||||
});
|
||||
},
|
||||
setAppliedChanges(token, changes) {
|
||||
const lastToken = store.getters['data/googleTokens'][token.sub];
|
||||
if (changes.nextPageToken !== lastToken.nextPageToken) {
|
||||
store.dispatch('data/setGoogleToken', {
|
||||
...lastToken,
|
||||
nextPageToken: changes.nextPageToken,
|
||||
});
|
||||
}
|
||||
},
|
||||
saveItem(token, item, syncData, ifNotTooLate) {
|
||||
return googleHelper.saveFile(
|
||||
token,
|
||||
JSON.stringify(item),
|
||||
['appDataFolder'],
|
||||
null,
|
||||
syncData && syncData.id,
|
||||
ifNotTooLate,
|
||||
)
|
||||
.then(file => ({
|
||||
// Build sync data
|
||||
id: file.id,
|
||||
itemId: item.id,
|
||||
type: item.type,
|
||||
hash: item.hash,
|
||||
}));
|
||||
},
|
||||
removeItem(token, syncData, ifNotTooLate) {
|
||||
return googleHelper.removeAppDataFile(token, syncData.id, ifNotTooLate)
|
||||
.then(() => syncData);
|
||||
},
|
||||
downloadContent(token, syncLocation) {
|
||||
const syncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
|
||||
if (!syncData) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
return googleHelper.downloadAppDataFile(token, syncData.id)
|
||||
.then((content) => {
|
||||
if (content.updated !== syncData.updated) {
|
||||
const item = JSON.parse(content);
|
||||
if (item.hash !== syncData.hash) {
|
||||
store.dispatch('data/setSyncData', {
|
||||
...store.getters['data/syncData'],
|
||||
[syncData.id]: {
|
||||
...syncData,
|
||||
updated: content.updated,
|
||||
hash: item.hash,
|
||||
},
|
||||
});
|
||||
}
|
||||
return {
|
||||
history: [],
|
||||
...content,
|
||||
};
|
||||
return item;
|
||||
});
|
||||
},
|
||||
uploadContent(token, item, syncLocation, ifNotTooLate) {
|
||||
const syncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
|
||||
if (syncData && syncData.hash === item.hash) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
return googleHelper.saveAppDataFile(
|
||||
token,
|
||||
JSON.stringify({
|
||||
id: item.id,
|
||||
type: item.type,
|
||||
hash: item.hash,
|
||||
}),
|
||||
['appDataFolder'],
|
||||
JSON.stringify(item),
|
||||
syncData && syncData.id,
|
||||
ifNotTooLate,
|
||||
)
|
||||
.then(file => store.dispatch('data/setSyncData', {
|
||||
...store.getters['data/syncData'],
|
||||
[file.id]: {
|
||||
// Build sync data
|
||||
id: file.id,
|
||||
itemId: item.id,
|
||||
type: item.type,
|
||||
hash: item.hash,
|
||||
},
|
||||
}));
|
||||
},
|
||||
};
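Note: with this provider, the app-data Drive file's `name` carries the JSON-serialized item (for contents, just its id/type/hash header), so `getChanges` can turn a change record into sync data without downloading the file body. A hand-written example of that transformation (the raw change below is a stand-in, not real API output):

```js
const rawChange = {
  fileId: '0B7driveFileId',
  file: { name: '{"id":"abc123/content","type":"content","hash":42}' },
};

const item = JSON.parse(rawChange.file.name);
const syncData = {
  id: rawChange.fileId, // Google Drive file id
  itemId: item.id,      // StackEdit item id
  type: item.type,
  hash: item.hash,
};
// -> { id: '0B7driveFileId', itemId: 'abc123/content', type: 'content', hash: 42 }
```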
|
||||
|
src/services/providers/gdriveProvider.js (new file, 30 lines)
@ -0,0 +1,30 @@
|
||||
import store from '../../store';
|
||||
import googleHelper from './helpers/googleHelper';
|
||||
import providerUtils from './providerUtils';
|
||||
|
||||
export default {
|
||||
downloadContent(token, syncLocation) {
|
||||
return googleHelper.downloadFile(token, syncLocation.gdriveFileId)
|
||||
.then(content => providerUtils.parseContent(content));
|
||||
},
|
||||
uploadContent(token, item, syncLocation, ifNotTooLate) {
|
||||
const file = store.state.file.itemMap[syncLocation.fileId];
|
||||
const name = (file && file.name) || 'Untitled';
|
||||
const parents = [];
|
||||
if (syncLocation.gdriveParentId) {
|
||||
parents.push(syncLocation.gdriveParentId);
|
||||
}
|
||||
return googleHelper.saveFile(
|
||||
token,
|
||||
name,
|
||||
parents,
|
||||
providerUtils.serializeContent(item),
|
||||
syncLocation && syncLocation.gdriveId,
|
||||
ifNotTooLate,
|
||||
)
|
||||
.then(gdriveFile => ({
|
||||
...syncLocation,
|
||||
gdriveId: gdriveFile.id,
|
||||
}));
|
||||
},
|
||||
};
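Note: for orientation, the syncLocation fields this provider reads, shown as a sample object. The values are made up and the `provider` key is an assumption; the field names come from the code above (it reads `gdriveFileId` when downloading but writes `gdriveId` back when uploading, exactly as in the diff).

```js
const syncLocation = {
  id: 'loc1',                    // hypothetical location id
  provider: 'gdrive',            // assumed provider key for this module
  fileId: 'abc123',              // StackEdit file id, used to look up the file name
  gdriveFileId: '0B7existingId', // read by downloadContent
  gdriveParentId: '0B7folderId', // optional Drive folder for newly created files
  gdriveId: '0B7existingId',     // written back by uploadContent
};
```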
|
@ -1,9 +1,9 @@
|
||||
import utils from '../utils';
|
||||
import store from '../../store';
|
||||
import utils from '../../utils';
|
||||
import store from '../../../store';
|
||||
|
||||
const clientId = '241271498917-t4t7d07qis7oc0ahaskbif3ft6tk63cd.apps.googleusercontent.com';
|
||||
const appsDomain = null;
|
||||
const tokenExpirationMargin = 10 * 60 * 1000; // 10 min
|
||||
const tokenExpirationMargin = 5 * 60 * 1000; // 5 min (Google tokens expire after 1h)
|
||||
|
||||
// const scopeMap = {
|
||||
// profile: [
|
||||
@ -23,6 +23,11 @@ const tokenExpirationMargin = 10 * 60 * 1000; // 10 min
|
||||
// ],
|
||||
// };
|
||||
|
||||
const gdriveAppDataScopes = ['https://www.googleapis.com/auth/drive.appdata'];
|
||||
const getGdriveScopes = () => [store.getters['data/settings'].gdriveFullAccess === true
|
||||
? 'https://www.googleapis.com/auth/drive'
|
||||
: 'https://www.googleapis.com/auth/drive.file'];
|
||||
|
||||
const request = (token, options) => utils.request({
|
||||
...options,
|
||||
headers: {
|
||||
@ -31,17 +36,76 @@ const request = (token, options) => utils.request({
|
||||
},
|
||||
});
|
||||
|
||||
function saveFile(refreshedToken, name, parents, media = null, fileId = null,
|
||||
ifNotTooLate = cb => res => cb(res),
|
||||
) {
|
||||
return Promise.resolve()
|
||||
// Refreshing a token can take a while if an oauth window pops up, so check if it's too late
|
||||
.then(ifNotTooLate(() => {
|
||||
const options = {
|
||||
method: 'POST',
|
||||
url: 'https://www.googleapis.com/drive/v3/files',
|
||||
};
|
||||
const metadata = { name };
|
||||
if (fileId) {
|
||||
options.method = 'PATCH';
|
||||
options.url = `https://www.googleapis.com/drive/v3/files/${fileId}`;
|
||||
} else if (parents) {
|
||||
// Parents field is not patchable
|
||||
metadata.parents = parents;
|
||||
}
|
||||
if (media) {
|
||||
const boundary = `-------${utils.uid()}`;
|
||||
const delimiter = `\r\n--${boundary}\r\n`;
|
||||
const closeDelimiter = `\r\n--${boundary}--`;
|
||||
let multipartRequestBody = '';
|
||||
multipartRequestBody += delimiter;
|
||||
multipartRequestBody += 'Content-Type: application/json; charset=UTF-8\r\n\r\n';
|
||||
multipartRequestBody += JSON.stringify(metadata);
|
||||
multipartRequestBody += delimiter;
|
||||
multipartRequestBody += 'Content-Type: text/plain; charset=UTF-8\r\n\r\n';
|
||||
multipartRequestBody += media;
|
||||
multipartRequestBody += closeDelimiter;
|
||||
options.url = options.url.replace(
|
||||
'https://www.googleapis.com/',
|
||||
'https://www.googleapis.com/upload/');
|
||||
return request(refreshedToken, {
|
||||
...options,
|
||||
params: {
|
||||
uploadType: 'multipart',
|
||||
},
|
||||
headers: {
|
||||
'Content-Type': `multipart/mixed; boundary="${boundary}"`,
|
||||
},
|
||||
body: multipartRequestBody,
|
||||
}).then(res => res.body);
|
||||
}
|
||||
return request(refreshedToken, {
|
||||
...options,
|
||||
body: metadata,
|
||||
}).then(res => res.body);
|
||||
}));
|
||||
}
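Note: the multipart body assembled above, spelled out with concrete (hypothetical) values: the JSON metadata part first, then the media part, joined by the boundary and sent to the `/upload/` endpoint with `uploadType=multipart`.

```js
const boundary = '-------4f3c2a'; // the real code uses `-------${utils.uid()}`
const delimiter = `\r\n--${boundary}\r\n`;
const closeDelimiter = `\r\n--${boundary}--`;

const metadata = { name: 'example', parents: ['appDataFolder'] };
const media = '# Hello\n';

const multipartRequestBody =
  delimiter +
  'Content-Type: application/json; charset=UTF-8\r\n\r\n' +
  JSON.stringify(metadata) +
  delimiter +
  'Content-Type: text/plain; charset=UTF-8\r\n\r\n' +
  media +
  closeDelimiter;
// POST https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart
// Content-Type: multipart/mixed; boundary="-------4f3c2a"
```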
|
||||
|
||||
function downloadFile(refreshedToken, id) {
|
||||
return request(refreshedToken, {
|
||||
method: 'GET',
|
||||
url: `https://www.googleapis.com/drive/v3/files/${id}?alt=media`,
|
||||
raw: true,
|
||||
}).then(res => res.body);
|
||||
}
|
||||
|
||||
export default {
|
||||
startOauth2(scopes, sub = null, silent = false) {
|
||||
return utils.startOauth2(
|
||||
'https://accounts.google.com/o/oauth2/v2/auth', {
|
||||
client_id: clientId,
|
||||
response_type: 'token',
|
||||
scope: scopes.join(' '),
|
||||
hd: appsDomain,
|
||||
login_hint: sub,
|
||||
prompt: silent ? 'none' : null,
|
||||
}, silent)
|
||||
'https://accounts.google.com/o/oauth2/v2/auth', {
|
||||
client_id: clientId,
|
||||
response_type: 'token',
|
||||
scope: scopes.join(' '),
|
||||
hd: appsDomain,
|
||||
login_hint: sub,
|
||||
prompt: silent ? 'none' : null,
|
||||
}, silent)
|
||||
// Call the tokeninfo endpoint
|
||||
.then(data => utils.request({
|
||||
method: 'POST',
|
||||
@ -116,8 +180,10 @@ export default {
|
||||
});
|
||||
},
|
||||
getChanges(token) {
|
||||
let changes = [];
|
||||
return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
|
||||
const result = {
|
||||
changes: [],
|
||||
};
|
||||
return this.refreshToken(gdriveAppDataScopes, token)
|
||||
.then((refreshedToken) => {
|
||||
const getPage = (pageToken = '1') => request(refreshedToken, {
|
||||
method: 'GET',
|
||||
@ -129,122 +195,39 @@ export default {
|
||||
fields: 'nextPageToken,newStartPageToken,changes(fileId,removed,file/name,file/properties)',
|
||||
},
|
||||
}).then((res) => {
|
||||
changes = changes.concat(res.body.changes.filter(item => item.fileId));
|
||||
result.changes = result.changes.concat(res.body.changes.filter(item => item.fileId));
|
||||
if (res.body.nextPageToken) {
|
||||
return getPage(res.body.nextPageToken);
|
||||
}
|
||||
changes.forEach((change) => {
|
||||
if (change.file) {
|
||||
change.item = {
|
||||
name: change.file.name,
|
||||
};
|
||||
if (change.file.properties) {
|
||||
Object.keys(change.file.properties).forEach((key) => {
|
||||
change.item[key] = JSON.parse(change.file.properties[key]);
|
||||
});
|
||||
}
|
||||
change.syncData = {
|
||||
id: change.fileId,
|
||||
itemId: change.item.id,
|
||||
updated: change.item.updated,
|
||||
};
|
||||
change.file = undefined;
|
||||
}
|
||||
});
|
||||
changes.nextPageToken = res.body.newStartPageToken;
|
||||
return changes;
|
||||
result.nextPageToken = res.body.newStartPageToken;
|
||||
return result;
|
||||
});
|
||||
|
||||
return getPage(refreshedToken.nextPageToken);
|
||||
});
|
||||
},
|
||||
updateNextPageToken(token, changes) {
|
||||
const lastToken = store.getters['data/googleTokens'][token.sub];
|
||||
if (changes.nextPageToken !== lastToken.nextPageToken) {
|
||||
store.dispatch('data/setGoogleToken', {
|
||||
...lastToken,
|
||||
nextPageToken: changes.nextPageToken,
|
||||
});
|
||||
}
|
||||
saveFile(token, name, parents, media, fileId, ifNotTooLate) {
|
||||
return this.refreshToken(getGdriveScopes(), token)
|
||||
.then(refreshedToken => saveFile(refreshedToken, name, parents, media, fileId, ifNotTooLate));
|
||||
},
|
||||
saveItem(token, item, syncData, ifNotTooLate = cb => res => cb(res)) {
|
||||
return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
|
||||
// Refreshing a token can take a while if an oauth window pops up, so check if it's too late
|
||||
.then(ifNotTooLate((refreshedToken) => {
|
||||
const options = {
|
||||
method: 'POST',
|
||||
url: 'https://www.googleapis.com/drive/v3/files',
|
||||
};
|
||||
const metadata = {
|
||||
name: item.name,
|
||||
properties: {},
|
||||
};
|
||||
if (syncData) {
|
||||
options.method = 'PATCH';
|
||||
options.url = `https://www.googleapis.com/drive/v3/files/${syncData.id}`;
|
||||
} else {
|
||||
// Parents field is not patchable
|
||||
metadata.parents = ['appDataFolder'];
|
||||
}
|
||||
Object.keys(item).forEach((key) => {
|
||||
if (key !== 'name' && key !== 'tx') {
|
||||
metadata.properties[key] = JSON.stringify(item[key]);
|
||||
}
|
||||
});
|
||||
const media = null;
|
||||
const boundary = `-------${utils.uid()}`;
|
||||
const delimiter = `\r\n--${boundary}\r\n`;
|
||||
const closeDelimiter = `\r\n--${boundary}--`;
|
||||
if (media) {
|
||||
let multipartRequestBody = '';
|
||||
multipartRequestBody += delimiter;
|
||||
multipartRequestBody += 'Content-Type: application/json\r\n\r\n';
|
||||
multipartRequestBody += JSON.stringify(metadata);
|
||||
multipartRequestBody += delimiter;
|
||||
multipartRequestBody += 'Content-Type: application/json\r\n\r\n';
|
||||
multipartRequestBody += JSON.stringify(media);
|
||||
multipartRequestBody += closeDelimiter;
|
||||
options.url = options.url.replace(
|
||||
'https://www.googleapis.com/',
|
||||
'https://www.googleapis.com/upload/');
|
||||
return request(refreshedToken, {
|
||||
...options,
|
||||
params: {
|
||||
uploadType: 'multipart',
|
||||
},
|
||||
headers: {
|
||||
'Content-Type': `multipart/mixed; boundary="${boundary}"`,
|
||||
},
|
||||
body: multipartRequestBody,
|
||||
});
|
||||
}
|
||||
return request(refreshedToken, {
|
||||
...options,
|
||||
body: metadata,
|
||||
}).then(res => ({
|
||||
// Build sync data
|
||||
id: res.body.id,
|
||||
itemId: item.id,
|
||||
updated: item.updated,
|
||||
}));
|
||||
}));
|
||||
saveAppDataFile(token, name, parents, media, fileId, ifNotTooLate) {
|
||||
return this.refreshToken(gdriveAppDataScopes, token)
|
||||
.then(refreshedToken => saveFile(refreshedToken, name, parents, media, fileId, ifNotTooLate));
|
||||
},
|
||||
removeItem(token, syncData, ifNotTooLate = cb => res => cb(res)) {
|
||||
return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
|
||||
downloadFile(token, id) {
|
||||
return this.refreshToken(getGdriveScopes(), token)
|
||||
.then(refreshedToken => downloadFile(refreshedToken, id));
|
||||
},
|
||||
downloadAppDataFile(token, id) {
|
||||
return this.refreshToken(gdriveAppDataScopes, token)
|
||||
.then(refreshedToken => downloadFile(refreshedToken, id));
|
||||
},
|
||||
removeAppDataFile(token, id, ifNotTooLate = cb => res => cb(res)) {
|
||||
return this.refreshToken(gdriveAppDataScopes, token)
|
||||
// Refreshing a token can take a while if an oauth window pops up, so check if it's too late
|
||||
.then(ifNotTooLate(refreshedToken => request(refreshedToken, {
|
||||
method: 'DELETE',
|
||||
url: `https://www.googleapis.com/drive/v3/files/${syncData.id}`,
|
||||
})).then(() => syncData));
|
||||
},
|
||||
downloadFile(refreshedToken, id) {
|
||||
return request(refreshedToken, {
|
||||
method: 'GET',
|
||||
url: `https://www.googleapis.com/drive/v3/files/${id}?alt=media`,
|
||||
}).then(res => res.body);
|
||||
},
|
||||
downloadAppDataFile(token, id) {
|
||||
return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
|
||||
.then(refreshedToken => this.downloadFile(refreshedToken, id));
|
||||
url: `https://www.googleapis.com/drive/v3/files/${id}`,
|
||||
})));
|
||||
},
|
||||
};
|
src/services/providers/providerUtils.js (new file, 48 lines)
@ -0,0 +1,48 @@
|
||||
import emptyContent from '../../data/emptyContent';
|
||||
|
||||
const dataExtractor = /<!--stackedit_data:([A-Za-z0-9+/=\s]+)-->$/;
|
||||
|
||||
// https://developer.mozilla.org/en/docs/Web/API/WindowBase64/Base64_encoding_and_decoding
|
||||
const b64Encode = str => btoa(encodeURIComponent(str).replace(/%([0-9A-F]{2})/g,
|
||||
(match, p1) => String.fromCharCode(`0x${p1}`)));
|
||||
const b64Decode = str => decodeURIComponent(atob(str).split('').map(
|
||||
c => `%${`00${c.charCodeAt(0).toString(16)}`.slice(-2)}`).join(''));
|
||||
|
||||
export default {
|
||||
serializeContent(content) {
|
||||
let result = content.text;
|
||||
const data = {};
|
||||
if (content.properties.length > 1) {
|
||||
data.properties = content.properties;
|
||||
}
|
||||
if (Object.keys(content.discussions).length) {
|
||||
data.discussions = content.discussions;
|
||||
}
|
||||
if (Object.keys(content.comments).length) {
|
||||
data.comments = content.comments;
|
||||
}
|
||||
if (content.history && content.history.length) {
|
||||
data.history = content.history;
|
||||
}
|
||||
if (Object.keys(data).length) {
|
||||
const serializedData = b64Encode(JSON.stringify(data)).replace(/(.{50})/g, '$1\n');
|
||||
result += `<!--stackedit_data:\n${serializedData}-->`;
|
||||
}
|
||||
return result;
|
||||
},
|
||||
parseContent(serializedContent) {
|
||||
const result = emptyContent();
|
||||
result.text = serializedContent;
|
||||
const extractedData = dataExtractor.exec(serializedContent);
|
||||
if (extractedData) {
|
||||
try {
|
||||
const serializedData = extractedData[1].replace(/\s/g, '');
|
||||
Object.assign(result, JSON.parse(b64Decode(serializedData)));
|
||||
result.text = serializedContent.slice(0, extractedData.index);
|
||||
} catch (e) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
return result;
|
||||
},
|
||||
};
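Note: a minimal round trip through the new helpers (a browser context is assumed for `btoa`/`atob`; the import path is relative to src/services/providers/).

```js
import providerUtils from './providerUtils';

const content = {
  text: '# Hello\n',
  properties: 'title: Hello\n',
  discussions: {},
  comments: {},
};

const serialized = providerUtils.serializeContent(content);
// '# Hello\n<!--stackedit_data:\n<base64, wrapped every 50 chars>-->'

const parsed = providerUtils.parseContent(serialized);
// parsed.text === '# Hello\n', parsed.properties === 'title: Hello\n';
// discussions and comments fall back to the emptyContent() defaults.
```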
|
@ -2,18 +2,22 @@ import localDbSvc from './localDbSvc';
|
||||
import store from '../store';
|
||||
import welcomeFile from '../data/welcomeFile.md';
|
||||
import utils from './utils';
|
||||
import diffUtils from './diffUtils';
|
||||
import userActivitySvc from './userActivitySvc';
|
||||
import gdriveAppDataProvider from './providers/gdriveAppDataProvider';
|
||||
import googleHelper from './helpers/googleHelper';
|
||||
|
||||
const lastSyncActivityKey = 'lastSyncActivity';
|
||||
let lastSyncActivity;
|
||||
const getStoredLastSyncActivity = () => parseInt(localStorage[lastSyncActivityKey], 10) || 0;
|
||||
const inactivityThreshold = 3 * 1000; // 3 sec
|
||||
const restartSyncAfter = 30 * 1000; // 30 sec
|
||||
const autoSyncAfter = utils.randomize(restartSyncAfter);
|
||||
const isSyncAvailable = () => window.navigator.onLine !== false &&
|
||||
!!store.getters['data/loginToken'];
|
||||
const autoSyncAfter = utils.randomize(60 * 1000); // 60 sec
|
||||
|
||||
const isDataSyncPossible = () => !!store.getters['data/loginToken'];
|
||||
const hasCurrentFileSyncLocations = () => !!store.getters['syncLocation/current'].length;
|
||||
|
||||
const isSyncPossible = () => !store.state.offline &&
|
||||
(isDataSyncPossible() || hasCurrentFileSyncLocations());
|
||||
|
||||
function isSyncWindow() {
|
||||
const storedLastSyncActivity = getStoredLastSyncActivity();
|
||||
@ -48,13 +52,31 @@ function getSyncToken(syncLocation) {
|
||||
}
|
||||
}
|
||||
|
||||
function cleanSyncedContent(syncedContent) {
|
||||
// Clean syncHistory from removed syncLocations
|
||||
Object.keys(syncedContent.syncHistory).forEach((syncLocationId) => {
|
||||
if (syncLocationId !== 'main' && !store.state.syncLocation.itemMap[syncLocationId]) {
|
||||
delete syncedContent.syncHistory[syncLocationId];
|
||||
}
|
||||
});
|
||||
const allSyncLocationHashes = new Set([].concat(
|
||||
...Object.keys(syncedContent.syncHistory).map(
|
||||
id => syncedContent.syncHistory[id])));
|
||||
// Clean historyData from unused contents
|
||||
Object.keys(syncedContent.historyData).map(hash => parseInt(hash, 10)).forEach((hash) => {
|
||||
if (!allSyncLocationHashes.has(hash)) {
|
||||
delete syncedContent.historyData[hash];
|
||||
}
|
||||
});
|
||||
}
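Note: the bookkeeping this function prunes, shown as a sample record. `syncHistory` maps each sync location id (plus the special `main` app-data location) to a two-element array of content hashes, last sent and last merged (the LAST_SENT/LAST_MERGED indices defined further down), and `historyData` keeps the full content for every hash still referenced; the values below are made up.

```js
const syncedContent = {
  id: 'abc123/syncedContent',
  type: 'syncedContent',
  syncHistory: {
    main: [42, 42], // gdriveAppData location: last sent === last merged
    loc1: [42, 37], // hypothetical extra location (assumed to still exist in the store)
  },
  historyData: {
    37: { /* content whose hash is 37 */ },
    42: { /* content whose hash is 42 */ },
    13: { /* referenced by no location: cleanSyncedContent deletes this entry */ },
  },
  hash: 0,
};
```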
|
||||
|
||||
const loader = type => fileId => localDbSvc.loadItem(`${fileId}/${type}`)
|
||||
// Item does not exist, create it
|
||||
.catch(() => store.commit(`${type}/setItem`, {
|
||||
id: `${fileId}/${type}`,
|
||||
}));
|
||||
const loadContent = loader('content');
|
||||
const loadSyncContent = loader('syncContent');
|
||||
const loadSyncedContent = loader('syncedContent');
|
||||
const loadContentState = loader('contentState');
|
||||
|
||||
function applyChanges(changes) {
|
||||
@ -73,9 +95,9 @@ function applyChanges(changes) {
|
||||
}
|
||||
delete syncData[change.fileId];
|
||||
syncDataChanged = true;
|
||||
} else if (!change.removed && change.item && change.item.updated) {
|
||||
if (!existingSyncData || (existingSyncData.updated !== change.item.updated && (
|
||||
!existingItem || existingItem.updated !== change.item.updated
|
||||
} else if (!change.removed && change.item && change.item.hash) {
|
||||
if (!existingSyncData || (existingSyncData.hash !== change.item.hash && (
|
||||
!existingItem || existingItem.hash !== change.item.hash
|
||||
))) {
|
||||
// Put object in the store
|
||||
if (change.item.type !== 'content') { // Merge contents later
|
||||
@ -93,33 +115,184 @@ function applyChanges(changes) {
|
||||
}
|
||||
}
|
||||
|
||||
const LAST_SENT = 0;
|
||||
const LAST_MERGED = 1;
|
||||
|
||||
function syncFile(fileId) {
|
||||
return loadSyncedContent(fileId)
|
||||
.then(() => loadContent(fileId))
|
||||
.then(() => {
|
||||
const getContent = () => store.state.content.itemMap[`${fileId}/content`];
|
||||
const getSyncedContent = () => store.state.syncedContent.itemMap[`${fileId}/syncedContent`];
|
||||
const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];
|
||||
const downloadedLocations = {};
|
||||
|
||||
const isLocationSynced = syncLocation =>
|
||||
getSyncHistoryItem(syncLocation.id)[LAST_SENT] === getContent().hash;
|
||||
|
||||
const syncOneContentLocation = () => {
|
||||
const syncLocations = [
|
||||
...store.getters['syncLocation/groupedByFileId'][fileId] || [],
|
||||
];
|
||||
if (isDataSyncPossible()) {
|
||||
syncLocations.push({ id: 'main', provider: 'gdriveAppData', fileId });
|
||||
}
|
||||
let result;
|
||||
syncLocations.some((syncLocation) => {
|
||||
if (!downloadedLocations[syncLocation.id] || !isLocationSynced(syncLocation)) {
|
||||
const provider = getSyncProvider(syncLocation);
|
||||
const token = getSyncToken(syncLocation);
|
||||
result = provider && token && provider.downloadContent(token, syncLocation)
|
||||
.then((serverContent = null) => {
|
||||
downloadedLocations[syncLocation.id] = true;
|
||||
|
||||
const syncedContent = getSyncedContent();
|
||||
const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
|
||||
let mergedContent = (() => {
|
||||
const clientContent = utils.deepCopy(getContent());
|
||||
if (!serverContent) {
|
||||
// Sync location has not been created yet
|
||||
return clientContent;
|
||||
}
|
||||
if (serverContent.hash === clientContent.hash) {
|
||||
// Server and client contents are synced
|
||||
return clientContent;
|
||||
}
|
||||
if (syncedContent.historyData[serverContent.hash]) {
|
||||
// Server content has not changed or has already been merged
|
||||
return clientContent;
|
||||
}
|
||||
// Perform a merge with last merged content if any, or a simple fusion otherwise
|
||||
let lastMergedContent;
|
||||
serverContent.history.some((hash) => {
|
||||
lastMergedContent = syncedContent.historyData[hash];
|
||||
return lastMergedContent;
|
||||
});
|
||||
if (!lastMergedContent && syncHistoryItem) {
|
||||
lastMergedContent = syncedContent.historyData[syncHistoryItem[LAST_MERGED]];
|
||||
}
|
||||
return diffUtils.mergeContent(serverContent, clientContent, lastMergedContent);
|
||||
})();
|
||||
|
||||
// Update content in store
|
||||
store.commit('content/patchItem', {
|
||||
id: `${fileId}/content`,
|
||||
...mergedContent,
|
||||
});
|
||||
|
||||
// Retrieve content with new `hash` value and freeze it
|
||||
mergedContent = utils.deepCopy(getContent());
|
||||
|
||||
// Make merged content history
|
||||
const mergedContentHistory = serverContent ? serverContent.history.slice() : [];
|
||||
let skipUpload = true;
|
||||
if (mergedContentHistory[0] !== mergedContent.hash) {
|
||||
// Put merged content hash at the beginning of history
|
||||
mergedContentHistory.unshift(mergedContent.hash);
|
||||
// Server content is either out of sync or its history is incomplete, do upload
|
||||
skipUpload = false;
|
||||
}
|
||||
if (syncHistoryItem && syncHistoryItem[0] !== mergedContent.hash) {
|
||||
// Clean up by removing the hash we've previously added
|
||||
const idx = mergedContentHistory.indexOf(syncHistoryItem[LAST_SENT]);
|
||||
if (idx !== -1) {
|
||||
mergedContentHistory.splice(idx, 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Store server content if any, and merged content which will be sent if different
|
||||
const newSyncedContent = utils.deepCopy(syncedContent);
|
||||
const newSyncHistoryItem = newSyncedContent.syncHistory[syncLocation.id] || [];
|
||||
newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
|
||||
if (serverContent && (serverContent.hash === newSyncHistoryItem[LAST_SENT] ||
|
||||
serverContent.history.indexOf(newSyncHistoryItem[LAST_SENT]) !== -1)
|
||||
) {
|
||||
// The server has accepted the content we previously sent
|
||||
newSyncHistoryItem[LAST_MERGED] = newSyncHistoryItem[LAST_SENT];
|
||||
}
|
||||
newSyncHistoryItem[LAST_SENT] = mergedContent.hash;
|
||||
newSyncedContent.historyData[mergedContent.hash] = mergedContent;
|
||||
|
||||
// Clean synced content from unused revisions
|
||||
cleanSyncedContent(newSyncedContent);
|
||||
// Store synced content
|
||||
store.commit('syncedContent/patchItem', newSyncedContent);
|
||||
|
||||
if (skipUpload) {
|
||||
// Server content and merged content are equal, skip content upload
|
||||
return null;
|
||||
}
|
||||
|
||||
// Prevent from sending new content too long after old content has been fetched
|
||||
const syncStartTime = Date.now();
|
||||
const ifNotTooLate = cb => (res) => {
|
||||
// No time to refresh a token...
|
||||
if (syncStartTime + 500 < Date.now()) {
|
||||
throw new Error('TOO_LATE');
|
||||
}
|
||||
return cb(res);
|
||||
};
|
||||
|
||||
// Upload merged content
|
||||
return provider.uploadContent(token, {
|
||||
...mergedContent,
|
||||
history: mergedContentHistory,
|
||||
}, syncLocation, ifNotTooLate);
|
||||
})
|
||||
.then(() => syncOneContentLocation());
|
||||
}
|
||||
return result;
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
return syncOneContentLocation();
|
||||
})
|
||||
.then(() => localDbSvc.unloadContents(), (err) => {
|
||||
localDbSvc.unloadContents();
|
||||
throw err;
|
||||
})
|
||||
.catch((err) => {
|
||||
if (err && err.message === 'TOO_LATE') {
|
||||
// Restart sync
|
||||
return syncFile(fileId);
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
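For readers skimming this hunk: each entry in syncedContent.syncHistory is a two-element array indexed by the LAST_SENT and LAST_MERGED constants above, and historyData keeps full content snapshots keyed by hash so they can serve as merge ancestors. A minimal, illustrative shape (values are made up, not taken from the commit):

// Hypothetical syncedContent item for a single file, shaped after its usage in syncFile()
const exampleSyncedContent = {
  syncHistory: {
    // 'main' is the implicit gdriveAppData location added in syncOneContentLocation()
    main: [-123456789 /* LAST_SENT */, -123456789 /* LAST_MERGED */],
  },
  historyData: {
    '-123456789': { text: '# Hello', hash: -123456789 },
  },
};
// A location is up to date when its LAST_SENT hash equals the current content hash
const upToDate = exampleSyncedContent.syncHistory.main[LAST_SENT] === -123456789;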
function sync() {
const googleToken = store.getters['data/loginToken'];
return googleHelper.getChanges(googleToken)
return gdriveAppDataProvider.getChanges(googleToken)
.then((changes) => {
// Apply changes
applyChanges(changes);
googleHelper.updateNextPageToken(googleToken, changes);
gdriveAppDataProvider.setAppliedChanges(googleToken, changes);

// Prevent from sending items too long after changes have been retrieved
const syncStartTime = Date.now();
const ifNotTooLate = cb => (res) => {
if (syncStartTime + restartSyncAfter < Date.now()) {
throw new Error('too_late');
throw new Error('TOO_LATE');
}
return cb(res);
};

// Called until no item to save
const saveNextItem = ifNotTooLate(() => {
const storeItemMap = store.getters.syncedItemMap;
const storeItemMap = {
...store.state.file.itemMap,
...store.state.folder.itemMap,
...store.state.syncLocation.itemMap,
// Deal with contents later
};
const syncDataByItemId = store.getters['data/syncDataByItemId'];
let result;
Object.keys(storeItemMap).some((id) => {
const item = storeItemMap[id];
const existingSyncData = syncDataByItemId[id];
if (!existingSyncData || existingSyncData.updated !== item.updated) {
result = googleHelper.saveItem(
if (!existingSyncData || existingSyncData.hash !== item.hash) {
result = gdriveAppDataProvider.saveItem(
googleToken,
// Use deepCopy to freeze objects
utils.deepCopy(item),
@ -138,15 +311,23 @@ function sync() {

// Called until no item to remove
const removeNextItem = ifNotTooLate(() => {
const storeItemMap = store.getters.syncedItemMap;
const storeItemMap = {
...store.state.file.itemMap,
...store.state.folder.itemMap,
...store.state.syncLocation.itemMap,
...store.state.content.itemMap, // Deal with contents now
};
const syncData = store.getters['data/syncData'];
let result;
Object.keys(syncData).some((id) => {
const existingSyncData = syncData[id];
if (!storeItemMap[existingSyncData.itemId]) {
if (!storeItemMap[existingSyncData.itemId] &&
// Remove content only if file has been removed
(existingSyncData.type !== 'content' || !storeItemMap[existingSyncData.itemId.split('/')[0]])
) {
// Use deepCopy to freeze objects
const syncDataToRemove = utils.deepCopy(existingSyncData);
result = googleHelper.removeItem(googleToken, syncDataToRemove, ifNotTooLate)
result = gdriveAppDataProvider.removeItem(googleToken, syncDataToRemove, ifNotTooLate)
.then(() => {
const syncDataCopy = { ...store.getters['data/syncData'] };
delete syncDataCopy[syncDataToRemove.id];
@ -159,98 +340,45 @@ function sync() {
return result;
});

// Get content `updated` field from itemMap or from localDbSvc if not loaded
const getContentUpdated = (contentId) => {
const loadedContent = store.state.content.itemMap[contentId];
return loadedContent ? loadedContent.updated : localDbSvc.updatedMap.content[contentId];
const getOneFileIdToSync = () => {
const allContentIds = Object.keys({
...store.state.content.itemMap,
...store.getters['data/syncDataByType'].content,
});
let fileId;
allContentIds.some((contentId) => {
// Get content hash from itemMap or from localDbSvc if not loaded
const loadedContent = store.state.content.itemMap[contentId];
const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
const syncData = store.getters['data/syncDataByItemId'][contentId];
// Sync if item hash and syncData hash are different
if (!hash || !syncData || hash !== syncData.hash) {
[fileId] = contentId.split('/');
}
return fileId;
});
return fileId;
};

// Download current file content and contents that have changed
const forceContentIds = { [`${store.getters['file/current'].id}/content`]: true };
store.getters['file/items'].forEach((file) => {
const contentId = `${file.id}/content`;
const updated = getContentUpdated(contentId);
const existingSyncData = store.getters['data/syncDataByItemId'][contentId];
});

const syncOneContent = fileId => loadSyncContent(fileId)
.then(() => loadContent(fileId))
.then(() => {
const getContent = () => store.state.content.itemMap[`${fileId}/content`];
const getSyncContent = () => store.state.content.itemMap[`${fileId}/syncContent`];

const syncLocations = [
{ id: 'main', provider: 'gdriveAppData' },
...getContent().syncLocations.filter(syncLocation => getSyncToken(syncLocation),
)];
const downloadedLocations = {};

const syncOneContentLocation = () => {
let result;
syncLocations.some((syncLocation) => {
if (!downloadedLocations[syncLocation.id]) {
const provider = getSyncProvider(syncLocation);
const token = getSyncToken(syncLocation);
result = provider && token && provider.downloadContent(fileId, syncLocation)
.then((content) => {
const syncContent = getSyncContent();
const syncLocationData = syncContent.syncLocationData[syncLocation.id] || {
history: [],
};
let lastMergedContent;
syncLocationData.history.some((updated) => {
if (content.history.indexOf(updated) !== -1) {

}
return lastMergedContent;
});
})
.then(() => syncOneContentLocation());
}
return result;
});
return result;
};

return syncOneContentLocation();
})
.then(() => localDbSvc.unloadContents(), (err) => {
localDbSvc.unloadContents();
throw err;
});

// Called until no content to save
const saveNextContent = ifNotTooLate(() => {
let saveContentPromise;
const getSaveContentPromise = (contentId) => {
const updated = getContentUpdated(contentId);
const existingSyncData = store.getters['data/syncDataByItemId'][contentId];
if (!existingSyncData || existingSyncData.updated !== updated) {
saveContentPromise = localDbSvc.loadItem(contentId)
.then(content => googleHelper.saveItem(
googleToken,
// Use deepCopy to freeze objects
utils.deepCopy(content),
utils.deepCopy(existingSyncData),
ifNotTooLate,
))
.then(resultSyncData => store.dispatch('data/patchSyncData', {
[resultSyncData.id]: resultSyncData,
}))
.then(() => saveNextContent());
}
return saveContentPromise;
};
Object.keys(localDbSvc.updatedMap.content)
.some(id => getSaveContentPromise(id, syncDataByItemId));
return saveContentPromise;
});
const syncNextFile = () => {
const fileId = getOneFileIdToSync();
return fileId && syncFile(fileId)
.then(() => syncNextFile());
};

return Promise.resolve()
.then(() => saveNextItem())
.then(() => removeNextItem())
.then(() => {
if (store.getters['content/current'].id) {
// Sync current file first
return syncFile(store.getters['file/current'].id)
.then(() => syncNextFile());
}
return syncNextFile();
})
.catch((err) => {
if (err && err.message === 'too_late') {
if (err && err.message === 'TOO_LATE') {
// Restart sync
return sync();
}
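The behavioral core of this rewrite is that dirty checking now compares content-derived hashes instead of `updated` timestamps, both for metadata items (saveNextItem) and for picking the next file to sync (getOneFileIdToSync). A condensed sketch of the rule, with illustrative items:

// An item needs uploading when it has never been synced or its hash differs from the synced one
const needsUpload = (item, existingSyncData) =>
  !existingSyncData || existingSyncData.hash !== item.hash;

needsUpload({ id: 'file1', hash: 123 }, undefined);     // true, never synced
needsUpload({ id: 'file1', hash: 124 }, { hash: 123 }); // true, local change
needsUpload({ id: 'file1', hash: 123 }, { hash: 123 }); // false, nothing to do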
@ -266,19 +394,30 @@ function requestSync() {
// Only start syncing when these conditions are met
if (userActivitySvc.isActive() && isSyncWindow()) {
clearInterval(intervalId);
if (!isSyncAvailable()) {
if (!isSyncPossible()) {
// Cancel sync
reject();
} else {
// Call setLastSyncActivity periodically
intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
setLastSyncActivity();
const cleaner = cb => (res) => {
clearInterval(intervalId);
cb(res);
};
sync().then(cleaner(resolve), cleaner(reject));
return;
}

// Call setLastSyncActivity periodically
intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
setLastSyncActivity();
const cleaner = cb => (res) => {
clearInterval(intervalId);
cb(res);
};
Promise.resolve()
.then(() => {
if (isDataSyncPossible()) {
return sync();
}
if (hasCurrentFileSyncLocations()) {
return syncFile(store.getters['file/current'].id);
}
return null;
})
.then(cleaner(resolve), cleaner(reject));
}
};
intervalId = utils.setInterval(() => attempt(), 1000);
@ -288,7 +427,7 @@ function requestSync() {

// Sync periodically
utils.setInterval(() => {
if (isSyncAvailable() &&
if (isSyncPossible() &&
userActivitySvc.isActive() &&
isSyncWindow() &&
isAutoSyncReady()
@ -337,8 +476,8 @@ localDbSvc.sync()
return Promise.resolve()
// Load contentState from DB
.then(() => loadContentState(currentFile.id))
// Load syncContent from DB
.then(() => loadSyncContent(currentFile.id))
// Load syncedContent from DB
.then(() => loadSyncedContent(currentFile.id))
// Load content from DB
.then(() => localDbSvc.loadItem(`${currentFile.id}/content`));
}),
@ -358,6 +497,6 @@ utils.setInterval(() => {
}, 5000);

export default {
isSyncAvailable,
isSyncPossible,
requestSync,
};
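The three guards used above (isSyncPossible, isDataSyncPossible, hasCurrentFileSyncLocations) are defined earlier in the file and are not part of this diff; from the way they are used here they presumably reduce to something like the following sketch (an assumption, not the commit's code):

// Assumed helpers, inferred from usage in requestSync() and the periodic sync check
const isDataSyncPossible = () => !!store.getters['data/loginToken'];
const hasCurrentFileSyncLocations = () => store.getters['syncLocation/current'].length > 0;
const isSyncPossible = () => isDataSyncPossible() || hasCurrentFileSyncLocations();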
@ -14,14 +14,36 @@ const scriptLoadingPromises = Object.create(null);
const origin = `${location.protocol}//${location.host}`;

export default {
types: ['contentState', 'syncContent', 'content', 'file', 'folder', 'data'],
types: ['contentState', 'syncedContent', 'content', 'file', 'folder', 'syncLocation', 'data'],
deepCopy(obj) {
return obj === undefined ? obj : JSON.parse(JSON.stringify(obj));
return obj == null ? obj : JSON.parse(JSON.stringify(obj));
},
serializeObject(obj) {
return obj === undefined ? obj : JSON.stringify(obj, (key, value) => {
if (Object.prototype.toString.call(value) !== '[object Object]') {
return value;
}
// Sort keys to have a predictable result
return Object.keys(value).sort().reduce((sorted, valueKey) => {
sorted[valueKey] = value[valueKey];
return sorted;
}, {});
});
},
uid() {
crypto.getRandomValues(array);
return array.cl_map(value => alphabet[value % radix]).join('');
},
hash(str) {
let hash = 0;
if (!str) return hash;
for (let i = 0; i < str.length; i += 1) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char; // eslint-disable-line no-bitwise
hash |= 0; // eslint-disable-line no-bitwise
}
return hash;
},
randomize(value) {
return Math.floor((1 + (Math.random() * 0.2)) * value);
},
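serializeObject and hash are new in this commit and are combined elsewhere (see moduleTemplate below) to derive stable item hashes: serializeObject sorts object keys so that deep-equal objects serialize identically, and hash folds the string into a 32-bit integer. A small usage sketch (values are illustrative):

const a = utils.serializeObject({ id: 'file1', text: 'Hello' });
const b = utils.serializeObject({ text: 'Hello', id: 'file1' });
a === b;                         // true, key order does not matter
utils.hash(a) === utils.hash(b); // true, so the derived hashes match too
utils.hash('');                  // 0, falsy input short-circuits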
@ -73,6 +95,7 @@ export default {
oauth2Context.iframeElt.onload = () => {
oauth2Context.closeTimeout = setTimeout(() => oauth2Context.clean(), 5 * 1000);
};
oauth2Context.iframeElt.onerror = () => oauth2Context.clean();
oauth2Context.iframeElt.src = authorizeUrl;
document.body.appendChild(oauth2Context.iframeElt);
oauth2Context.wnd = oauth2Context.iframeElt.contentWindow;
@ -96,7 +119,9 @@ export default {
}
clearTimeout(oauth2Context.closeTimeout);
window.removeEventListener('message', oauth2Context.msgHandler);
oauth2Context.clean = () => { }; // Prevent from cleaning several times
oauth2Context.clean = () => {
// Prevent from cleaning several times
};
if (errorMsg) {
reject(new Error(errorMsg));
}
@ -130,9 +155,11 @@ export default {
let retryAfter = 500; // 500 ms
const maxRetryAfter = 30 * 1000; // 30 sec
const config = Object.assign({}, configParam);
config.headers = Object.assign({
'Content-Type': 'application/json',
}, config.headers);
config.headers = Object.assign({}, config.headers);
if (config.body && typeof config.body === 'object') {
config.body = JSON.stringify(config.body);
config.headers['Content-Type'] = 'application/json';
}

function parseHeaders(xhr) {
const pairs = xhr.getAllResponseHeaders().trim().split('\n');
@ -174,10 +201,12 @@ export default {
headers: parseHeaders(xhr),
body: xhr.responseText,
};
try {
result.body = JSON.parse(result.body);
} catch (e) {
// ignore
if (!config.raw) {
try {
result.body = JSON.parse(result.body);
} catch (e) {
// ignore
}
}
if (result.status >= 200 && result.status < 300) {
resolve(result);
@ -202,7 +231,7 @@ export default {
Object.keys(config.headers).forEach((key) => {
xhr.setRequestHeader(key, config.headers[key]);
});
xhr.send(config.body ? JSON.stringify(config.body) : null);
xhr.send(config.body || null);
})
.catch((err) => {
// Try again later in case of retriable error
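The retryAfter and maxRetryAfter constants above, together with the closing comment, suggest the request helper retries retriable failures with a growing delay capped at 30 seconds; the retry loop itself is outside this diff. A plausible sketch of that pattern, labeled as an assumption:

// Assumption: exponential backoff capped at maxRetryAfter (not taken from the commit)
const attempt = () => sendRequest() // sendRequest stands in for the XHR promise above
  .catch((err) => {
    const retriable = !err.status || err.status === 429 || err.status >= 500;
    if (!retriable) {
      throw err;
    }
    return new Promise(resolve => setTimeout(resolve, retryAfter))
      .then(() => {
        retryAfter = Math.min(retryAfter * 2, maxRetryAfter);
        return attempt();
      });
  });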
@ -3,10 +3,11 @@ import Vue from 'vue';
import Vuex from 'vuex';
import utils from '../services/utils';
import contentState from './modules/contentState';
import syncContent from './modules/syncContent';
import syncedContent from './modules/syncedContent';
import content from './modules/content';
import file from './modules/file';
import folder from './modules/folder';
import syncLocation from './modules/syncLocation';
import data from './modules/data';
import layout from './modules/layout';
import editor from './modules/editor';
@ -18,9 +19,10 @@ Vue.use(Vuex);

const debug = process.env.NODE_ENV !== 'production';

export default new Vuex.Store({
const store = new Vuex.Store({
state: {
ready: false,
offline: false,
},
getters: {
allItemMap: (state) => {
@ -28,23 +30,22 @@ export default new Vuex.Store({
utils.types.forEach(type => Object.assign(result, state[type].itemMap));
return result;
},
syncedItemMap: (state) => {
const result = {};
['file', 'folder'].forEach(type => Object.assign(result, state[type].itemMap));
return result;
},
},
mutations: {
setReady: (state) => {
state.ready = true;
},
setOffline: (state, value) => {
state.offline = value;
},
},
modules: {
contentState,
syncContent,
syncedContent,
content,
file,
folder,
syncLocation,
data,
layout,
editor,
@ -55,3 +56,15 @@ export default new Vuex.Store({
strict: debug,
plugins: debug ? [createLogger()] : [],
});

function checkOffline() {
const isOffline = window.navigator.onLine === false;
if (isOffline !== store.state.offline) {
store.commit('setOffline', isOffline);
}
}
utils.setInterval(checkOffline, 1000);
window.addEventListener('online', checkOffline);
window.addEventListener('offline', checkOffline);

export default store;
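Since offline is plain Vuex state kept fresh by checkOffline(), any consumer can react to connectivity changes without knowing about the polling. An illustrative watcher (not part of the commit):

// Illustrative only: react to the offline flag maintained by checkOffline()
store.watch(
  state => state.offline,
  offline => console.log(offline ? 'Gone offline' : 'Back online'),
);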
@ -1,7 +1,7 @@
import moduleTemplate from './moduleTemplate';
import empty from '../../data/emptyContentState';

const module = moduleTemplate(empty);
const module = moduleTemplate(empty, true);

module.getters = {
...module.getters,
@ -1,7 +1,8 @@
import moduleTemplate from './moduleTemplate';
import utils from '../../services/utils';
import defaultLocalSettings from '../../data/defaultLocalSettings';

const itemTemplate = (id, data = {}) => ({ id, type: 'data', data, updated: 0 });
const itemTemplate = (id, data = {}) => ({ id, type: 'data', data, hash: 0 });

const empty = (id) => {
switch (id) {
@ -11,7 +12,7 @@ const empty = (id) => {
return itemTemplate(id);
}
};
const module = moduleTemplate(empty);
const module = moduleTemplate(empty, true);

const getter = id => state => (state.itemMap[id] || empty(id)).data;
const setter = id => ({ commit }, data) => commit('setItem', itemTemplate(id, data));
@ -73,6 +74,20 @@ module.getters.syncDataByItemId = (state, getters) => {
});
return result;
};
module.getters.syncDataByType = (state, getters) => {
const result = {};
utils.types.forEach((type) => {
result[type] = {};
});
const syncData = getters.syncData;
Object.keys(syncData).forEach((id) => {
const item = syncData[id];
if (result[item.type]) {
result[item.type][item.itemId] = item;
}
});
return result;
};
module.actions.patchSyncData = patcher('syncData');
module.actions.setSyncData = setter('syncData');
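syncDataByType re-indexes the flat syncData map first by item type and then by item id; getOneFileIdToSync in syncSvc uses its content bucket to enumerate content ids. An illustrative result shape (ids and hashes are made up):

// store.getters['data/syncDataByType']
const exampleSyncDataByType = {
  content: {
    'file1/content': { id: 'driveId1', itemId: 'file1/content', type: 'content', hash: 123 },
  },
  file: {
    file1: { id: 'driveId2', itemId: 'file1', type: 'file', hash: 456 },
  },
  folder: {},
  // ...one (possibly empty) map per entry in utils.types
};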
@ -1,10 +1,17 @@
import Vue from 'vue';
import utils from '../../services/utils';

export default (empty, simpleHash = false) => {
// Use Date.now as a simple hash function, which is ok for not-synced types
const hashFunc = simpleHash ? Date.now : item => utils.hash(utils.serializeObject({
...item,
hash: undefined,
}));

export default (empty) => {
function setItem(state, value) {
const item = Object.assign(empty(value.id), value);
if (!item.updated) {
item.updated = Date.now();
if (!item.hash) {
item.hash = hashFunc(item);
}
Vue.set(state.itemMap, item.id, item);
}
@ -13,7 +20,7 @@ export default (empty) => {
const item = state.itemMap[patch.id];
if (item) {
Object.assign(item, patch);
item.updated = Date.now(); // Trigger sync
item.hash = hashFunc(item);
Vue.set(state.itemMap, item.id, item);
return true;
}
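Both mutations now stamp items with a hash instead of an `updated` timestamp. For modules created with simpleHash (contentState, data and syncedContent in this commit) the "hash" is just Date.now(); for the others it is derived from the item's content, with the hash field itself excluded so that re-hashing an unchanged item is stable. A quick illustration of that property:

// Content-derived hashing is stable regardless of any previous hash value
const contentHash = item => utils.hash(utils.serializeObject({ ...item, hash: undefined }));
const file = { id: 'file1', name: 'Notes', hash: 0 };
contentHash(file) === contentHash({ ...file, hash: 42 });    // true, hash field ignored
contentHash(file) === contentHash({ ...file, name: 'New' }); // false, real change detected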
@ -1,12 +0,0 @@
import moduleTemplate from './moduleTemplate';
import empty from '../../data/emptySyncContent';

const module = moduleTemplate(empty);

module.getters = {
...module.getters,
current: (state, getters, rootState, rootGetters) =>
state.itemMap[`${rootGetters['file/current'].id}/syncContent`] || empty(),
};

export default module;
src/store/modules/syncLocation.js (Normal file, 21 lines)
@ -0,0 +1,21 @@
import moduleTemplate from './moduleTemplate';
import empty from '../../data/emptySyncLocation';

const module = moduleTemplate(empty);

module.getters = {
...module.getters,
groupedByFileId: (state, getters) => {
const result = {};
getters.items.forEach((item) => {
const list = result[item.fileId] || [];
list.push(item);
result[item.fileId] = list;
});
return result;
},
current: (state, getters, rootState, rootGetters) =>
getters.groupedByFileId[rootGetters['file/current'].id] || [],
};

export default module;
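groupedByFileId buckets sync locations by the file they belong to, which is what syncFile() in syncSvc iterates over. An illustrative result (ids and provider names are hypothetical):

// store.getters['syncLocation/groupedByFileId']
const exampleGroupedByFileId = {
  file1: [
    { id: 'loc1', fileId: 'file1', provider: 'providerA' },
    { id: 'loc2', fileId: 'file1', provider: 'providerB' },
  ],
  file2: [
    { id: 'loc3', fileId: 'file2', provider: 'providerA' },
  ],
};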
src/store/modules/syncedContent.js (Normal file, 12 lines)
@ -0,0 +1,12 @@
import moduleTemplate from './moduleTemplate';
import empty from '../../data/emptySyncedContent';

const module = moduleTemplate(empty, true);

module.getters = {
...module.getters,
current: (state, getters, rootState, rootGetters) =>
state.itemMap[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
};

export default module;