Use of async/await

parent e971082768
commit 597c747b00
@@ -92,20 +92,19 @@ export default {
return !!this.$store.getters['modal/config'];
},
},
created() {
syncSvc.init()
.then(() => {
networkSvc.init();
sponsorSvc.init();
async created() {
try {
await syncSvc.init();
await networkSvc.init();
await sponsorSvc.init();
this.ready = true;
tempFileSvc.setReady();
})
.catch((err) => {
} catch (err) {
if (err && err.message !== 'reload') {
console.error(err); // eslint-disable-line no-console
this.$store.dispatch('notification/error', err);
}
});
}
},
};
</script>
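The hunk above sets the tone for the whole commit: a `.then()`/`.catch()` chain inside a Vue lifecycle hook becomes an `async` method wrapped in `try`/`catch`. A minimal before/after sketch of that pattern, reusing the service names from the diff (indentation and surrounding component code are illustrative only):

```js
// Before: promise chain
created() {
  syncSvc.init()
    .then(() => {
      networkSvc.init();
      sponsorSvc.init();
      this.ready = true;
    })
    .catch((err) => {
      this.$store.dispatch('notification/error', err);
    });
},

// After: async/await with try/catch
async created() {
  try {
    await syncSvc.init();
    await networkSvc.init();
    await sponsorSvc.init();
    this.ready = true;
  } catch (err) {
    this.$store.dispatch('notification/error', err);
  }
},
```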
@@ -97,29 +97,36 @@ export default {
}
return true;
},
submitNewChild(cancel) {
async submitNewChild(cancel) {
const { newChildNode } = this.$store.state.explorer;
if (!cancel && !newChildNode.isNil && newChildNode.item.name) {
try {
if (newChildNode.isFolder) {
fileSvc.storeItem(newChildNode.item)
.then(item => this.select(item.id), () => { /* cancel */ });
const item = await fileSvc.storeItem(newChildNode.item);
this.select(item.id);
} else {
fileSvc.createFile(newChildNode.item)
.then(item => this.select(item.id), () => { /* cancel */ });
const item = await fileSvc.createFile(newChildNode.item);
this.select(item.id);
}
} catch (e) {
// Cancel
}
}
this.$store.commit('explorer/setNewItem', null);
},
submitEdit(cancel) {
async submitEdit(cancel) {
const { item } = this.$store.getters['explorer/editingNode'];
const value = this.editingValue;
this.setEditingId(null);
if (!cancel && item.id && value) {
fileSvc.storeItem({
try {
await fileSvc.storeItem({
...item,
name: value,
})
.catch(() => { /* cancel */ });
});
} catch (e) {
// Cancel
}
}
},
setDragSourceId(evt) {
@@ -140,22 +147,17 @@ export default {
&& !targetNode.isNil
&& sourceNode.item.id !== targetNode.item.id
) {
const patch = {
id: sourceNode.item.id,
fileSvc.storeItem({
...sourceNode.item,
parentId: targetNode.item.id,
};
if (sourceNode.isFolder) {
this.$store.commit('folder/patchItem', patch);
} else {
this.$store.commit('file/patchItem', patch);
}
});
}
},
onContextMenu(evt) {
async onContextMenu(evt) {
if (this.select(undefined, false)) {
evt.preventDefault();
evt.stopPropagation();
this.$store.dispatch('contextMenu/open', {
const item = await this.$store.dispatch('contextMenu/open', {
coordinates: {
left: evt.clientX,
top: evt.clientY,

@@ -178,8 +180,8 @@ export default {
name: 'Delete',
perform: () => explorerSvc.deleteItem(),
}],
})
.then(item => item.perform());
});
item.perform();
}
},
},
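For the context menu, the handler now awaits the `contextMenu/open` dispatch and runs the selected entry directly. A sketch of the resulting flow, assuming (as the diff implies) that the action resolves with the picked item and rejects when the menu is dismissed:

```js
async onContextMenu(evt) {
  if (this.select(undefined, false)) {
    evt.preventDefault();
    evt.stopPropagation();
    // Resolves with the menu entry the user picked
    const item = await this.$store.dispatch('contextMenu/open', {
      coordinates: { left: evt.clientX, top: evt.clientY },
      items: [{ name: 'Delete', perform: () => explorerSvc.deleteItem() }],
    });
    item.perform();
  }
},
```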
@@ -175,10 +175,6 @@ export default {
background-color: rgba(160, 160, 160, 0.5);
overflow: auto;

hr {
margin: 0.5em 0;
}

p {
line-height: 1.5;
}

@@ -192,7 +192,7 @@ export default {
editorSvc.pagedownEditor.uiManager.doClick(name);
}
},
editTitle(toggle) {
async editTitle(toggle) {
this.titleFocus = toggle;
if (toggle) {
this.titleInputElt.setSelectionRange(0, this.titleInputElt.value.length);

@@ -200,11 +200,14 @@ export default {
const title = this.title.trim();
this.title = this.$store.getters['file/current'].name;
if (title) {
fileSvc.storeItem({
try {
await fileSvc.storeItem({
...this.$store.getters['file/current'],
name: title,
})
.catch(() => { /* Cancel */ });
});
} catch (e) {
// Cancel
}
}
}
},
@@ -47,12 +47,13 @@ export default {
...mapMutations('discussion', [
'setIsCommenting',
]),
removeComment() {
this.$store.dispatch('modal/commentDeletion')
.then(
() => this.$store.dispatch('discussion/cleanCurrentFile', { filterComment: this.comment }),
() => { /* Cancel */ },
);
async removeComment() {
try {
await this.$store.dispatch('modal/commentDeletion');
this.$store.dispatch('discussion/cleanCurrentFile', { filterComment: this.comment });
} catch (e) {
// Cancel
}
},
},
mounted() {

@@ -93,14 +93,15 @@ export default {
.start();
}
},
removeDiscussion() {
this.$store.dispatch('modal/discussionDeletion')
.then(
() => this.$store.dispatch('discussion/cleanCurrentFile', {
async removeDiscussion() {
try {
await this.$store.dispatch('modal/discussionDeletion');
this.$store.dispatch('discussion/cleanCurrentFile', {
filterDiscussion: this.currentDiscussion,
}),
() => { /* Cancel */ },
);
});
} catch (e) {
// Cancel
}
},
},
};
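These modal conversions rely on a convention visible throughout the diff: the `modal/…` actions reject when the user cancels, so the empty `catch` block is the cancel path. A compact sketch of that idiom:

```js
async removeComment() {
  try {
    await this.$store.dispatch('modal/commentDeletion'); // rejects on cancel
    this.$store.dispatch('discussion/cleanCurrentFile', { filterComment: this.comment });
  } catch (e) {
    // Cancel: the confirmation dialog was dismissed, nothing to do
  }
},
```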
@@ -96,12 +96,13 @@ export default {
...mapMutations('content', [
'setRevisionContent',
]),
signin() {
return googleHelper.signin()
.then(
() => syncSvc.requestSync(),
() => { /* Cancel */ },
);
async signin() {
try {
await googleHelper.signin();
syncSvc.requestSync();
} catch (e) {
// Cancel
}
},
close() {
this.$store.dispatch('data/setSideBarPanel', 'menu');

@@ -117,10 +118,15 @@ export default {
const currentFile = this.$store.getters['file/current'];
this.$store.dispatch(
'queue/enqueue',
() => Promise.resolve()
.then(() => this.workspaceProvider
.getRevisionContent(syncToken, currentFile.id, revision.id))
.then(resolve, reject),
async () => {
try {
const content = await this.workspaceProvider
.getRevisionContent(syncToken, currentFile.id, revision.id);
resolve(content);
} catch (e) {
reject(e);
}
},
);
});
revisionContentPromises[revision.id] = revisionContentPromise;

@@ -181,9 +187,15 @@ export default {
revisionsPromise = new Promise((resolve, reject) => {
this.$store.dispatch(
'queue/enqueue',
() => Promise.resolve()
.then(() => this.workspaceProvider.listRevisions(syncToken, currentFile.id))
.then(resolve, reject),
async () => {
try {
const revisions = await this.workspaceProvider
.listRevisions(syncToken, currentFile.id);
resolve(revisions);
} catch (e) {
reject(e);
}
},
);
})
.catch(() => {
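The revision hunks keep the outer `new Promise` because the result is cached as a plain promise while `queue/enqueue` expects a callback; the async callback simply bridges to `resolve`/`reject`. A sketch of that bridge, with names taken from the diff:

```js
const revisionContentPromise = new Promise((resolve, reject) => {
  this.$store.dispatch('queue/enqueue', async () => {
    try {
      const content = await this.workspaceProvider
        .getRevisionContent(syncToken, currentFile.id, revision.id);
      resolve(content); // settle the cached promise from inside the queued task
    } catch (e) {
      reject(e);
    }
  });
});
```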
@@ -104,16 +104,20 @@ export default {
...mapActions('data', {
setPanel: 'setSideBarPanel',
}),
signin() {
return googleHelper.signin()
.then(
() => syncSvc.requestSync(),
() => { /* Cancel */ },
);
async signin() {
try {
await googleHelper.signin();
syncSvc.requestSync();
} catch (e) {
// Cancel
}
},
fileProperties() {
return this.$store.dispatch('modal/open', 'fileProperties')
.catch(() => { /* Cancel */ });
async fileProperties() {
try {
await this.$store.dispatch('modal/open', 'fileProperties');
} catch (e) {
// Cancel
}
},
print() {
window.print();

@@ -78,29 +78,33 @@ export default {
document.body.removeChild(iframeElt);
}, 60000);
},
settings() {
return this.$store.dispatch('modal/open', 'settings')
.then(
settings => this.$store.dispatch('data/setSettings', settings),
() => { /* Cancel */ },
);
async settings() {
try {
const settings = await this.$store.dispatch('modal/open', 'settings');
this.$store.dispatch('data/setSettings', settings);
} catch (e) {
// Cancel
}
},
templates() {
return this.$store.dispatch('modal/open', 'templates')
.then(
({ templates }) => this.$store.dispatch('data/setTemplates', templates),
() => { /* Cancel */ },
);
async templates() {
try {
const { templates } = await this.$store.dispatch('modal/open', 'templates');
this.$store.dispatch('data/setTemplates', templates);
} catch (e) {
// Cancel
}
},
reset() {
return this.$store.dispatch('modal/reset')
.then(() => {
async reset() {
try {
await this.$store.dispatch('modal/reset');
window.location.href = '#reset=true';
window.location.reload();
});
} catch (e) {
// Cancel
}
},
about() {
return this.$store.dispatch('modal/open', 'about');
this.$store.dispatch('modal/open', 'about');
},
},
};
@@ -118,12 +118,15 @@ const tokensToArray = (tokens, filter = () => true) => Object.keys(tokens)
.filter(token => filter(token))
.sort((token1, token2) => token1.name.localeCompare(token2.name));

const openPublishModal = (token, type) => store.dispatch('modal/open', {
const publishModalOpener = type => async (token) => {
try {
const publishLocation = await store.dispatch('modal/open', {
type,
token,
}).then(publishLocation => publishSvc.createPublishLocation(publishLocation));

const onCancel = () => {};
});
publishSvc.createPublishLocation(publishLocation);
} catch (e) { /* cancel */ }
};

export default {
components: {
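`publishModalOpener` is now a small factory: given a modal type it returns an async method that takes a token, opens the modal, and creates the publish location, swallowing the rejection that signals cancel. A sketch of the factory and of how the methods map further down uses it:

```js
const publishModalOpener = type => async (token) => {
  try {
    const publishLocation = await store.dispatch('modal/open', { type, token });
    publishSvc.createPublishLocation(publishLocation);
  } catch (e) { /* cancel */ }
};

// Used below to replace one wrapper method per provider, e.g.:
// publishGithub: publishModalOpener('githubPublish'),
```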
@@ -178,74 +181,48 @@ export default {
managePublish() {
return this.$store.dispatch('modal/open', 'publishManagement');
},
addGoogleDriveAccount() {
return this.$store.dispatch('modal/open', {
type: 'googleDriveAccount',
onResolve: () => googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess),
})
.catch(onCancel);
async addGoogleDriveAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'googleDriveAccount' });
await googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess);
} catch (e) { /* cancel */ }
},
addDropboxAccount() {
return this.$store.dispatch('modal/open', {
type: 'dropboxAccount',
onResolve: () => dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess),
})
.catch(onCancel);
async addDropboxAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'dropboxAccount' });
await dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess);
} catch (e) { /* cancel */ }
},
addGithubAccount() {
return this.$store.dispatch('modal/open', {
type: 'githubAccount',
onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
})
.catch(onCancel);
async addGithubAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'githubAccount' });
await githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess);
} catch (e) { /* cancel */ }
},
addWordpressAccount() {
return wordpressHelper.addAccount()
.catch(onCancel);
async addWordpressAccount() {
try {
await wordpressHelper.addAccount();
} catch (e) { /* cancel */ }
},
addBloggerAccount() {
return googleHelper.addBloggerAccount()
.catch(onCancel);
async addBloggerAccount() {
try {
await googleHelper.addBloggerAccount();
} catch (e) { /* cancel */ }
},
addZendeskAccount() {
return this.$store.dispatch('modal/open', {
type: 'zendeskAccount',
onResolve: ({ subdomain, clientId }) => zendeskHelper.addAccount(subdomain, clientId),
})
.catch(onCancel);
},
publishGoogleDrive(token) {
return openPublishModal(token, 'googleDrivePublish')
.catch(onCancel);
},
publishDropbox(token) {
return openPublishModal(token, 'dropboxPublish')
.catch(onCancel);
},
publishGithub(token) {
return openPublishModal(token, 'githubPublish')
.catch(onCancel);
},
publishGist(token) {
return openPublishModal(token, 'gistPublish')
.catch(onCancel);
},
publishWordpress(token) {
return openPublishModal(token, 'wordpressPublish')
.catch(onCancel);
},
publishBlogger(token) {
return openPublishModal(token, 'bloggerPublish')
.catch(onCancel);
},
publishBloggerPage(token) {
return openPublishModal(token, 'bloggerPagePublish')
.catch(onCancel);
},
publishZendesk(token) {
return openPublishModal(token, 'zendeskPublish')
.catch(onCancel);
async addZendeskAccount() {
try {
const { subdomain, clientId } = await this.$store.dispatch('modal/open', { type: 'zendeskAccount' });
await zendeskHelper.addAccount(subdomain, clientId);
} catch (e) { /* cancel */ }
},
publishGoogleDrive: publishModalOpener('googleDrivePublish'),
publishDropbox: publishModalOpener('dropboxPublish'),
publishGithub: publishModalOpener('githubPublish'),
publishGist: publishModalOpener('gistPublish'),
publishWordpress: publishModalOpener('wordpressPublish'),
publishBlogger: publishModalOpener('bloggerPublish'),
publishBloggerPage: publishModalOpener('bloggerPagePublish'),
publishZendesk: publishModalOpener('zendeskPublish'),
},
};
</script>
@@ -101,8 +101,6 @@ const openSyncModal = (token, type) => store.dispatch('modal/open', {
token,
}).then(syncLocation => syncSvc.createSyncLocation(syncLocation));

const onCancel = () => {};

export default {
components: {
MenuEntry,
@@ -147,66 +145,79 @@ export default {
manageSync() {
return this.$store.dispatch('modal/open', 'syncManagement');
},
addGoogleDriveAccount() {
return this.$store.dispatch('modal/open', {
type: 'googleDriveAccount',
onResolve: () => googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess),
})
.catch(onCancel);
async addGoogleDriveAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'googleDriveAccount' });
await googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess);
} catch (e) { /* cancel */ }
},
addDropboxAccount() {
return this.$store.dispatch('modal/open', {
type: 'dropboxAccount',
onResolve: () => dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess),
})
.catch(onCancel);
async addDropboxAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'dropboxAccount' });
await dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess);
} catch (e) { /* cancel */ }
},
addGithubAccount() {
return this.$store.dispatch('modal/open', {
type: 'githubAccount',
onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
})
.catch(onCancel);
async addGithubAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'githubAccount' });
await githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess);
} catch (e) { /* cancel */ }
},
openGoogleDrive(token) {
return googleHelper.openPicker(token, 'doc')
.then(files => this.$store.dispatch(
async openGoogleDrive(token) {
const files = await googleHelper.openPicker(token, 'doc');
this.$store.dispatch(
'queue/enqueue',
() => googleDriveProvider.openFiles(token, files),
));
);
},
openDropbox(token) {
return dropboxHelper.openChooser(token)
.then(paths => this.$store.dispatch(
async openDropbox(token) {
const paths = await dropboxHelper.openChooser(token);
this.$store.dispatch(
'queue/enqueue',
() => dropboxProvider.openFiles(token, paths),
));
);
},
saveGoogleDrive(token) {
return openSyncModal(token, 'googleDriveSave')
.catch(onCancel);
async saveGoogleDrive(token) {
try {
await openSyncModal(token, 'googleDriveSave');
} catch (e) {
// Cancel
}
},
saveDropbox(token) {
return openSyncModal(token, 'dropboxSave')
.catch(onCancel);
async saveDropbox(token) {
try {
await openSyncModal(token, 'dropboxSave');
} catch (e) {
// Cancel
}
},
openGithub(token) {
return store.dispatch('modal/open', {
async openGithub(token) {
try {
const syncLocation = await store.dispatch('modal/open', {
type: 'githubOpen',
token,
})
.then(syncLocation => this.$store.dispatch(
});
this.$store.dispatch(
'queue/enqueue',
() => githubProvider.openFile(token, syncLocation),
));
);
} catch (e) {
// Cancel
}
},
saveGithub(token) {
return openSyncModal(token, 'githubSave')
.catch(onCancel);
async saveGithub(token) {
try {
await openSyncModal(token, 'githubSave');
} catch (e) {
// Cancel
}
},
saveGist(token) {
return openSyncModal(token, 'gistSync')
.catch(onCancel);
async saveGist(token) {
try {
await openSyncModal(token, 'gistSync');
} catch (e) {
// Cancel
}
},
},
};
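The open/save methods in this menu follow two shapes: the `save*` methods await `openSyncModal` and treat rejection as cancel, while the `open*` methods await the picker or modal and then enqueue the provider call. A sketch of the second shape, mirroring the `openGithub` hunk above:

```js
async openGithub(token) {
  try {
    const syncLocation = await store.dispatch('modal/open', { type: 'githubOpen', token });
    // Run the provider call through the global request queue
    this.$store.dispatch('queue/enqueue', () => githubProvider.openFile(token, syncLocation));
  } catch (e) {
    // Cancel
  }
},
```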
@@ -31,8 +31,6 @@ import { mapGetters } from 'vuex';
import MenuEntry from './common/MenuEntry';
import googleHelper from '../../services/providers/helpers/googleHelper';

const onCancel = () => {};

export default {
components: {
MenuEntry,
@@ -46,28 +44,37 @@ export default {
]),
},
methods: {
addCouchdbWorkspace() {
return this.$store.dispatch('modal/open', {
async addCouchdbWorkspace() {
try {
this.$store.dispatch('modal/open', {
type: 'couchdbWorkspace',
})
.catch(onCancel);
});
} catch (e) {
// Cancel
}
},
addGithubWorkspace() {
return this.$store.dispatch('modal/open', {
async addGithubWorkspace() {
try {
this.$store.dispatch('modal/open', {
type: 'githubWorkspace',
})
.catch(onCancel);
});
} catch (e) {
// Cancel
}
},
addGoogleDriveWorkspace() {
return googleHelper.addDriveAccount(true)
.then(token => this.$store.dispatch('modal/open', {
async addGoogleDriveWorkspace() {
try {
const token = await googleHelper.addDriveAccount(true);
this.$store.dispatch('modal/open', {
type: 'googleDriveWorkspace',
token,
}))
.catch(onCancel);
});
} catch (e) {
// Cancel
}
},
manageWorkspaces() {
return this.$store.dispatch('modal/open', 'workspaceManagement');
this.$store.dispatch('modal/open', 'workspaceManagement');
},
},
};
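The Google Drive workspace flow now reads top to bottom: the Drive account token is awaited first, then the workspace modal is opened with it. A sketch, mirroring the hunk above:

```js
async addGoogleDriveWorkspace() {
  try {
    const token = await googleHelper.addDriveAccount(true);
    this.$store.dispatch('modal/open', { type: 'googleDriveWorkspace', token });
  } catch (e) {
    // Cancel
  }
},
```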
@ -2,7 +2,7 @@
|
||||
<modal-inner class="modal__inner-1--about-modal" aria-label="About">
|
||||
<div class="modal__content">
|
||||
<div class="logo-background"></div>
|
||||
<small>v{{version}}<br>© 2013-2018 Dock5 Software</small>
|
||||
<small>© 2013-2018 Dock5 Software<br>v{{version}}</small>
|
||||
<hr>
|
||||
StackEdit on <a target="_blank" href="https://github.com/benweet/stackedit/">GitHub</a>
|
||||
<br>
|
||||
@ -59,11 +59,12 @@ export default {
|
||||
|
||||
.logo-background {
|
||||
height: 75px;
|
||||
margin: 0.5rem 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
small {
|
||||
display: block;
|
||||
font-size: 0.75em;
|
||||
}
|
||||
|
||||
hr {
|
||||
|
@ -41,6 +41,9 @@
|
||||
</form-entry>
|
||||
<form-entry label="Status">
|
||||
<input slot="field" class="textfield" type="text" v-model.trim="status" @keydown.enter="resolve()">
|
||||
<div class="form-entry__info">
|
||||
<b>Example:</b> draft
|
||||
</div>
|
||||
</form-entry>
|
||||
<form-entry label="Date" info="YYYY-MM-DD">
|
||||
<input slot="field" class="textfield" type="text" v-model.trim="date" @keydown.enter="resolve()">
|
||||
|
@ -37,12 +37,13 @@ export default modalTemplate({
|
||||
let timeoutId;
|
||||
this.$watch('selectedTemplate', (selectedTemplate) => {
|
||||
clearTimeout(timeoutId);
|
||||
timeoutId = setTimeout(() => {
|
||||
timeoutId = setTimeout(async () => {
|
||||
const currentFile = this.$store.getters['file/current'];
|
||||
exportSvc.applyTemplate(currentFile.id, this.allTemplates[selectedTemplate])
|
||||
.then((html) => {
|
||||
const html = await exportSvc.applyTemplate(
|
||||
currentFile.id,
|
||||
this.allTemplates[selectedTemplate],
|
||||
);
|
||||
this.result = html;
|
||||
});
|
||||
}, 10);
|
||||
}, {
|
||||
immediate: true,
|
||||
|
@ -61,15 +61,17 @@ export default modalTemplate({
|
||||
addGooglePhotosAccount() {
|
||||
return googleHelper.addPhotosAccount();
|
||||
},
|
||||
openGooglePhotos(token) {
|
||||
async openGooglePhotos(token) {
|
||||
const { callback } = this.config;
|
||||
this.config.reject();
|
||||
googleHelper.openPicker(token, 'img')
|
||||
.then(res => res[0] && this.$store.dispatch('modal/open', {
|
||||
const res = await googleHelper.openPicker(token, 'img');
|
||||
if (res[0]) {
|
||||
this.$store.dispatch('modal/open', {
|
||||
type: 'googlePhoto',
|
||||
url: res[0].url,
|
||||
callback,
|
||||
}));
|
||||
});
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||
|
@ -38,19 +38,20 @@ export default modalTemplate({
|
||||
selectedFormat: 'pandocExportFormat',
|
||||
},
|
||||
methods: {
|
||||
resolve() {
|
||||
async resolve() {
|
||||
this.config.resolve();
|
||||
const currentFile = this.$store.getters['file/current'];
|
||||
const currentContent = this.$store.getters['content/current'];
|
||||
const { selectedFormat } = this;
|
||||
this.$store.dispatch('queue/enqueue', () => Promise.all([
|
||||
const [sponsorToken, token] = await this.$store.dispatch('queue/enqueue', () => Promise.all([
|
||||
Promise.resolve().then(() => {
|
||||
const sponsorToken = this.$store.getters['workspace/sponsorToken'];
|
||||
return sponsorToken && googleHelper.refreshToken(sponsorToken);
|
||||
const tokenToRefresh = this.$store.getters['workspace/sponsorToken'];
|
||||
return tokenToRefresh && googleHelper.refreshToken(tokenToRefresh);
|
||||
}),
|
||||
sponsorSvc.getToken(),
|
||||
])
|
||||
.then(([sponsorToken, token]) => networkSvc.request({
|
||||
]));
|
||||
try {
|
||||
const { body } = await networkSvc.request({
|
||||
method: 'POST',
|
||||
url: 'pandocExport',
|
||||
params: {
|
||||
@ -63,20 +64,16 @@ export default modalTemplate({
|
||||
body: JSON.stringify(editorSvc.getPandocAst()),
|
||||
blob: true,
|
||||
timeout: 60000,
|
||||
})
|
||||
.then((res) => {
|
||||
FileSaver.saveAs(res.body, `${currentFile.name}.${selectedFormat}`);
|
||||
}, (err) => {
|
||||
if (err.status !== 401) {
|
||||
throw err;
|
||||
}
|
||||
this.$store.dispatch('modal/sponsorOnly')
|
||||
.catch(() => { /* Cancel */ });
|
||||
}))
|
||||
.catch((err) => {
|
||||
});
|
||||
FileSaver.saveAs(body, `${currentFile.name}.${selectedFormat}`);
|
||||
} catch (err) {
|
||||
if (err.status === 401) {
|
||||
this.$store.dispatch('modal/sponsorOnly');
|
||||
} else {
|
||||
console.error(err); // eslint-disable-line no-console
|
||||
this.$store.dispatch('notification/error', err);
|
||||
}));
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||
|
@ -33,13 +33,14 @@ export default modalTemplate({
|
||||
selectedTemplate: 'pdfExportTemplate',
|
||||
},
|
||||
methods: {
|
||||
resolve() {
|
||||
async resolve() {
|
||||
this.config.resolve();
|
||||
const currentFile = this.$store.getters['file/current'];
|
||||
this.$store.dispatch('queue/enqueue', () => Promise.all([
|
||||
const [sponsorToken, token, html] = await this.$store
|
||||
.dispatch('queue/enqueue', () => Promise.all([
|
||||
Promise.resolve().then(() => {
|
||||
const sponsorToken = this.$store.getters['workspace/sponsorToken'];
|
||||
return sponsorToken && googleHelper.refreshToken(sponsorToken);
|
||||
const tokenToRefresh = this.$store.getters['workspace/sponsorToken'];
|
||||
return tokenToRefresh && googleHelper.refreshToken(tokenToRefresh);
|
||||
}),
|
||||
sponsorSvc.getToken(),
|
||||
exportSvc.applyTemplate(
|
||||
@ -47,8 +48,9 @@ export default modalTemplate({
|
||||
this.allTemplates[this.selectedTemplate],
|
||||
true,
|
||||
),
|
||||
])
|
||||
.then(([sponsorToken, token, html]) => networkSvc.request({
|
||||
]));
|
||||
try {
|
||||
const { body } = await networkSvc.request({
|
||||
method: 'POST',
|
||||
url: 'pdfExport',
|
||||
params: {
|
||||
@ -59,20 +61,16 @@ export default modalTemplate({
|
||||
body: html,
|
||||
blob: true,
|
||||
timeout: 60000,
|
||||
})
|
||||
.then((res) => {
|
||||
FileSaver.saveAs(res.body, `${currentFile.name}.pdf`);
|
||||
}, (err) => {
|
||||
if (err.status !== 401) {
|
||||
throw err;
|
||||
}
|
||||
this.$store.dispatch('modal/sponsorOnly')
|
||||
.catch(() => { /* Cancel */ });
|
||||
}))
|
||||
.catch((err) => {
|
||||
});
|
||||
FileSaver.saveAs(body, `${currentFile.name}.pdf`);
|
||||
} catch (err) {
|
||||
if (err.status === 401) {
|
||||
this.$store.dispatch('modal/sponsorOnly');
|
||||
} else {
|
||||
console.error(err); // eslint-disable-line no-console
|
||||
this.$store.dispatch('notification/error', err);
|
||||
}));
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||
|
@ -75,12 +75,13 @@ export default {
|
||||
}
|
||||
this.editedId = null;
|
||||
},
|
||||
remove(id) {
|
||||
return this.$store.dispatch('modal/removeWorkspace')
|
||||
.then(
|
||||
() => localDbSvc.removeWorkspace(id),
|
||||
() => { /* Cancel */ },
|
||||
);
|
||||
async remove(id) {
|
||||
try {
|
||||
await this.$store.dispatch('modal/removeWorkspace');
|
||||
localDbSvc.removeWorkspace(id);
|
||||
} catch (e) {
|
||||
// Cancel
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
|
@ -29,20 +29,18 @@ export default {
|
||||
},
|
||||
},
|
||||
methods: {
|
||||
sponsor() {
|
||||
Promise.resolve()
|
||||
.then(() => !this.$store.getters['workspace/sponsorToken'] &&
|
||||
// If user has to sign in
|
||||
this.$store.dispatch('modal/signInForSponsorship', {
|
||||
onResolve: () => googleHelper.signin()
|
||||
.then(() => syncSvc.requestSync()),
|
||||
}))
|
||||
.then(() => {
|
||||
if (!this.$store.getters.isSponsor) {
|
||||
this.$store.dispatch('modal/open', 'sponsor');
|
||||
async sponsor() {
|
||||
try {
|
||||
if (!this.$store.getters['workspace/sponsorToken']) {
|
||||
// User has to sign in
|
||||
await this.$store.dispatch('modal/signInForSponsorship');
|
||||
await googleHelper.signin();
|
||||
syncSvc.requestSync();
|
||||
}
|
||||
})
|
||||
.catch(() => { /* Cancel */ });
|
||||
if (!this.$store.getters.isSponsor) {
|
||||
await this.$store.dispatch('modal/open', 'sponsor');
|
||||
}
|
||||
} catch (e) { /* cancel */ }
|
||||
},
|
||||
},
|
||||
};
|
||||
|
@ -63,17 +63,15 @@ export default (desc) => {
|
||||
return sortedTemplates;
|
||||
};
|
||||
// Make use of `function` to have `this` bound to the component
|
||||
component.methods.configureTemplates = function () { // eslint-disable-line func-names
|
||||
store.dispatch('modal/open', {
|
||||
component.methods.configureTemplates = async function () { // eslint-disable-line func-names
|
||||
const { templates, selectedId } = await store.dispatch('modal/open', {
|
||||
type: 'templates',
|
||||
selectedId: this.selectedTemplate,
|
||||
})
|
||||
.then(({ templates, selectedId }) => {
|
||||
});
|
||||
store.dispatch('data/setTemplates', templates);
|
||||
store.dispatch('data/patchLocalSettings', {
|
||||
[id]: selectedId,
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
});
|
||||
|
@ -18,14 +18,12 @@ OfflinePluginRuntime.install({
|
||||
// Tells to new SW to take control immediately
|
||||
OfflinePluginRuntime.applyUpdate();
|
||||
},
|
||||
onUpdated: () => {
|
||||
onUpdated: async () => {
|
||||
if (!store.state.light) {
|
||||
localDbSvc.sync()
|
||||
.then(() => {
|
||||
await localDbSvc.sync();
|
||||
localStorage.updated = true;
|
||||
// Reload the webpage to load into the new version
|
||||
window.location.reload();
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
|
@ -49,20 +49,26 @@ export default {
|
||||
}
|
||||
});
|
||||
|
||||
await utils.awaitSequence(Object.keys(folderNameMap), async externalId => fileSvc.storeItem({
|
||||
await utils.awaitSequence(
|
||||
Object.keys(folderNameMap),
|
||||
async externalId => fileSvc.setOrPatchItem({
|
||||
id: folderIdMap[externalId],
|
||||
type: 'folder',
|
||||
name: folderNameMap[externalId],
|
||||
parentId: folderIdMap[parentIdMap[externalId]],
|
||||
}, true));
|
||||
}),
|
||||
);
|
||||
|
||||
await utils.awaitSequence(Object.keys(fileNameMap), async externalId => fileSvc.createFile({
|
||||
await utils.awaitSequence(
|
||||
Object.keys(fileNameMap),
|
||||
async externalId => fileSvc.createFile({
|
||||
name: fileNameMap[externalId],
|
||||
parentId: folderIdMap[parentIdMap[externalId]],
|
||||
text: textMap[externalId],
|
||||
properties: propertiesMap[externalId],
|
||||
discussions: discussionsMap[externalId],
|
||||
comments: commentsMap[externalId],
|
||||
}, true));
|
||||
}, true),
|
||||
);
|
||||
},
|
||||
};
|
||||
|
@ -120,7 +120,7 @@ const editorSvc = Object.assign(new Vue(), editorSvcDiscussions, editorSvcUtils,
|
||||
/**
|
||||
* Refresh the preview with the result of `convert()`
|
||||
*/
|
||||
refreshPreview() {
|
||||
async refreshPreview() {
|
||||
const sectionDescList = [];
|
||||
let sectionPreviewElt;
|
||||
let sectionTocElt;
|
||||
@ -222,10 +222,10 @@ const editorSvc = Object.assign(new Vue(), editorSvcDiscussions, editorSvcUtils,
|
||||
img.onerror = resolve;
|
||||
img.src = imgElt.src;
|
||||
}));
|
||||
await Promise.all(loadedPromises);
|
||||
|
||||
Promise.all(loadedPromises)
|
||||
// Debounce if sections have already been measured
|
||||
.then(() => this.measureSectionDimensions(!!this.previewCtxMeasured));
|
||||
this.measureSectionDimensions(!!this.previewCtxMeasured);
|
||||
},
|
||||
|
||||
/**
|
||||
|
@ -15,32 +15,36 @@ export default {
|
||||
parentId,
|
||||
});
|
||||
},
|
||||
deleteItem() {
|
||||
async deleteItem() {
|
||||
const selectedNode = store.getters['explorer/selectedNode'];
|
||||
if (selectedNode.isNil) {
|
||||
return Promise.resolve();
|
||||
return;
|
||||
}
|
||||
if (selectedNode.isTrash || selectedNode.item.parentId === 'trash') {
|
||||
return store.dispatch('modal/trashDeletion').catch(() => { /* Cancel */ });
|
||||
try {
|
||||
await store.dispatch('modal/trashDeletion');
|
||||
} catch (e) {
|
||||
// Cancel
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// See if we have a dialog to show
|
||||
let modalAction;
|
||||
// See if we have a confirmation dialog to show
|
||||
let moveToTrash = true;
|
||||
try {
|
||||
if (selectedNode.isTemp) {
|
||||
modalAction = 'modal/tempFolderDeletion';
|
||||
await store.dispatch('modal/tempFolderDeletion', selectedNode.item);
|
||||
moveToTrash = false;
|
||||
} else if (selectedNode.item.parentId === 'temp') {
|
||||
modalAction = 'modal/tempFileDeletion';
|
||||
await store.dispatch('modal/tempFileDeletion', selectedNode.item);
|
||||
moveToTrash = false;
|
||||
} else if (selectedNode.isFolder) {
|
||||
modalAction = 'modal/folderDeletion';
|
||||
await store.dispatch('modal/folderDeletion', selectedNode.item);
|
||||
}
|
||||
} catch (e) {
|
||||
return; // cancel
|
||||
}
|
||||
|
||||
return (modalAction
|
||||
? store.dispatch(modalAction, selectedNode.item)
|
||||
: Promise.resolve())
|
||||
.then(() => {
|
||||
const deleteFile = (id) => {
|
||||
if (moveToTrash) {
|
||||
store.commit('file/patchItem', {
|
||||
@ -80,6 +84,5 @@ export default {
|
||||
});
|
||||
}
|
||||
}
|
||||
}, () => { /* Cancel */ });
|
||||
},
|
||||
};
|
||||
|
@ -42,13 +42,12 @@ export default {
|
||||
/**
|
||||
* Apply the template to the file content
|
||||
*/
|
||||
applyTemplate(fileId, template = {
|
||||
async applyTemplate(fileId, template = {
|
||||
value: '{{{files.0.content.text}}}',
|
||||
helpers: '',
|
||||
}, pdf = false) {
|
||||
const file = store.state.file.itemMap[fileId];
|
||||
return localDbSvc.loadItem(`${fileId}/content`)
|
||||
.then((content) => {
|
||||
const content = await localDbSvc.loadItem(`${fileId}/content`);
|
||||
const properties = utils.computeProperties(content.properties);
|
||||
const options = extensionSvc.getOptions(properties);
|
||||
const converter = markdownConversionSvc.createConverter(options, true);
|
||||
@ -109,19 +108,17 @@ export default {
|
||||
});
|
||||
worker.postMessage([template.value, view, template.helpers]);
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Export a file to disk.
|
||||
*/
|
||||
exportToDisk(fileId, type, template) {
|
||||
async exportToDisk(fileId, type, template) {
|
||||
const file = store.state.file.itemMap[fileId];
|
||||
return this.applyTemplate(fileId, template)
|
||||
.then((html) => {
|
||||
const html = await this.applyTemplate(fileId, template);
|
||||
const blob = new Blob([html], {
|
||||
type: 'text/plain;charset=utf-8',
|
||||
});
|
||||
FileSaver.saveAs(blob, `${file.name}.${type}`);
|
||||
});
|
||||
},
|
||||
};
|
||||
|
@ -7,7 +7,7 @@ export default {
|
||||
/**
|
||||
* Create a file in the store with the specified fields.
|
||||
*/
|
||||
createFile({
|
||||
async createFile({
|
||||
name,
|
||||
parentId,
|
||||
text,
|
||||
@ -29,56 +29,55 @@ export default {
|
||||
discussions: discussions || {},
|
||||
comments: comments || {},
|
||||
};
|
||||
const nameStripped = file.name !== utils.defaultName && file.name !== name;
|
||||
|
||||
// Check if there is a path conflict
|
||||
const workspaceUniquePaths = store.getters['workspace/hasUniquePaths'];
|
||||
let pathConflict;
|
||||
|
||||
// Show warning dialogs
|
||||
if (!background) {
|
||||
// If name is being stripped
|
||||
if (file.name !== utils.defaultName && file.name !== name) {
|
||||
await store.dispatch('modal/stripName', name);
|
||||
}
|
||||
|
||||
// Check if there is already a file with that path
|
||||
if (workspaceUniquePaths) {
|
||||
const parentPath = store.getters.itemPaths[file.parentId] || '';
|
||||
const path = parentPath + file.name;
|
||||
pathConflict = !!store.getters.pathItems[path];
|
||||
if (store.getters.pathItems[path]) {
|
||||
await store.dispatch('modal/pathConflict', name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Show warning dialogs and then save in the store
|
||||
return Promise.resolve()
|
||||
.then(() => !background && nameStripped && store.dispatch('modal/stripName', name))
|
||||
.then(() => !background && pathConflict && store.dispatch('modal/pathConflict', name))
|
||||
.then(() => {
|
||||
// Save file and content in the store
|
||||
store.commit('content/setItem', content);
|
||||
store.commit('file/setItem', file);
|
||||
if (workspaceUniquePaths) {
|
||||
this.makePathUnique(id);
|
||||
}
|
||||
|
||||
// Return the new file item
|
||||
return store.state.file.itemMap[id];
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Make sanity checks and then create/update the folder/file in the store.
|
||||
*/
|
||||
async storeItem(item, background = false) {
|
||||
async storeItem(item) {
|
||||
const id = item.id || utils.uid();
|
||||
const sanitizedName = utils.sanitizeName(item.name);
|
||||
|
||||
if (item.type === 'folder' && forbiddenFolderNameMatcher.exec(sanitizedName)) {
|
||||
if (background) {
|
||||
return null;
|
||||
}
|
||||
await store.dispatch('modal/unauthorizedName', item.name);
|
||||
throw new Error('Unauthorized name.');
|
||||
}
|
||||
|
||||
const workspaceUniquePaths = store.getters['workspace/hasUniquePaths'];
|
||||
|
||||
// Show warning dialogs
|
||||
if (!background) {
|
||||
// If name has been stripped
|
||||
if (sanitizedName !== utils.defaultName && sanitizedName !== item.name) {
|
||||
await store.dispatch('modal/stripName', item.name);
|
||||
}
|
||||
// Check if there is a path conflict
|
||||
if (workspaceUniquePaths) {
|
||||
if (store.getters['workspace/hasUniquePaths']) {
|
||||
const parentPath = store.getters.itemPaths[item.parentId] || '';
|
||||
const path = parentPath + sanitizedName;
|
||||
const pathItems = store.getters.pathItems[path] || [];
|
||||
@ -86,20 +85,43 @@ export default {
|
||||
await store.dispatch('modal/pathConflict', item.name);
|
||||
}
|
||||
}
|
||||
|
||||
return this.setOrPatchItem({
|
||||
...item,
|
||||
id,
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Create/update the folder/file in the store and make sure its path is unique.
|
||||
*/
|
||||
setOrPatchItem(patch) {
|
||||
const item = {
|
||||
...store.getters.allItemMap[patch.id] || patch,
|
||||
};
|
||||
if (!item.id) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (patch.parentId !== undefined) {
|
||||
item.parentId = patch.parentId || null;
|
||||
}
|
||||
if (patch.name) {
|
||||
const sanitizedName = utils.sanitizeName(patch.name);
|
||||
if (item.type !== 'folder' || !forbiddenFolderNameMatcher.exec(sanitizedName)) {
|
||||
item.name = sanitizedName;
|
||||
}
|
||||
}
|
||||
|
||||
// Save item in the store
|
||||
store.commit(`${item.type}/setItem`, {
|
||||
id,
|
||||
parentId: item.parentId || null,
|
||||
name: sanitizedName,
|
||||
});
|
||||
store.commit(`${item.type}/setItem`, item);
|
||||
|
||||
// Ensure path uniqueness
|
||||
if (workspaceUniquePaths) {
|
||||
this.makePathUnique(id);
|
||||
if (store.getters['workspace/hasUniquePaths']) {
|
||||
this.makePathUnique(item.id);
|
||||
}
|
||||
return store.getters.allItemMap[id];
|
||||
|
||||
return store.getters.allItemMap[item.id];
|
||||
},
|
||||
|
||||
/**
|
||||
|
@ -136,7 +136,7 @@ const localDbSvc = {
|
||||
* localDb will be finished. Effectively, open a transaction, then read and apply all changes
|
||||
* from the DB since the previous transaction, then write all the changes from the store.
|
||||
*/
|
||||
sync() {
|
||||
async sync() {
|
||||
return new Promise((resolve, reject) => {
|
||||
// Create the DB transaction
|
||||
this.connection.createTx((tx) => {
|
||||
@ -275,7 +275,7 @@ const localDbSvc = {
|
||||
/**
|
||||
* Retrieve an item from the DB and put it in the store.
|
||||
*/
|
||||
loadItem(id) {
|
||||
async loadItem(id) {
|
||||
// Check if item is in the store
|
||||
const itemInStore = store.getters.allItemMap[id];
|
||||
if (itemInStore) {
|
||||
@ -307,9 +307,8 @@ const localDbSvc = {
|
||||
/**
|
||||
* Unload from the store contents that haven't been opened recently
|
||||
*/
|
||||
unloadContents() {
|
||||
return this.sync()
|
||||
.then(() => {
|
||||
async unloadContents() {
|
||||
await this.sync();
|
||||
// Keep only last opened files in memory
|
||||
const lastOpenedFileIdSet = new Set(store.getters['data/lastOpenedIds']);
|
||||
Object.keys(contentTypes).forEach((type) => {
|
||||
@ -321,58 +320,50 @@ const localDbSvc = {
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Drop the database and clean the localStorage for the specified workspaceId.
|
||||
*/
|
||||
removeWorkspace(id) {
|
||||
async removeWorkspace(id) {
|
||||
const workspaces = {
|
||||
...store.getters['data/workspaces'],
|
||||
};
|
||||
delete workspaces[id];
|
||||
store.dispatch('data/setWorkspaces', workspaces);
|
||||
this.syncLocalStorage();
|
||||
return new Promise((resolve, reject) => {
|
||||
await new Promise((resolve, reject) => {
|
||||
const dbName = getDbName(id);
|
||||
const request = indexedDB.deleteDatabase(dbName);
|
||||
request.onerror = reject;
|
||||
request.onsuccess = resolve;
|
||||
})
|
||||
.then(() => {
|
||||
});
|
||||
localStorage.removeItem(`${id}/lastSyncActivity`);
|
||||
localStorage.removeItem(`${id}/lastWindowFocus`);
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Create the connection and start syncing.
|
||||
*/
|
||||
init() {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
async init() {
|
||||
// Reset the app if reset flag was passed
|
||||
if (resetApp) {
|
||||
return Promise.all(Object.keys(store.getters['data/workspaces'])
|
||||
.map(workspaceId => localDbSvc.removeWorkspace(workspaceId)))
|
||||
.then(() => utils.localStorageDataIds.forEach((id) => {
|
||||
await Promise.all(Object.keys(store.getters['data/workspaces'])
|
||||
.map(workspaceId => localDbSvc.removeWorkspace(workspaceId)));
|
||||
utils.localStorageDataIds.forEach((id) => {
|
||||
// Clean data stored in localStorage
|
||||
localStorage.removeItem(`data/${id}`);
|
||||
}))
|
||||
.then(() => {
|
||||
});
|
||||
window.location.reload();
|
||||
throw new Error('reload');
|
||||
});
|
||||
}
|
||||
|
||||
// Create the connection
|
||||
this.connection = new Connection();
|
||||
|
||||
// Load the DB
|
||||
return localDbSvc.sync();
|
||||
})
|
||||
.then(() => {
|
||||
await localDbSvc.sync();
|
||||
|
||||
// If exportWorkspace parameter was provided
|
||||
if (exportWorkspace) {
|
||||
const backup = JSON.stringify(store.getters.allItemMap);
|
||||
@ -427,7 +418,7 @@ const localDbSvc = {
|
||||
// watch current file changing
|
||||
store.watch(
|
||||
() => store.getters['file/current'].id,
|
||||
() => {
|
||||
async () => {
|
||||
// See if currentFile is real, ie it has an ID
|
||||
const currentFile = store.getters['file/current'];
|
||||
// If current file has no ID, get the most recent file
|
||||
@ -438,50 +429,43 @@ const localDbSvc = {
|
||||
store.commit('file/setCurrentId', recentFile.id);
|
||||
} else {
|
||||
// If still no ID, create a new file
|
||||
fileSvc.createFile({
|
||||
const newFile = await fileSvc.createFile({
|
||||
name: 'Welcome file',
|
||||
text: welcomeFile,
|
||||
}, true)
|
||||
}, true);
|
||||
// Set it as the current file
|
||||
.then(newFile => store.commit('file/setCurrentId', newFile.id));
|
||||
store.commit('file/setCurrentId', newFile.id);
|
||||
}
|
||||
} else {
|
||||
Promise.resolve()
|
||||
try {
|
||||
// Load contentState from DB
|
||||
.then(() => localDbSvc.loadContentState(currentFile.id))
|
||||
await localDbSvc.loadContentState(currentFile.id);
|
||||
// Load syncedContent from DB
|
||||
.then(() => localDbSvc.loadSyncedContent(currentFile.id))
|
||||
await localDbSvc.loadSyncedContent(currentFile.id);
|
||||
// Load content from DB
|
||||
.then(() => localDbSvc.loadItem(`${currentFile.id}/content`))
|
||||
.then(
|
||||
() => {
|
||||
// Set last opened file
|
||||
store.dispatch('data/setLastOpenedId', currentFile.id);
|
||||
// Cancel new discussion
|
||||
store.commit('discussion/setCurrentDiscussionId');
|
||||
// Open the gutter if file contains discussions
|
||||
store.commit(
|
||||
'discussion/setCurrentDiscussionId',
|
||||
store.getters['discussion/nextDiscussionId'],
|
||||
);
|
||||
},
|
||||
(err) => {
|
||||
try {
|
||||
await localDbSvc.loadItem(`${currentFile.id}/content`);
|
||||
} catch (err) {
|
||||
// Failure (content is not available), go back to previous file
|
||||
const lastOpenedFile = store.getters['file/lastOpened'];
|
||||
store.commit('file/setCurrentId', lastOpenedFile.id);
|
||||
throw err;
|
||||
},
|
||||
)
|
||||
.catch((err) => {
|
||||
}
|
||||
// Set last opened file
|
||||
store.dispatch('data/setLastOpenedId', currentFile.id);
|
||||
// Cancel new discussion and open the gutter if file contains discussions
|
||||
store.commit(
|
||||
'discussion/setCurrentDiscussionId',
|
||||
store.getters['discussion/nextDiscussionId'],
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(err); // eslint-disable-line no-console
|
||||
store.dispatch('notification/error', err);
|
||||
});
|
||||
}
|
||||
}, {
|
||||
immediate: true,
|
||||
}
|
||||
},
|
||||
{ immediate: true },
|
||||
);
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -7,6 +7,27 @@ const networkTimeout = 30 * 1000; // 30 sec
|
||||
let isConnectionDown = false;
|
||||
const userInactiveAfter = 2 * 60 * 1000; // 2 minutes
|
||||
|
||||
|
||||
function parseHeaders(xhr) {
|
||||
const pairs = xhr.getAllResponseHeaders().trim().split('\n');
|
||||
const headers = {};
|
||||
pairs.forEach((header) => {
|
||||
const split = header.trim().split(':');
|
||||
const key = split.shift().trim().toLowerCase();
|
||||
const value = split.join(':').trim();
|
||||
headers[key] = value;
|
||||
});
|
||||
return headers;
|
||||
}
|
||||
|
||||
function isRetriable(err) {
|
||||
if (err.status === 403) {
|
||||
const googleReason = ((((err.body || {}).error || {}).errors || [])[0] || {}).reason;
|
||||
return googleReason === 'rateLimitExceeded' || googleReason === 'userRateLimitExceeded';
|
||||
}
|
||||
return err.status === 429 || (err.status >= 500 && err.status < 600);
|
||||
}
|
||||
|
||||
export default {
|
||||
init() {
|
||||
// Keep track of the last user activity
|
||||
@ -31,37 +52,34 @@ export default {
|
||||
window.addEventListener('focus', setLastFocus);
|
||||
|
||||
// Check browser is online periodically
|
||||
const checkOffline = () => {
|
||||
const checkOffline = async () => {
|
||||
const isBrowserOffline = window.navigator.onLine === false;
|
||||
if (!isBrowserOffline &&
|
||||
store.state.lastOfflineCheck + networkTimeout + 5000 < Date.now() &&
|
||||
this.isUserActive()
|
||||
) {
|
||||
store.commit('updateLastOfflineCheck');
|
||||
new Promise((resolve, reject) => {
|
||||
const script = document.createElement('script');
|
||||
let timeout;
|
||||
let clean = (cb) => {
|
||||
clearTimeout(timeout);
|
||||
document.head.removeChild(script);
|
||||
clean = () => {}; // Prevent from cleaning several times
|
||||
cb();
|
||||
};
|
||||
script.onload = () => clean(resolve);
|
||||
script.onerror = () => clean(reject);
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
script.onload = resolve;
|
||||
script.onerror = reject;
|
||||
script.src = `https://apis.google.com/js/api.js?${Date.now()}`;
|
||||
try {
|
||||
document.head.appendChild(script); // This can fail with bad network
|
||||
timeout = setTimeout(() => clean(reject), networkTimeout);
|
||||
timeout = setTimeout(reject, networkTimeout);
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
isConnectionDown = false;
|
||||
}, () => {
|
||||
isConnectionDown = true;
|
||||
});
|
||||
isConnectionDown = false;
|
||||
} catch (e) {
|
||||
isConnectionDown = true;
|
||||
} finally {
|
||||
clearTimeout(timeout);
|
||||
document.head.removeChild(script);
|
||||
}
|
||||
}
|
||||
const offline = isBrowserOffline || isConnectionDown;
|
||||
if (store.state.offline !== offline) {
|
||||
@ -88,7 +106,7 @@ export default {
|
||||
isUserActive() {
|
||||
return this.lastActivity > Date.now() - userInactiveAfter && this.isWindowFocused();
|
||||
},
|
||||
loadScript(url) {
|
||||
async loadScript(url) {
|
||||
if (!scriptLoadingPromises[url]) {
|
||||
scriptLoadingPromises[url] = new Promise((resolve, reject) => {
|
||||
const script = document.createElement('script');
|
||||
@ -103,7 +121,7 @@ export default {
|
||||
}
|
||||
return scriptLoadingPromises[url];
|
||||
},
|
||||
startOauth2(url, params = {}, silent = false) {
|
||||
async startOauth2(url, params = {}, silent = false) {
|
||||
// Build the authorize URL
|
||||
const state = utils.uid();
|
||||
params.state = state;
|
||||
@ -125,47 +143,29 @@ export default {
|
||||
}
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let checkClosedInterval;
|
||||
let closeTimeout;
|
||||
let msgHandler;
|
||||
let clean = () => {
|
||||
clearInterval(checkClosedInterval);
|
||||
if (!silent && !wnd.closed) {
|
||||
wnd.close();
|
||||
}
|
||||
if (iframeElt) {
|
||||
document.body.removeChild(iframeElt);
|
||||
}
|
||||
clearTimeout(closeTimeout);
|
||||
window.removeEventListener('message', msgHandler);
|
||||
clean = () => Promise.resolve(); // Prevent from cleaning several times
|
||||
return Promise.resolve();
|
||||
};
|
||||
|
||||
try {
|
||||
return await new Promise((resolve, reject) => {
|
||||
if (silent) {
|
||||
iframeElt.onerror = () => clean()
|
||||
.then(() => reject(new Error('Unknown error.')));
|
||||
closeTimeout = setTimeout(
|
||||
() => clean()
|
||||
.then(() => {
|
||||
iframeElt.onerror = () => {
|
||||
reject(new Error('Unknown error.'));
|
||||
};
|
||||
closeTimeout = setTimeout(() => {
|
||||
isConnectionDown = true;
|
||||
store.commit('setOffline', true);
|
||||
store.commit('updateLastOfflineCheck');
|
||||
reject(new Error('You are offline.'));
|
||||
}),
|
||||
networkTimeout,
|
||||
);
|
||||
}, networkTimeout);
|
||||
} else {
|
||||
closeTimeout = setTimeout(
|
||||
() => clean()
|
||||
.then(() => reject(new Error('Timeout.'))),
|
||||
oauth2AuthorizationTimeout,
|
||||
);
|
||||
closeTimeout = setTimeout(() => {
|
||||
reject(new Error('Timeout.'));
|
||||
}, oauth2AuthorizationTimeout);
|
||||
}
|
||||
|
||||
msgHandler = event => event.source === wnd && event.origin === utils.origin && clean()
|
||||
.then(() => {
|
||||
msgHandler = (event) => {
|
||||
if (event.source === wnd && event.origin === utils.origin) {
|
||||
const data = utils.parseQueryParams(`${event.data}`.slice(1));
|
||||
if (data.error || data.state !== state) {
|
||||
console.error(data); // eslint-disable-line no-console
|
||||
@ -178,16 +178,31 @@ export default {
|
||||
expiresIn: data.expires_in,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener('message', msgHandler);
|
||||
if (!silent) {
|
||||
checkClosedInterval = setInterval(() => wnd.closed && clean()
|
||||
.then(() => reject(new Error('Authorize window was closed.'))), 250);
|
||||
checkClosedInterval = setInterval(() => {
|
||||
if (wnd.closed) {
|
||||
reject(new Error('Authorize window was closed.'));
|
||||
}
|
||||
}, 250);
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
clearInterval(checkClosedInterval);
|
||||
if (!silent && !wnd.closed) {
|
||||
wnd.close();
|
||||
}
|
||||
if (iframeElt) {
|
||||
document.body.removeChild(iframeElt);
|
||||
}
|
||||
clearTimeout(closeTimeout);
|
||||
window.removeEventListener('message', msgHandler);
|
||||
}
|
||||
},
|
||||
request(configParam, offlineCheck = false) {
|
||||
async request(configParam, offlineCheck = false) {
|
||||
let retryAfter = 500; // 500 ms
|
||||
const maxRetryAfter = 10 * 1000; // 10 sec
|
||||
const config = Object.assign({}, configParam);
|
||||
@ -198,27 +213,9 @@ export default {
|
||||
config.headers['Content-Type'] = 'application/json';
|
||||
}
|
||||
|
||||
function parseHeaders(xhr) {
|
||||
const pairs = xhr.getAllResponseHeaders().trim().split('\n');
|
||||
return pairs.reduce((headers, header) => {
|
||||
const split = header.trim().split(':');
|
||||
const key = split.shift().trim().toLowerCase();
|
||||
const value = split.join(':').trim();
|
||||
headers[key] = value;
|
||||
return headers;
|
||||
}, {});
|
||||
}
|
||||
|
||||
function isRetriable(err) {
|
||||
if (err.status === 403) {
|
||||
const googleReason = ((((err.body || {}).error || {}).errors || [])[0] || {}).reason;
|
||||
return googleReason === 'rateLimitExceeded' || googleReason === 'userRateLimitExceeded';
|
||||
}
|
||||
return err.status === 429 || (err.status >= 500 && err.status < 600);
|
||||
}
|
||||
|
||||
const attempt =
|
||||
() => new Promise((resolve, reject) => {
|
||||
const attempt = async () => {
|
||||
try {
|
||||
await new Promise((resolve, reject) => {
|
||||
if (offlineCheck) {
|
||||
store.commit('updateLastOfflineCheck');
|
||||
}
|
||||
@ -280,19 +277,20 @@ export default {
|
||||
xhr.responseType = 'blob';
|
||||
}
|
||||
xhr.send(config.body || null);
|
||||
})
|
||||
.catch((err) => {
|
||||
});
|
||||
} catch (err) {
|
||||
// Try again later in case of retriable error
|
||||
if (isRetriable(err) && retryAfter < maxRetryAfter) {
|
||||
return new Promise((resolve) => {
|
||||
await new Promise((resolve) => {
|
||||
setTimeout(resolve, retryAfter);
|
||||
// Exponential backoff
|
||||
retryAfter *= 2;
|
||||
})
|
||||
.then(attempt);
|
||||
});
|
||||
attempt();
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return attempt();
|
||||
},
|
||||
|
@ -15,24 +15,21 @@ export default new Provider({
|
||||
const token = this.getToken(location);
|
||||
return `${location.pageId} — ${location.blogUrl} — ${token.name}`;
|
||||
},
|
||||
publish(token, html, metadata, publishLocation) {
|
||||
return googleHelper.uploadBlogger(
|
||||
async publish(token, html, metadata, publishLocation) {
|
||||
const page = await googleHelper.uploadBlogger({
|
||||
token,
|
||||
publishLocation.blogUrl,
|
||||
publishLocation.blogId,
|
||||
publishLocation.pageId,
|
||||
metadata.title,
|
||||
html,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
true,
|
||||
)
|
||||
.then(page => ({
|
||||
blogUrl: publishLocation.blogUrl,
|
||||
blogId: publishLocation.blogId,
|
||||
postId: publishLocation.pageId,
|
||||
title: metadata.title,
|
||||
content: html,
|
||||
isPage: true,
|
||||
});
|
||||
return {
|
||||
...publishLocation,
|
||||
blogId: page.blog.id,
|
||||
pageId: page.id,
|
||||
}));
|
||||
};
|
||||
},
|
||||
makeLocation(token, blogUrl, pageId) {
|
||||
const location = {
|
||||
|
@ -15,23 +15,21 @@ export default new Provider({
|
||||
const token = this.getToken(location);
|
||||
return `${location.postId} — ${location.blogUrl} — ${token.name}`;
|
||||
},
|
||||
publish(token, html, metadata, publishLocation) {
|
||||
return googleHelper.uploadBlogger(
|
||||
async publish(token, html, metadata, publishLocation) {
|
||||
const post = await googleHelper.uploadBlogger({
|
||||
...publishLocation,
|
||||
token,
|
||||
publishLocation.blogUrl,
|
||||
publishLocation.blogId,
|
||||
publishLocation.postId,
|
||||
metadata.title,
|
||||
html,
|
||||
metadata.tags,
|
||||
metadata.status === 'draft',
|
||||
metadata.date,
|
||||
)
|
||||
.then(post => ({
|
||||
title: metadata.title,
|
||||
content: html,
|
||||
labels: metadata.tags,
|
||||
isDraft: metadata.status === 'draft',
|
||||
published: metadata.date,
|
||||
});
|
||||
return {
|
||||
...publishLocation,
|
||||
blogId: post.blog.id,
|
||||
postId: post.id,
|
||||
}));
|
||||
};
|
||||
},
|
||||
makeLocation(token, blogUrl, postId) {
|
||||
const location = {
|
||||
|
@ -2,6 +2,7 @@ import providerRegistry from './providerRegistry';
import emptyContent from '../../../data/emptyContent';
import utils from '../../utils';
import store from '../../../store';
import fileSvc from '../../fileSvc';

const dataExtractor = /<!--stackedit_data:([A-Za-z0-9+/=\s]+)-->$/;

@ -66,6 +67,14 @@ export default class Provider {
return utils.addItemHash(result);
}

static getContentSyncData(fileId) {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
if (!syncData) {
throw new Error(); // No need for a proper error message.
}
return syncData;
}

/**
* Find and open a file with location that meets the criteria
*/
@ -73,13 +82,13 @@ export default class Provider {
const location = utils.search(allLocations, criteria);
if (location) {
// Found one, open it if it exists
const file = store.state.file.itemMap[location.fileId];
if (file) {
store.commit('file/setCurrentId', file.id);
const item = store.state.file.itemMap[location.fileId];
if (item) {
store.commit('file/setCurrentId', item.id);
// If file is in the trash, restore it
if (file.parentId === 'trash') {
store.commit('file/patchItem', {
...file,
if (item.parentId === 'trash') {
fileSvc.setOrPatchItem({
...item,
parentId: null,
});
}

@ -3,13 +3,6 @@ import couchdbHelper from './helpers/couchdbHelper';
import Provider from './common/Provider';
import utils from '../utils';

const getSyncData = (fileId) => {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
return syncData
? Promise.resolve(syncData)
: Promise.reject(); // No need for a proper error message.
};

let syncLastSeq;

export default new Provider({
@ -17,7 +10,7 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
const dbUrl = (utils.queryParams.dbUrl || '').replace(/\/?$/, ''); // Remove trailing /
const workspaceParams = {
providerId: this.id,
@ -35,9 +28,16 @@ export default new Provider({
});
}

return Promise.resolve()
.then(() => getWorkspace() || couchdbHelper.getDb(getToken())
.then((db) => {
// Create the workspace
let workspace = getWorkspace();
if (!workspace) {
// Make sure the database exists and retrieve its name
let db;
try {
db = await couchdbHelper.getDb(getToken());
} catch (e) {
throw new Error(`${dbUrl} is not accessible. Make sure you have the proper permissions.`);
}
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
id: workspaceId,
@ -46,11 +46,9 @@ export default new Provider({
dbUrl,
},
});
return getWorkspace();
}, () => {
throw new Error(`${dbUrl} is not accessible. Make sure you have the right permissions.`);
}))
.then((workspace) => {
workspace = getWorkspace();
}

// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
@ -62,13 +60,11 @@ export default new Provider({
});
}
return getWorkspace();
});
},
getChanges() {
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const lastSeq = store.getters['data/localSettings'].syncLastSeq;
return couchdbHelper.getChanges(syncToken, lastSeq)
.then((result) => {
const result = await couchdbHelper.getChanges(syncToken, lastSeq);
const changes = result.changes.filter((change) => {
if (!change.deleted && change.doc) {
change.item = change.doc.item;
@ -89,31 +85,28 @@ export default new Provider({
});
syncLastSeq = result.lastSeq;
return changes;
});
},
onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
syncLastSeq,
});
},
saveSimpleItem(item, syncData) {
async saveSimpleItem(item, syncData) {
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.uploadDocument(
syncToken,
const { id, rev } = couchdbHelper.uploadDocument({
token: syncToken,
item,
undefined,
undefined,
syncData && syncData.id,
syncData && syncData.rev,
)
.then(res => ({
documentId: syncData && syncData.id,
rev: syncData && syncData.rev,
});
return {
// Build sync data
id: res.id,
id,
itemId: item.id,
type: item.type,
hash: item.hash,
rev: res.rev,
}));
rev,
};
},
removeItem(syncData) {
const syncToken = store.getters['workspace/syncToken'];
@ -122,14 +115,13 @@ export default new Provider({
downloadContent(token, syncLocation) {
return this.downloadData(`${syncLocation.fileId}/content`);
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
}
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.retrieveDocumentWithAttachments(syncToken, syncData.id)
.then((body) => {
const body = await couchdbHelper.retrieveDocumentWithAttachments(syncToken, syncData.id);
let item;
if (body.item.type === 'content') {
item = Provider.parseContent(body.attachments.data, body.item.id);
@ -147,17 +139,14 @@ export default new Provider({
});
}
return item;
});
},
uploadContent(token, content, syncLocation) {
return this.uploadData(content)
.then(() => syncLocation);
async uploadContent(token, content, syncLocation) {
await this.uploadData(content);
return syncLocation;
},
uploadData(item) {
async uploadData(item) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
if (!syncData || syncData.hash !== item.hash) {
let data;
let dataType;
if (item.type === 'content') {
@ -168,19 +157,19 @@ export default new Provider({
dataType = 'application/json';
}
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.uploadDocument(
syncToken,
{
const res = await couchdbHelper.uploadDocument({
token: syncToken,
item: {
id: item.id,
type: item.type,
hash: item.hash,
},
data,
dataType,
syncData && syncData.id,
syncData && syncData.rev,
)
.then(res => store.dispatch('data/patchSyncData', {
documentId: syncData && syncData.id,
rev: syncData && syncData.rev,
});
store.dispatch('data/patchSyncData', {
[res.id]: {
// Build sync data
id: res.id,
@ -189,12 +178,12 @@ export default new Provider({
hash: item.hash,
rev: res.rev,
},
}));
});
}
},
listRevisions(token, fileId) {
return getSyncData(fileId)
.then(syncData => couchdbHelper.retrieveDocumentWithRevisions(token, syncData.id))
.then((body) => {
async listRevisions(token, fileId) {
const syncData = Provider.getContentSyncData(fileId);
const body = await couchdbHelper.retrieveDocumentWithRevisions(token, syncData.id);
const revisions = [];
body._revs_info.forEach((revInfo) => { // eslint-disable-line no-underscore-dangle
if (revInfo.status === 'available') {
@ -206,20 +195,17 @@ export default new Provider({
}
});
return revisions;
});
},
loadRevision(token, fileId, revision) {
return getSyncData(fileId)
.then(syncData => couchdbHelper.retrieveDocument(token, syncData.id, revision.id))
.then((body) => {
async loadRevision(token, fileId, revision) {
const syncData = Provider.getContentSyncData(fileId);
const body = await couchdbHelper.retrieveDocument(token, syncData.id, revision.id);
revision.sub = body.sub;
revision.created = body.time || 1; // Has to be truthy to prevent from loading several times
});
},
getRevisionContent(token, fileId, revisionId) {
return getSyncData(fileId)
.then(syncData => couchdbHelper
.retrieveDocumentWithAttachments(token, syncData.id, revisionId))
.then(body => Provider.parseContent(body.attachments.data, body.item.id));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const body = await couchdbHelper
.retrieveDocumentWithAttachments(token, syncData.id, revisionId);
return Provider.parseContent(body.attachments.data, body.item.id);
},
});

@ -34,61 +34,62 @@ export default new Provider({
checkPath(path) {
return path && path.match(/^\/[^\\<>:"|?*]+$/);
},
downloadContent(token, syncLocation) {
return dropboxHelper.downloadFile(
async downloadContent(token, syncLocation) {
const { content } = await dropboxHelper.downloadFile({
token,
makePathRelative(token, syncLocation.path),
syncLocation.dropboxFileId,
)
.then(({ content }) => Provider.parseContent(content, `${syncLocation.fileId}/content`));
path: makePathRelative(token, syncLocation.path),
fileId: syncLocation.dropboxFileId,
});
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
uploadContent(token, content, syncLocation) {
return dropboxHelper.uploadFile(
async uploadContent(token, content, syncLocation) {
const dropboxFile = await dropboxHelper.uploadFile({
token,
makePathRelative(token, syncLocation.path),
Provider.serializeContent(content),
syncLocation.dropboxFileId,
)
.then(dropboxFile => ({
path: makePathRelative(token, syncLocation.path),
content: Provider.serializeContent(content),
fileId: syncLocation.dropboxFileId,
});
return {
...syncLocation,
path: makePathAbsolute(token, dropboxFile.path_display),
dropboxFileId: dropboxFile.id,
}));
};
},
publish(token, html, metadata, publishLocation) {
return dropboxHelper.uploadFile(
async publish(token, html, metadata, publishLocation) {
const dropboxFile = await dropboxHelper.uploadFile({
token,
publishLocation.path,
html,
publishLocation.dropboxFileId,
)
.then(dropboxFile => ({
path: publishLocation.path,
content: html,
fileId: publishLocation.dropboxFileId,
});
return {
...publishLocation,
path: makePathAbsolute(token, dropboxFile.path_display),
dropboxFileId: dropboxFile.id,
}));
};
},
openFiles(token, paths) {
const openOneFile = () => {
const path = paths.pop();
if (!path) {
return null;
}
if (Provider.openFileWithLocation(store.getters['syncLocation/items'], {
async openFiles(token, paths) {
await utils.awaitSequence(paths, async (path) => {
// Check if the file exists and open it
if (!Provider.openFileWithLocation(store.getters['syncLocation/items'], {
providerId: this.id,
path,
})) {
// File exists and has just been opened. Next...
return openOneFile();
}
// Download content from Dropbox and create the file
// Download content from Dropbox
const syncLocation = {
path,
providerId: this.id,
sub: token.sub,
};
return this.downloadContent(token, syncLocation)
.then((content) => {
let content;
try {
content = await this.downloadContent(token, syncLocation);
} catch (e) {
store.dispatch('notification/error', `Could not open file ${path}.`);
return;
}

// Create the file
let name = path;
const slashPos = name.lastIndexOf('/');
if (slashPos > -1 && slashPos < name.length - 1) {
@ -98,7 +99,7 @@ export default new Provider({
if (dotPos > 0 && slashPos < name.length) {
name = name.slice(0, dotPos);
}
return fileSvc.createFile({
const item = await fileSvc.createFile({
name,
parentId: store.getters['file/current'].parentId,
text: content.text,
@ -106,8 +107,6 @@ export default new Provider({
discussions: content.discussions,
comments: content.comments,
}, true);
})
.then((item) => {
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
@ -115,13 +114,8 @@ export default new Provider({
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from Dropbox.`);
})
.catch(() => {
store.dispatch('notification/error', `Could not open file ${path}.`);
})
.then(() => openOneFile());
};
return Promise.resolve(openOneFile());
}
});
},
makeLocation(token, path) {
return {

@ -15,39 +15,38 @@ export default new Provider({
const token = this.getToken(location);
return `${location.filename} — ${location.gistId} — ${token.name}`;
},
downloadContent(token, syncLocation) {
return githubHelper.downloadGist(token, syncLocation.gistId, syncLocation.filename)
.then(content => Provider.parseContent(content, `${syncLocation.fileId}/content`));
async downloadContent(token, syncLocation) {
const content = await githubHelper.downloadGist({
...syncLocation,
token,
});
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
uploadContent(token, content, syncLocation) {
async uploadContent(token, content, syncLocation) {
const file = store.state.file.itemMap[syncLocation.fileId];
const description = utils.sanitizeName(file && file.name);
return githubHelper.uploadGist(
const gist = await githubHelper.uploadGist({
...syncLocation,
token,
description,
syncLocation.filename,
Provider.serializeContent(content),
syncLocation.isPublic,
syncLocation.gistId,
)
.then(gist => ({
content: Provider.serializeContent(content),
});
return {
...syncLocation,
gistId: gist.id,
}));
};
},
publish(token, html, metadata, publishLocation) {
return githubHelper.uploadGist(
async publish(token, html, metadata, publishLocation) {
const gist = await githubHelper.uploadGist({
...publishLocation,
token,
metadata.title,
publishLocation.filename,
html,
publishLocation.isPublic,
publishLocation.gistId,
)
.then(gist => ({
description: metadata.title,
content: html,
});
return {
...publishLocation,
gistId: gist.id,
}));
};
},
makeLocation(token, filename, isPublic, gistId) {
return {

@ -18,68 +18,58 @@ export default new Provider({
const token = this.getToken(location);
return `${location.path} — ${location.owner}/${location.repo} — ${token.name}`;
},
downloadContent(token, syncLocation) {
return githubHelper.downloadFile(
async downloadContent(token, syncLocation) {
try {
const { sha, content } = await githubHelper.downloadFile({
...syncLocation,
token,
syncLocation.owner,
syncLocation.repo,
syncLocation.branch,
syncLocation.path,
)
.then(({ sha, content }) => {
});
savedSha[syncLocation.id] = sha;
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
})
.catch(() => null); // Ignore error, upload is going to fail anyway
},
uploadContent(token, content, syncLocation) {
let result = Promise.resolve();
if (!savedSha[syncLocation.id]) {
result = this.downloadContent(token, syncLocation); // Get the last sha
}
return result
.then(() => {
const sha = savedSha[syncLocation.id];
delete savedSha[syncLocation.id];
return githubHelper.uploadFile(
token,
syncLocation.owner,
syncLocation.repo,
syncLocation.branch,
syncLocation.path,
Provider.serializeContent(content),
sha,
);
})
.then(() => syncLocation);
},
publish(token, html, metadata, publishLocation) {
return this.downloadContent(token, publishLocation) // Get the last sha
.then(() => {
const sha = savedSha[publishLocation.id];
delete savedSha[publishLocation.id];
return githubHelper.uploadFile(
token,
publishLocation.owner,
publishLocation.repo,
publishLocation.branch,
publishLocation.path,
html,
sha,
);
})
.then(() => publishLocation);
},
openFile(token, syncLocation) {
return Promise.resolve()
.then(() => {
if (Provider.openFileWithLocation(store.getters['syncLocation/items'], syncLocation)) {
// File exists and has just been opened. Next...
} catch (e) {
// Ignore error, upload is going to fail anyway
return null;
}
// Download content from GitHub and create the file
return this.downloadContent(token, syncLocation)
.then((content) => {
},
async uploadContent(token, content, syncLocation) {
if (!savedSha[syncLocation.id]) {
await this.downloadContent(token, syncLocation); // Get the last sha
}
const sha = savedSha[syncLocation.id];
delete savedSha[syncLocation.id];
await githubHelper.uploadFile({
...syncLocation,
token,
content: Provider.serializeContent(content),
sha,
});
return syncLocation;
},
async publish(token, html, metadata, publishLocation) {
await this.downloadContent(token, publishLocation); // Get the last sha
const sha = savedSha[publishLocation.id];
delete savedSha[publishLocation.id];
await githubHelper.uploadFile({
...publishLocation,
token,
content: html,
sha,
});
return publishLocation;
},
async openFile(token, syncLocation) {
// Check if the file exists and open it
if (!Provider.openFileWithLocation(store.getters['syncLocation/items'], syncLocation)) {
// Download content from GitHub
let content;
try {
content = await this.downloadContent(token, syncLocation);
} catch (e) {
store.dispatch('notification/error', `Could not open file ${syncLocation.path}.`);
return;
}

// Create the file
let name = syncLocation.path;
const slashPos = name.lastIndexOf('/');
if (slashPos > -1 && slashPos < name.length - 1) {
@ -89,7 +79,7 @@ export default new Provider({
if (dotPos > 0 && slashPos < name.length) {
name = name.slice(0, dotPos);
}
return fileSvc.createFile({
const item = await fileSvc.createFile({
name,
parentId: store.getters['file/current'].parentId,
text: content.text,
@ -97,8 +87,6 @@ export default new Provider({
discussions: content.discussions,
comments: content.comments,
}, true);
})
.then((item) => {
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
@ -106,11 +94,7 @@ export default new Provider({
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from GitHub.`);
})
.catch(() => {
store.dispatch('notification/error', `Could not open file ${syncLocation.path}.`);
});
});
}
},
parseRepoUrl(url) {
const parsedRepo = url && url.match(/([^/:]+)\/([^/]+?)(?:\.git|\/)?$/);

@ -4,15 +4,8 @@ import Provider from './common/Provider';
import utils from '../utils';
import userSvc from '../userSvc';

const getSyncData = (fileId) => {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
return syncData
? Promise.resolve(syncData)
: Promise.reject(); // No need for a proper error message.
};

const getAbsolutePath = syncData =>
(store.getters['workspace/currentWorkspace'].path || '') + syncData.id;
`${store.getters['workspace/currentWorkspace'].path || ''}${syncData.id}`;

const getWorkspaceWithOwner = () => {
const workspace = store.getters['workspace/currentWorkspace'];
@ -38,7 +31,7 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
const [owner, repo] = (utils.queryParams.repo || '').split('/');
const { branch } = utils.queryParams;
const workspaceParams = {
@ -55,23 +48,17 @@ export default new Provider({
const workspaceId = utils.makeWorkspaceId(workspaceParams);
let workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];

return Promise.resolve()
.then(() => {
// See if we already have a token
let token;
if (workspace) {
// Token sub is in the workspace
const token = store.getters['data/githubTokens'][workspace.sub];
if (token) {
return token;
token = store.getters['data/githubTokens'][workspace.sub];
}
if (!token) {
await store.dispatch('modal/open', { type: 'githubAccount' });
token = await githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess);
}
// If no token has been found, popup an authorize window and get one
return store.dispatch('modal/open', {
type: 'githubAccount',
onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
});
})
.then((token) => {

if (!workspace) {
const pathEntries = (path || '').split('/');
const name = pathEntries[pathEntries.length - 2] || repo; // path ends with `/`
@ -82,6 +69,7 @@ export default new Provider({
name,
};
}

// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
@ -93,13 +81,16 @@ export default new Provider({
});
}
return store.getters['data/sanitizedWorkspaces'][workspaceId];
});
},
getChanges() {
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.getHeadTree(syncToken, owner, repo, branch)
.then((tree) => {
const tree = await githubHelper.getTree({
token: syncToken,
owner,
repo,
branch,
});
const workspacePath = store.getters['workspace/currentWorkspace'].path || '';
const syncDataByPath = store.getters['data/syncData'];
const syncDataByItemId = store.getters['data/syncDataByItemId'];
@ -296,14 +287,10 @@ export default new Provider({
});

return changes;
});
},
saveSimpleItem(item) {
async saveSimpleItem(item) {
const path = store.getters.itemPaths[item.fileId || item.id];
return Promise.resolve()
.then(() => {
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
const syncData = {
itemId: item.id,
type: item.type,
@ -312,10 +299,10 @@ export default new Provider({

if (item.type === 'file') {
syncData.id = `${path}.md`;
} else if (item.type === 'folder') {
syncData.id = path;
return syncData;
}
if (syncData.id) {
if (item.type === 'folder') {
syncData.id = path;
return syncData;
}

@ -328,42 +315,38 @@ export default new Provider({
}), true);
const extension = item.type === 'syncLocation' ? 'sync' : 'publish';
syncData.id = `${path}.${data}.${extension}`;
return githubHelper.uploadFile(
syncToken,
owner,
repo,
branch,
getAbsolutePath(syncData),
'',
treeShaMap[syncData.id],
).then(() => syncData);
await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
content: '',
sha: treeShaMap[syncData.id],
});
return syncData;
},
removeItem(syncData) {
async removeItem(syncData) {
// Ignore content deletion
if (syncData.type === 'content') {
return Promise.resolve();
}
if (syncData.type !== 'content') {
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.removeFile(
syncToken,
owner,
repo,
branch,
getAbsolutePath(syncData),
treeShaMap[syncData.id],
);
await githubHelper.removeFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
sha: treeShaMap[syncData.id],
});
}
},
downloadContent(token, syncLocation) {
async downloadContent(token, syncLocation) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!syncData || !contentSyncData) {
return Promise.resolve();
return null;
}
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.downloadFile(token, owner, repo, branch, getAbsolutePath(syncData))
.then(({ sha, content }) => {
const { sha, content } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
path: getAbsolutePath(syncData),
});
const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
@ -375,17 +358,18 @@ export default new Provider({
});
}
return item;
});
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
return null;
}
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.downloadFile(syncToken, owner, repo, branch, getAbsolutePath(syncData))
.then(({ sha, content }) => {
const { sha, content } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
});
const item = JSON.parse(content);
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
@ -397,26 +381,20 @@ export default new Provider({
});
}
return item;
});
},
uploadContent(token, content, syncLocation) {
async uploadContent(token, content, syncLocation) {
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (contentSyncData && contentSyncData.hash === content.hash) {
return Promise.resolve(syncLocation);
}
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.uploadFile(
if (!contentSyncData || contentSyncData.hash !== content.hash) {
const path = `${store.getters.itemPaths[syncLocation.fileId]}.md`;
const absolutePath = `${store.getters['workspace/currentWorkspace'].path || ''}${path}`;
const id = `/${path}`;
const res = await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token,
owner,
repo,
branch,
getAbsolutePath(syncData),
Provider.serializeContent(content),
treeShaMap[syncData.id],
)
.then((res) => {
const id = `/${syncData.id}`;
path: absolutePath,
content: Provider.serializeContent(content),
sha: treeShaMap[id],
});
store.dispatch('data/patchSyncData', {
[id]: {
// Build sync data
@ -427,14 +405,12 @@ export default new Provider({
sha: res.content.sha,
},
});
return syncLocation;
});
},
uploadData(item) {
const oldSyncData = store.getters['data/syncDataByItemId'][item.id];
if (oldSyncData && oldSyncData.hash === item.hash) {
return Promise.resolve();
}
return syncLocation;
},
async uploadData(item) {
const oldSyncData = store.getters['data/syncDataByItemId'][item.id];
if (!oldSyncData || oldSyncData.hash !== item.hash) {
const syncData = {
id: `.stackedit-data/${item.id}.json`,
itemId: item.id,
@ -442,22 +418,20 @@ export default new Provider({
hash: item.hash,
};
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.uploadFile(
syncToken,
owner,
repo,
branch,
getAbsolutePath(syncData),
JSON.stringify(item),
oldSyncData && oldSyncData.sha,
)
.then(res => store.dispatch('data/patchSyncData', {
const res = await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
content: JSON.stringify(item),
sha: oldSyncData && oldSyncData.sha,
});
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
sha: res.content.sha,
},
}));
});
}
},
onSyncEnd() {
// Clean up
@ -468,34 +442,48 @@ export default new Provider({
treeSyncLocationMap = null;
treePublishLocationMap = null;
},
listRevisions(token, fileId) {
async listRevisions(token, fileId) {
const { owner, repo, branch } = getWorkspaceWithOwner();
return getSyncData(fileId)
.then(syncData => githubHelper.getCommits(token, owner, repo, branch, syncData.id))
.then(entries => entries.map((entry) => {
const syncData = Provider.getContentSyncData(fileId);
const entries = await githubHelper.getCommits({
token,
owner,
repo,
sha: branch,
path: syncData.id,
});
return entries.map(({
author,
committer,
commit,
sha,
}) => {
let user;
if (entry.author && entry.author.login) {
user = entry.author;
} else if (entry.committer && entry.committer.login) {
user = entry.committer;
if (author && author.login) {
user = author;
} else if (committer && committer.login) {
user = committer;
}
const sub = `gh:${user.id}`;
userSvc.addInfo({ id: sub, name: user.login, imageUrl: user.avatar_url });
const date = (entry.commit.author && entry.commit.author.date)
|| (entry.commit.committer && entry.commit.committer.date);
const date = (commit.author && commit.author.date)
|| (commit.committer && commit.committer.date);
return {
id: entry.sha,
id: sha,
sub,
created: date ? new Date(date).getTime() : 1,
};
})
.sort((revision1, revision2) => revision2.created - revision1.created));
.sort((revision1, revision2) => revision2.created - revision1.created);
},
getRevisionContent(token, fileId, revisionId) {
const { owner, repo } = getWorkspaceWithOwner();
return getSyncData(fileId)
.then(syncData => githubHelper
.downloadFile(token, owner, repo, revisionId, getAbsolutePath(syncData)))
.then(({ content }) => Provider.parseContent(content, `${fileId}/content`));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const { content } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
branch: revisionId,
path: getAbsolutePath(syncData),
});
return Provider.parseContent(content, `${fileId}/content`);
},
});

@ -10,21 +10,17 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
// Nothing much to do since the main workspace isn't necessarily synchronized
return Promise.resolve()
.then(() => {
// Remove the URL hash
utils.setQueryParams();
// Return the main workspace
return store.getters['data/workspaces'].main;
});
},
getChanges() {
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const startPageToken = store.getters['data/localSettings'].syncStartPageToken;
return googleHelper.getChanges(syncToken, startPageToken, true)
.then((result) => {
const result = await googleHelper.getChanges(syncToken, startPageToken, true);
const changes = result.changes.filter((change) => {
if (change.file) {
// Parse item from file name
@ -46,29 +42,27 @@ export default new Provider({
});
syncStartPageToken = result.startPageToken;
return changes;
});
},
onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
syncStartPageToken,
});
},
saveSimpleItem(item, syncData, ifNotTooLate) {
async saveSimpleItem(item, syncData, ifNotTooLate) {
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.uploadAppDataFile(
syncToken,
JSON.stringify(item),
undefined,
syncData && syncData.id,
const file = await googleHelper.uploadAppDataFile({
token: syncToken,
name: JSON.stringify(item),
fileId: syncData && syncData.id,
ifNotTooLate,
)
.then(file => ({
});
// Build sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
}));
};
},
removeItem(syncData, ifNotTooLate) {
const syncToken = store.getters['workspace/syncToken'];
@ -77,14 +71,13 @@ export default new Provider({
downloadContent(token, syncLocation) {
return this.downloadData(`${syncLocation.fileId}/content`);
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
return null;
}
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.downloadAppDataFile(syncToken, syncData.id)
.then((data) => {
const data = await googleHelper.downloadAppDataFile(syncToken, syncData.id);
const item = utils.addItemHash(JSON.parse(data));
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
@ -95,30 +88,27 @@ export default new Provider({
});
}
return item;
});
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
return this.uploadData(content, ifNotTooLate)
.then(() => syncLocation);
async uploadContent(token, content, syncLocation, ifNotTooLate) {
await this.uploadData(content, ifNotTooLate);
return syncLocation;
},
uploadData(item, ifNotTooLate) {
async uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
if (!syncData || syncData.hash !== item.hash) {
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.uploadAppDataFile(
syncToken,
JSON.stringify({
const file = await googleHelper.uploadAppDataFile({
token: syncToken,
name: JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
}),
JSON.stringify(item),
syncData && syncData.id,
media: JSON.stringify(item),
fileId: syncData && syncData.id,
ifNotTooLate,
)
.then(file => store.dispatch('data/patchSyncData', {
});
store.dispatch('data/patchSyncData', {
[file.id]: {
// Build sync data
id: file.id,
@ -126,27 +116,22 @@ export default new Provider({
type: item.type,
hash: item.hash,
},
}));
},
listRevisions(token, fileId) {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
if (!syncData) {
return Promise.reject(); // No need for a proper error message.
});
}
return googleHelper.getAppDataFileRevisions(token, syncData.id)
.then(revisions => revisions.map(revision => ({
},
async listRevisions(token, fileId) {
const syncData = Provider.getContentSyncData(fileId);
const revisions = await googleHelper.getAppDataFileRevisions(token, syncData.id);
return revisions.map(revision => ({
id: revision.id,
sub: revision.lastModifyingUser && `go:${revision.lastModifyingUser.permissionId}`,
created: new Date(revision.modifiedTime).getTime(),
}))
.sort((revision1, revision2) => revision2.created - revision1.created));
.sort((revision1, revision2) => revision2.created - revision1.created);
},
getRevisionContent(token, fileId, revisionId) {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
if (!syncData) {
return Promise.reject(); // No need for a proper error message.
}
return googleHelper.downloadAppDataFileRevision(token, syncData.id, revisionId)
.then(content => JSON.parse(content));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const content = await googleHelper.downloadAppDataFileRevision(token, syncData.id, revisionId);
return JSON.parse(content);
},
});

@ -17,22 +17,20 @@ export default new Provider({
const token = this.getToken(location);
return `${location.driveFileId} — ${token.name}`;
},
initAction() {
async initAction() {
const state = googleHelper.driveState || {};
return state.userId && Promise.resolve()
.then(() => {
if (state.userId) {
// Try to find the token corresponding to the user ID
const token = store.getters['data/googleTokens'][state.userId];
let token = store.getters['data/googleTokens'][state.userId];
// If not found or not enough permission, popup an OAuth2 window
return token && token.isDrive ? token : store.dispatch('modal/open', {
type: 'googleDriveAccount',
onResolve: () => googleHelper.addDriveAccount(
if (!token || !token.isDrive) {
await store.dispatch('modal/open', { type: 'googleDriveAccount' });
token = await googleHelper.addDriveAccount(
!store.getters['data/localSettings'].googleDriveRestrictedAccess,
state.userId,
),
});
})
.then((token) => {
);
}

const openWorkspaceIfExists = (file) => {
const folderId = file
&& file.appProperties
@ -56,131 +54,121 @@ export default new Provider({
case 'create':
default:
// See if folder is part of a workspace we can open
return googleHelper.getFile(token, state.folderId)
.then((folder) => {
try {
const folder = await googleHelper.getFile(token, state.folderId);
folder.appProperties = folder.appProperties || {};
googleHelper.driveActionFolder = folder;
openWorkspaceIfExists(folder);
}, (err) => {
} catch (err) {
if (!err || err.status !== 404) {
throw err;
}
// We received an HTTP 404 meaning we have no permission to read the folder
googleHelper.driveActionFolder = { id: state.folderId };
});
}
break;

case 'open': {
const getOneFile = (ids = state.ids || []) => {
const id = ids.shift();
return id && googleHelper.getFile(token, id)
.then((file) => {
await utils.awaitSequence(state.ids || [], async (id) => {
const file = await googleHelper.getFile(token, id);
file.appProperties = file.appProperties || {};
googleHelper.driveActionFiles.push(file);
return getOneFile(ids);
});
};

return getOneFile()
// Check if first file is part of a workspace
.then(() => openWorkspaceIfExists(googleHelper.driveActionFiles[0]));
openWorkspaceIfExists(googleHelper.driveActionFiles[0]);
}
}
}
});
},
performAction() {
return Promise.resolve()
.then(() => {
async performAction() {
const state = googleHelper.driveState || {};
const token = store.getters['data/googleTokens'][state.userId];
switch (token && state.action) {
case 'create':
return fileSvc.createFile({}, true)
.then((file) => {
case 'create': {
const file = await fileSvc.createFile({}, true);
store.commit('file/setCurrentId', file.id);
// Return a new syncLocation
return this.makeLocation(token, null, googleHelper.driveActionFolder.id);
});
}
case 'open':
return store.dispatch(
store.dispatch(
'queue/enqueue',
() => this.openFiles(token, googleHelper.driveActionFiles),
);
return null;
default:
return null;
}
});
},
downloadContent(token, syncLocation) {
return googleHelper.downloadFile(token, syncLocation.driveFileId)
.then(content => Provider.parseContent(content, `${syncLocation.fileId}/content`));
async downloadContent(token, syncLocation) {
const content = await googleHelper.downloadFile(token, syncLocation.driveFileId);
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
async uploadContent(token, content, syncLocation, ifNotTooLate) {
const file = store.state.file.itemMap[syncLocation.fileId];
const name = utils.sanitizeName(file && file.name);
const parents = [];
if (syncLocation.driveParentId) {
parents.push(syncLocation.driveParentId);
}
return googleHelper.uploadFile(
const driveFile = await googleHelper.uploadFile({
token,
name,
parents,
undefined,
Provider.serializeContent(content),
undefined,
syncLocation.driveFileId,
undefined,
media: Provider.serializeContent(content),
fileId: syncLocation.driveFileId,
ifNotTooLate,
)
.then(driveFile => ({
});
return {
...syncLocation,
driveFileId: driveFile.id,
}));
};
},
publish(token, html, metadata, publishLocation) {
return googleHelper.uploadFile(
async publish(token, html, metadata, publishLocation) {
const driveFile = await googleHelper.uploadFile({
token,
metadata.title,
[],
undefined,
html,
publishLocation.templateId ? 'text/html' : undefined,
publishLocation.driveFileId,
)
.then(driveFile => ({
name: metadata.title,
parents: [],
media: html,
mediaType: publishLocation.templateId ? 'text/html' : undefined,
fileId: publishLocation.driveFileId,
});
return {
...publishLocation,
driveFileId: driveFile.id,
}));
};
},
openFiles(token, driveFiles) {
const openOneFile = () => {
const driveFile = driveFiles.shift();
if (!driveFile) {
return null;
}
if (Provider.openFileWithLocation(store.getters['syncLocation/items'], {
async openFiles(token, driveFiles) {
return utils.awaitSequence(driveFiles, async (driveFile) => {
// Check if the file exists and open it
if (!Provider.openFileWithLocation(store.getters['syncLocation/items'], {
providerId: this.id,
driveFileId: driveFile.id,
})) {
// File exists and has just been opened. Next...
return openOneFile();
}
// Download content from Google Drive and create the file
// Download content from Google Drive
const syncLocation = {
driveFileId: driveFile.id,
providerId: this.id,
sub: token.sub,
};
return this.downloadContent(token, syncLocation)
.then(content => fileSvc.createFile({
let content;
try {
content = await this.downloadContent(token, syncLocation);
} catch (e) {
store.dispatch('notification/error', `Could not open file ${driveFile.id}.`);
return;
}

// Create the file
const item = await fileSvc.createFile({
name: driveFile.name,
parentId: store.getters['file/current'].parentId,
text: content.text,
properties: content.properties,
discussions: content.discussions,
comments: content.comments,
}, true))
.then((item) => {
}, true);
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
@ -188,13 +176,8 @@ export default new Provider({
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from Google Drive.`);
})
.catch(() => {
store.dispatch('notification/error', `Could not open file ${driveFile.id}.`);
})
.then(() => openOneFile());
};
return Promise.resolve(openOneFile());
}
});
},
makeLocation(token, fileId, folderId) {
const location = {

@ -4,13 +4,6 @@ import Provider from './common/Provider';
|
||||
import utils from '../utils';
|
||||
import fileSvc from '../fileSvc';
|
||||
|
||||
const getSyncData = (fileId) => {
|
||||
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
|
||||
return syncData
|
||||
? Promise.resolve(syncData)
|
||||
: Promise.reject(); // No need for a proper error message.
|
||||
};
|
||||
|
||||
let fileIdToOpen;
|
||||
let syncStartPageToken;
|
||||
|
||||
@ -19,7 +12,7 @@ export default new Provider({
|
||||
getToken() {
|
||||
return store.getters['workspace/syncToken'];
|
||||
},
|
||||
initWorkspace() {
|
||||
async initWorkspace() {
|
||||
const makeWorkspaceParams = folderId => ({
|
||||
providerId: this.id,
|
||||
folderId,
|
||||
@ -31,67 +24,48 @@ export default new Provider({
|
||||
const getWorkspace = folderId =>
|
||||
store.getters['data/sanitizedWorkspaces'][makeWorkspaceId(folderId)];
|
||||
|
||||
const initFolder = (token, folder) => Promise.resolve({
|
||||
const initFolder = async (token, folder) => {
|
||||
const appProperties = {
|
||||
folderId: folder.id,
|
||||
dataFolderId: folder.appProperties.dataFolderId,
|
||||
trashFolderId: folder.appProperties.trashFolderId,
|
||||
})
|
||||
.then((properties) => {
|
||||
};
|
||||
|
||||
// Make sure data folder exists
|
||||
if (properties.dataFolderId) {
|
||||
return properties;
|
||||
}
|
||||
return googleHelper.uploadFile(
|
||||
if (!appProperties.dataFolderId) {
|
||||
appProperties.dataFolderId = (await googleHelper.uploadFile({
|
||||
token,
|
||||
'.stackedit-data',
|
||||
[folder.id],
|
||||
{ folderId: folder.id },
|
||||
undefined,
|
||||
googleHelper.folderMimeType,
|
||||
)
|
||||
.then(dataFolder => ({
|
||||
...properties,
|
||||
dataFolderId: dataFolder.id,
|
||||
}));
|
||||
})
|
||||
.then((properties) => {
|
||||
name: '.stackedit-data',
|
||||
parents: [folder.id],
|
||||
appProperties: { folderId: folder.id },
|
||||
mediaType: googleHelper.folderMimeType,
|
||||
})).id;
|
||||
}
|
||||
|
||||
// Make sure trash folder exists
|
||||
if (properties.trashFolderId) {
|
||||
return properties;
|
||||
}
|
||||
return googleHelper.uploadFile(
|
||||
if (!appProperties.trashFolderId) {
|
||||
appProperties.trashFolderId = (await googleHelper.uploadFile({
|
||||
token,
|
||||
'.stackedit-trash',
|
||||
[folder.id],
|
||||
{ folderId: folder.id },
|
||||
undefined,
|
||||
googleHelper.folderMimeType,
|
||||
)
|
||||
.then(trashFolder => ({
|
||||
...properties,
|
||||
trashFolderId: trashFolder.id,
|
||||
}));
|
||||
})
|
||||
.then((properties) => {
|
||||
name: '.stackedit-trash',
|
||||
parents: [folder.id],
|
||||
appProperties: { folderId: folder.id },
|
||||
mediaType: googleHelper.folderMimeType,
|
||||
})).id;
|
||||
}
|
||||
|
||||
// Update workspace if some properties are missing
|
||||
if (properties.folderId === folder.appProperties.folderId
|
||||
&& properties.dataFolderId === folder.appProperties.dataFolderId
|
||||
&& properties.trashFolderId === folder.appProperties.trashFolderId
|
||||
if (appProperties.folderId !== folder.appProperties.folderId
|
||||
|| appProperties.dataFolderId !== folder.appProperties.dataFolderId
|
||||
|| appProperties.trashFolderId !== folder.appProperties.trashFolderId
|
||||
) {
|
||||
return properties;
|
||||
}
|
||||
return googleHelper.uploadFile(
|
||||
await googleHelper.uploadFile({
|
||||
token,
|
||||
undefined,
|
||||
undefined,
|
||||
properties,
|
||||
undefined,
|
||||
googleHelper.folderMimeType,
|
||||
folder.id,
|
||||
)
|
||||
.then(() => properties);
|
||||
})
|
||||
.then((properties) => {
|
||||
appProperties,
|
||||
mediaType: googleHelper.folderMimeType,
|
||||
fileId: folder.id,
|
||||
});
|
||||
}
|
||||
|
||||
// Update workspace in the store
|
||||
const workspaceId = makeWorkspaceId(folder.id);
|
||||
store.dispatch('data/patchWorkspaces', {
|
||||
@ -103,58 +77,56 @@ export default new Provider({
|
||||
url: window.location.href,
|
||||
folderId: folder.id,
|
||||
teamDriveId: folder.teamDriveId,
|
||||
dataFolderId: properties.dataFolderId,
|
||||
trashFolderId: properties.trashFolderId,
|
||||
dataFolderId: appProperties.dataFolderId,
|
||||
trashFolderId: appProperties.trashFolderId,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
// Return the workspace
|
||||
return store.getters['data/sanitizedWorkspaces'][workspaceId];
|
||||
});
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
const workspace = getWorkspace(utils.queryParams.folderId);
|
||||
// See if we already have a token
|
||||
const googleTokens = store.getters['data/googleTokens'];
|
||||
// Token sub is in the workspace or in the url if workspace is about to be created
|
||||
const token = workspace ? googleTokens[workspace.sub] : googleTokens[utils.queryParams.sub];
|
||||
if (token && token.isDrive && token.driveFullAccess) {
|
||||
return token;
|
||||
}
|
||||
const { sub } = getWorkspace(utils.queryParams.folderId) || utils.queryParams;
|
||||
// See if we already have a token
|
||||
let token = store.getters['data/googleTokens'][sub];
|
||||
// If no token has been found, popup an authorize window and get one
|
||||
return store.dispatch('modal/workspaceGoogleRedirection', {
|
||||
onResolve: () => googleHelper.addDriveAccount(true, utils.queryParams.sub),
|
||||
});
|
||||
})
|
||||
.then(token => Promise.resolve()
|
||||
if (!token || !token.isDrive || !token.driveFullAccess) {
|
||||
await store.dispatch('modal/workspaceGoogleRedirection');
|
||||
token = await googleHelper.addDriveAccount(true, utils.queryParams.sub);
|
||||
}
|
||||
|
||||
let { folderId } = utils.queryParams;
|
||||
// If no folderId is provided, create one
|
||||
.then(() => utils.queryParams.folderId || googleHelper.uploadFile(
|
||||
if (!folderId) {
|
||||
const folder = await googleHelper.uploadFile({
|
||||
token,
|
||||
'StackEdit workspace',
|
||||
[],
|
||||
undefined,
|
||||
undefined,
|
||||
googleHelper.folderMimeType,
|
||||
)
|
||||
.then(folder => initFolder(token, {
|
||||
name: 'StackEdit workspace',
|
||||
parents: [],
|
||||
mediaType: googleHelper.folderMimeType,
|
||||
});
|
||||
await initFolder(token, {
|
||||
...folder,
|
||||
appProperties: {},
|
||||
})
|
||||
.then(() => folder.id)))
|
||||
// If workspace does not exist, initialize one
|
||||
.then(folderId => getWorkspace(folderId) || googleHelper.getFile(token, folderId)
|
||||
.then((folder) => {
|
||||
});
|
||||
folderId = folder.id;
|
||||
}
|
||||
|
||||
// Init workspace
|
||||
let workspace = getWorkspace(folderId);
|
||||
if (!workspace) {
|
||||
let folder;
|
||||
try {
|
||||
folder = googleHelper.getFile(token, folderId);
|
||||
} catch (err) {
throw new Error(`Folder ${folderId} is not accessible. Make sure you have the right permissions.`);
}
folder.appProperties = folder.appProperties || {};
const folderIdProperty = folder.appProperties.folderId;
if (folderIdProperty && folderIdProperty !== folderId) {
throw new Error(`Folder ${folderId} is part of another workspace.`);
}
return initFolder(token, folder);
}, () => {
throw new Error(`Folder ${folderId} is not accessible. Make sure you have the right permissions.`);
}))
.then((workspace) => {
await initFolder(token, folder);
workspace = getWorkspace(folderId);
}

// Fix the URL hash
utils.setQueryParams(makeWorkspaceParams(workspace.folderId));
if (workspace.url !== window.location.href) {
@ -166,17 +138,12 @@ export default new Provider({
});
}
return store.getters['data/sanitizedWorkspaces'][workspace.id];
}));
},
performAction() {
return Promise.resolve()
.then(() => {
async performAction() {
const state = googleHelper.driveState || {};
const token = this.getToken();
switch (token && state.action) {
case 'create':
return Promise.resolve()
.then(() => {
case 'create': {
const driveFolder = googleHelper.driveActionFolder;
let syncData = store.getters['data/syncData'][driveFolder.id];
if (!syncData && driveFolder.appProperties.id) {
@ -196,17 +163,14 @@ export default new Provider({
[syncData.id]: syncData,
});
}
return fileSvc.createFile({
const file = await fileSvc.createFile({
parentId: syncData && syncData.itemId,
}, true)
.then((file) => {
}, true);
store.commit('file/setCurrentId', file.id);
// File will be created on next workspace sync
});
});
case 'open':
return Promise.resolve()
.then(() => {
break;
}
case 'open': {
// open first file only
const firstFile = googleHelper.driveActionFiles[0];
const syncData = store.getters['data/syncData'][firstFile.id];
@ -215,24 +179,24 @@ export default new Provider({
} else {
store.commit('file/setCurrentId', syncData.itemId);
}
});
default:
return null;
break;
}
default:
}
});
},
getChanges() {
async getChanges() {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
const startPageToken = store.getters['data/localSettings'].syncStartPageToken;
return googleHelper.getChanges(syncToken, startPageToken, false, workspace.teamDriveId)
.then((result) => {
const lastStartPageToken = store.getters['data/localSettings'].syncStartPageToken;
const { changes, startPageToken } = await googleHelper
.getChanges(syncToken, lastStartPageToken, false, workspace.teamDriveId);

// Collect possible parent IDs
const parentIds = {};
Object.entries(store.getters['data/syncDataByItemId']).forEach(([id, syncData]) => {
parentIds[syncData.id] = id;
});
result.changes.forEach((change) => {
changes.forEach((change) => {
const { id } = (change.file || {}).appProperties || {};
if (id) {
parentIds[change.fileId] = id;
@ -240,8 +204,8 @@ export default new Provider({
});

// Collect changes
const changes = [];
result.changes.forEach((change) => {
const result = [];
changes.forEach((change) => {
// Ignore changes on StackEdit own folders
if (change.fileId === workspace.folderId
|| change.fileId === workspace.dataFolderId
@ -335,41 +299,37 @@ export default new Provider({

// Push change
change.syncDataId = change.fileId;
changes.push(change);
result.push(change);
if (contentChange) {
changes.push(contentChange);
result.push(contentChange);
}
});
syncStartPageToken = result.startPageToken;
return changes;
});
syncStartPageToken = startPageToken;
return result;
},
onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
syncStartPageToken,
});
},
saveSimpleItem(item, syncData, ifNotTooLate) {
return Promise.resolve()
.then(() => {
async saveSimpleItem(item, syncData, ifNotTooLate) {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
let file;
if (item.type !== 'file' && item.type !== 'folder') {
return googleHelper.uploadFile(
syncToken,
JSON.stringify(item),
[workspace.dataFolderId],
{
// For sync/publish locations, store item as filename
file = await googleHelper.uploadFile({
token: syncToken,
name: JSON.stringify(item),
parents: [workspace.dataFolderId],
appProperties: {
folderId: workspace.folderId,
},
undefined,
undefined,
syncData && syncData.id,
syncData && syncData.parentIds,
fileId: syncData && syncData.id,
oldParents: syncData && syncData.parentIds,
ifNotTooLate,
);
}

});
} else {
// For type `file` or `folder`
const parentSyncData = store.getters['data/syncDataByItemId'][item.parentId];
let parentId;
@ -381,45 +341,42 @@ export default new Provider({
parentId = workspace.folderId;
}

return googleHelper.uploadFile(
syncToken,
item.name,
[parentId],
{
file = await googleHelper.uploadFile({
token: syncToken,
name: item.name,
parents: [parentId],
appProperties: {
id: item.id,
folderId: workspace.folderId,
},
undefined,
item.type === 'folder' ? googleHelper.folderMimeType : undefined,
syncData && syncData.id,
syncData && syncData.parentIds,
mediaType: item.type === 'folder' ? googleHelper.folderMimeType : undefined,
fileId: syncData && syncData.id,
oldParents: syncData && syncData.parentIds,
ifNotTooLate,
);
})
.then(file => ({
});
}
// Build sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
}));
};
},
removeItem(syncData, ifNotTooLate) {
async removeItem(syncData, ifNotTooLate) {
// Ignore content deletion
if (syncData.type === 'content') {
return Promise.resolve();
}
if (syncData.type !== 'content') {
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.removeFile(syncToken, syncData.id, ifNotTooLate);
await googleHelper.removeFile(syncToken, syncData.id, ifNotTooLate);
}
},
downloadContent(token, syncLocation) {
async downloadContent(token, syncLocation) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!syncData || !contentSyncData) {
return Promise.resolve();
return null;
}
return googleHelper.downloadFile(token, syncData.id)
.then((content) => {
const content = await googleHelper.downloadFile(token, syncData.id);
const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
@ -429,6 +386,7 @@ export default new Provider({
},
});
}

// Open the file requested by action if it wasn't synced yet
if (fileIdToOpen && fileIdToOpen === syncData.id) {
fileIdToOpen = null;
@ -438,16 +396,14 @@ export default new Provider({
}, 10);
}
return item;
});
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
return null;
}
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.downloadFile(syncToken, syncData.id)
.then((content) => {
const content = await googleHelper.downloadFile(syncToken, syncData.id);
const item = JSON.parse(content);
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
@ -458,50 +414,37 @@ export default new Provider({
});
}
return item;
});
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
async uploadContent(token, content, syncLocation, ifNotTooLate) {
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (contentSyncData && contentSyncData.hash === content.hash) {
return Promise.resolve(syncLocation);
}
return Promise.resolve()
.then(() => {
if (!contentSyncData || contentSyncData.hash !== content.hash) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
let file;
if (syncData) {
// Only update file media
return googleHelper.uploadFile(
file = await googleHelper.uploadFile({
token,
undefined,
undefined,
undefined,
Provider.serializeContent(content),
undefined,
syncData.id,
undefined,
media: Provider.serializeContent(content),
fileId: syncData.id,
ifNotTooLate,
);
}
});
} else {
// Create file with media
const workspace = store.getters['workspace/currentWorkspace'];
// Use deepCopy to freeze objects
const item = utils.deepCopy(store.state.file.itemMap[syncLocation.fileId]);
const parentSyncData = store.getters['data/syncDataByItemId'][item.parentId];
return googleHelper.uploadFile(
file = await googleHelper.uploadFile({
token,
item.name,
[parentSyncData ? parentSyncData.id : workspace.folderId],
{
name: item.name,
parents: [parentSyncData ? parentSyncData.id : workspace.folderId],
appProperties: {
id: item.id,
folderId: workspace.folderId,
},
Provider.serializeContent(content),
undefined,
undefined,
undefined,
media: Provider.serializeContent(content),
ifNotTooLate,
)
.then((file) => {
});
store.dispatch('data/patchSyncData', {
[file.id]: {
id: file.id,
@ -510,10 +453,8 @@ export default new Provider({
hash: item.hash,
},
});
return file;
});
})
.then(file => store.dispatch('data/patchSyncData', {
}
store.dispatch('data/patchSyncData', {
[`${file.id}/content`]: {
// Build sync data
id: `${file.id}/content`,
@ -521,34 +462,32 @@ export default new Provider({
type: content.type,
hash: content.hash,
},
}))
.then(() => syncLocation);
},
uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
});
}
return syncLocation;
},
async uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (!syncData || syncData.hash !== item.hash) {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.uploadFile(
syncToken,
JSON.stringify({
const file = await googleHelper.uploadFile({
token: syncToken,
name: JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
}),
[workspace.dataFolderId],
{
parents: [workspace.dataFolderId],
appProperties: {
folderId: workspace.folderId,
},
JSON.stringify(item),
undefined,
syncData && syncData.id,
syncData && syncData.parentIds,
media: JSON.stringify(item),
fileId: syncData && syncData.id,
oldParents: syncData && syncData.parentIds,
ifNotTooLate,
)
.then(file => store.dispatch('data/patchSyncData', {
});
store.dispatch('data/patchSyncData', {
[file.id]: {
// Build sync data
id: file.id,
@ -556,21 +495,22 @@ export default new Provider({
type: item.type,
hash: item.hash,
},
}));
});
}
},
listRevisions(token, fileId) {
return getSyncData(fileId)
.then(syncData => googleHelper.getFileRevisions(token, syncData.id))
.then(revisions => revisions.map(revision => ({
async listRevisions(token, fileId) {
const syncData = Provider.getContentSyncData(fileId);
const revisions = await googleHelper.getFileRevisions(token, syncData.id);
return revisions.map(revision => ({
id: revision.id,
sub: revision.lastModifyingUser && revision.lastModifyingUser.permissionId,
created: new Date(revision.modifiedTime).getTime(),
}))
.sort((revision1, revision2) => revision2.created - revision1.created));
.sort((revision1, revision2) => revision2.created - revision1.created);
},
getRevisionContent(token, fileId, revisionId) {
return getSyncData(fileId)
.then(syncData => googleHelper.downloadFileRevision(token, syncData.id, revisionId))
.then(content => Provider.parseContent(content, `${fileId}/content`));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const content = await googleHelper.downloadFileRevision(token, syncData.id, revisionId);
return Provider.parseContent(content, `${fileId}/content`);
},
});
|
@ -2,31 +2,37 @@ import networkSvc from '../../networkSvc';
import utils from '../../utils';
import store from '../../../store';

const request = (token, options = {}) => {
const request = async (token, options = {}) => {
const baseUrl = `${token.dbUrl}/`;
const getLastToken = () => store.getters['data/couchdbTokens'][token.sub];

const ifUnauthorized = cb => (err) => {
const assertUnauthorized = (err) => {
if (err.status !== 401) {
throw err;
}
return cb(err);
};

const onUnauthorized = () => networkSvc.request({
const onUnauthorized = async () => {
try {
const { name, password } = getLastToken();
await networkSvc.request({
method: 'POST',
url: utils.resolveUrl(baseUrl, '../_session'),
withCredentials: true,
body: {
name: getLastToken().name,
password: getLastToken().password,
name,
password,
},
})
.catch(ifUnauthorized(() => store.dispatch('modal/open', {
});
} catch (err) {
assertUnauthorized(err);
await store.dispatch('modal/open', {
type: 'couchdbCredentials',
token: getLastToken(),
})
.then(onUnauthorized)));
});
await onUnauthorized();
}
};

const config = {
...options,
@ -38,55 +44,75 @@ const request = (token, options = {}) => {
withCredentials: true,
};

return networkSvc.request(config)
.catch(ifUnauthorized(() => onUnauthorized()
.then(() => networkSvc.request(config))))
.then(res => res.body)
.catch((err) => {
try {
let res;
try {
res = await networkSvc.request(config);
} catch (err) {
assertUnauthorized(err);
await onUnauthorized();
res = await networkSvc.request(config);
}
return res.body;
} catch (err) {
if (err.status === 409) {
throw new Error('TOO_LATE');
}
throw err;
});
}
};

export default {

/**
* http://docs.couchdb.org/en/2.1.1/api/database/common.html#db
*/
getDb(token) {
return request(token);
},
getChanges(token, lastSeq) {

/**
* http://docs.couchdb.org/en/2.1.1/api/database/changes.html#db-changes
*/
async getChanges(token, lastSeq) {
const result = {
changes: [],
lastSeq,
};

const getPage = (since = 0) => request(token, {
const getPage = async () => {
const body = await request(token, {
method: 'GET',
path: '_changes',
params: {
since,
since: result.lastSeq || 0,
include_docs: true,
limit: 1000,
},
})
.then((body) => {
result.changes = result.changes.concat(body.results);
if (body.pending) {
return getPage(body.last_seq);
}
result.lastSeq = body.last_seq;
return result;
});
result.changes = [...result.changes, ...body.results];
result.lastSeq = body.last_seq;
if (body.pending) {
return getPage();
}
return result;
};

return getPage(lastSeq);
return getPage();
},
uploadDocument(

/**
* http://docs.couchdb.org/en/2.1.1/api/database/common.html#post--db
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#put--db-docid
*/
async uploadDocument({
token,
item,
data = null,
dataType = null,
documentId = null,
rev = null,
) {
}) {
const options = {
method: 'POST',
body: { item, time: Date.now() },
@ -110,34 +136,48 @@ export default {
}
return request(token, options);
},
removeDocument(token, documentId, rev) {

/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#delete--db-docid
*/
async removeDocument(token, documentId, rev) {
return request(token, {
method: 'DELETE',
path: documentId,
params: { rev },
});
},
retrieveDocument(token, documentId, rev) {

/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#get--db-docid
*/
async retrieveDocument(token, documentId, rev) {
return request(token, {
path: documentId,
params: { rev },
});
},
retrieveDocumentWithAttachments(token, documentId, rev) {
return request(token, {

/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#get--db-docid
*/
async retrieveDocumentWithAttachments(token, documentId, rev) {
const body = await request(token, {
path: documentId,
params: { attachments: true, rev },
})
.then((body) => {
});
body.attachments = {};
// eslint-disable-next-line no-underscore-dangle
Object.entries(body._attachments).forEach(([name, attachment]) => {
body.attachments[name] = utils.decodeBase64(attachment.data);
});
return body;
});
},
retrieveDocumentWithRevisions(token, documentId) {

/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#get--db-docid
*/
async retrieveDocumentWithRevisions(token, documentId) {
return request(token, {
path: documentId,
params: {
|
@ -1,8 +1,6 @@
import networkSvc from '../../networkSvc';
import store from '../../../store';

let Dropbox;

const getAppKey = (fullAccess) => {
if (fullAccess) {
return 'lq6mwopab8wskas';
@ -22,89 +20,105 @@ const request = (token, options, args) => networkSvc.request({
});

export default {
startOauth2(fullAccess, sub = null, silent = false) {
return networkSvc.startOauth2(

/**
* https://www.dropbox.com/developers/documentation/http/documentation#oauth2-authorize
*/
async startOauth2(fullAccess, sub = null, silent = false) {
const { accessToken } = await networkSvc.startOauth2(
'https://www.dropbox.com/oauth2/authorize',
{
client_id: getAppKey(fullAccess),
response_type: 'token',
},
silent,
)
);

// Call the user info endpoint
.then(({ accessToken }) => request({ accessToken }, {
const { body } = await request({ accessToken }, {
method: 'POST',
url: 'https://api.dropboxapi.com/2/users/get_current_account',
})
.then((res) => {
});

// Check the returned sub consistency
if (sub && `${res.body.account_id}` !== sub) {
if (sub && `${body.account_id}` !== sub) {
throw new Error('Dropbox account ID not expected.');
}

// Build token object including scopes and sub
const token = {
accessToken,
name: res.body.name.display_name,
sub: `${res.body.account_id}`,
name: body.name.display_name,
sub: `${body.account_id}`,
fullAccess,
};

// Add token to dropboxTokens
store.dispatch('data/setDropboxToken', token);
return token;
}));
},
loadClientScript() {
if (Dropbox) {
return Promise.resolve();
}
return networkSvc.loadScript('https://www.dropbox.com/static/api/2/dropins.js')
.then(() => {
({ Dropbox } = window);
});
},
addAccount(fullAccess = false) {
return this.startOauth2(fullAccess);
},
uploadFile(token, path, content, fileId) {
return request(token, {

/**
* https://www.dropbox.com/developers/documentation/http/documentation#files-upload
*/
async uploadFile({
token,
path,
content,
fileId,
}) {
return (await request(token, {
method: 'POST',
url: 'https://content.dropboxapi.com/2/files/upload',
body: content,
}, {
path: fileId || path,
mode: 'overwrite',
})
.then(res => res.body);
})).body;
},
downloadFile(token, path, fileId) {
return request(token, {

/**
* https://www.dropbox.com/developers/documentation/http/documentation#files-download
*/
async downloadFile({
token,
path,
fileId,
}) {
const res = await request(token, {
method: 'POST',
url: 'https://content.dropboxapi.com/2/files/download',
raw: true,
}, {
path: fileId || path,
})
.then(res => ({
});
return {
id: JSON.parse(res.headers['dropbox-api-result']).id,
content: res.body,
}));
};
},
openChooser(token) {
return this.loadClientScript()
.then(() => new Promise((resolve) => {
Dropbox.appKey = getAppKey(token.fullAccess);
Dropbox.choose({

/**
* https://www.dropbox.com/developers/chooser
*/
async openChooser(token) {
if (!window.Dropbox) {
await networkSvc.loadScript('https://www.dropbox.com/static/api/2/dropins.js');
}
return new Promise((resolve) => {
window.Dropbox.appKey = getAppKey(token.fullAccess);
window.Dropbox.choose({
multiselect: true,
linkType: 'direct',
success: (files) => {
const paths = files.map((file) => {
success: files => resolve(files.map((file) => {
const path = file.link.replace(/.*\/view\/[^/]*/, '');
return decodeURI(path);
});
resolve(paths);
},
})),
cancel: () => resolve([]),
});
}));
});
},
};
|
@ -20,7 +20,8 @@ const request = (token, options) => networkSvc.request({
const repoRequest = (token, owner, repo, options) => request(token, {
...options,
url: `https://api.github.com/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/${options.url}`,
});
})
.then(res => res.body);

const getCommitMessage = (name, path) => {
const message = store.getters['data/computedSettings'].github[name];
@ -28,95 +29,131 @@ const getCommitMessage = (name, path) => {
};

export default {
startOauth2(scopes, sub = null, silent = false) {
return networkSvc.startOauth2(

/**
* https://developer.github.com/apps/building-oauth-apps/authorization-options-for-oauth-apps/
*/
async startOauth2(scopes, sub = null, silent = false) {
const { code } = await networkSvc.startOauth2(
'https://github.com/login/oauth/authorize',
{
client_id: clientId,
scope: scopes.join(' '),
},
silent,
)
);

// Exchange code with token
.then(data => networkSvc.request({
const accessToken = (await networkSvc.request({
method: 'GET',
url: 'oauth2/githubToken',
params: {
clientId,
code: data.code,
code,
},
})
.then(res => res.body))
})).body;

// Call the user info endpoint
.then(accessToken => networkSvc.request({
const user = (await networkSvc.request({
method: 'GET',
url: 'https://api.github.com/user',
params: {
access_token: accessToken,
},
})
.then((res) => {
})).body;

// Check the returned sub consistency
if (sub && `${res.body.id}` !== sub) {
if (sub && `${user.id}` !== sub) {
throw new Error('GitHub account ID not expected.');
}

// Build token object including scopes and sub
const token = {
scopes,
accessToken,
name: res.body.login,
sub: `${res.body.id}`,
name: user.login,
sub: `${user.id}`,
repoFullAccess: scopes.indexOf('repo') !== -1,
};

// Add token to githubTokens
store.dispatch('data/setGithubToken', token);
return token;
}));
},
addAccount(repoFullAccess = false) {
async addAccount(repoFullAccess = false) {
return this.startOauth2(getScopes({ repoFullAccess }));
},
getUser(userId) {
return networkSvc.request({

/**
* Getting a user from its userId is not feasible with API v3.
* Using an undocumented endpoint...
*/
async getUser(userId) {
const user = (await networkSvc.request({
url: `https://api.github.com/user/${userId}`,
params: {
t: Date.now(), // Prevent from caching
},
})
.then((res) => {
})).body;
store.commit('userInfo/addItem', {
id: `gh:${res.body.id}`,
name: res.body.login,
imageUrl: res.body.avatar_url || '',
});
return res.body;
id: `gh:${user.id}`,
name: user.login,
imageUrl: user.avatar_url || '',
});
return user;
},
getTree(token, owner, repo, sha) {
return repoRequest(token, owner, repo, {
url: `git/trees/${encodeURIComponent(sha)}?recursive=1`,
})
.then((res) => {
if (res.body.truncated) {

/**
* https://developer.github.com/v3/repos/commits/#get-a-single-commit
* https://developer.github.com/v3/git/trees/#get-a-tree
*/
async getTree({
token,
owner,
repo,
branch,
}) {
const { commit } = (await repoRequest(token, owner, repo, {
url: `commits/${encodeURIComponent(branch)}`,
})).body;
const { tree, truncated } = (await repoRequest(token, owner, repo, {
url: `git/trees/${encodeURIComponent(commit.tree.sha)}?recursive=1`,
})).body;
if (truncated) {
throw new Error('Git tree too big. Please remove some files in the repository.');
}
return res.body.tree;
});
return tree;
},
getHeadTree(token, owner, repo, branch) {
return repoRequest(token, owner, repo, {
url: `commits/${encodeURIComponent(branch)}`,
})
.then(res => this.getTree(token, owner, repo, res.body.commit.tree.sha));
},
getCommits(token, owner, repo, sha, path) {

/**
* https://developer.github.com/v3/repos/commits/#list-commits-on-a-repository
*/
async getCommits({
token,
owner,
repo,
sha,
path,
}) {
return repoRequest(token, owner, repo, {
url: 'commits',
params: { sha, path },
})
.then(res => res.body);
});
},
uploadFile(token, owner, repo, branch, path, content, sha) {

/**
* https://developer.github.com/v3/repos/contents/#create-a-file
* https://developer.github.com/v3/repos/contents/#update-a-file
*/
async uploadFile({
token,
owner,
repo,
branch,
path,
content,
sha,
}) {
return repoRequest(token, owner, repo, {
method: 'PUT',
url: `contents/${encodeURIComponent(path)}`,
@ -126,10 +163,20 @@ export default {
sha,
branch,
},
})
.then(res => res.body);
});
},
removeFile(token, owner, repo, branch, path, sha) {

/**
* https://developer.github.com/v3/repos/contents/#delete-a-file
*/
async removeFile({
token,
owner,
repo,
branch,
path,
sha,
}) {
return repoRequest(token, owner, repo, {
method: 'DELETE',
url: `contents/${encodeURIComponent(path)}`,
@ -138,21 +185,42 @@ export default {
sha,
branch,
},
})
.then(res => res.body);
});
},
downloadFile(token, owner, repo, branch, path) {
return repoRequest(token, owner, repo, {

/**
* https://developer.github.com/v3/repos/contents/#get-contents
*/
async downloadFile({
token,
owner,
repo,
branch,
path,
}) {
const body = await repoRequest(token, owner, repo, {
url: `contents/${encodeURIComponent(path)}`,
params: { ref: branch },
})
.then(res => ({
sha: res.body.sha,
content: utils.decodeBase64(res.body.content),
}));
});
return {
sha: body.sha,
content: utils.decodeBase64(body.content),
};
},
uploadGist(token, description, filename, content, isPublic, gistId) {
return request(token, gistId ? {

/**
* https://developer.github.com/v3/gists/#create-a-gist
* https://developer.github.com/v3/gists/#edit-a-gist
*/
async uploadGist({
token,
description,
filename,
content,
isPublic,
gistId,
}) {
const { body } = await request(token, gistId ? {
method: 'PATCH',
url: `https://api.github.com/gists/${gistId}`,
body: {
@ -175,19 +243,24 @@ export default {
},
public: isPublic,
},
})
.then(res => res.body);
});
return body;
},
downloadGist(token, gistId, filename) {
return request(token, {

/**
* https://developer.github.com/v3/gists/#get-a-single-gist
*/
async downloadGist({
token,
gistId,
filename,
}) {
const result = (await request(token, {
url: `https://api.github.com/gists/${gistId}`,
})
.then((res) => {
const result = res.body.files[filename];
})).body.files[filename];
if (!result) {
throw new Error('Gist file not found.');
}
return result.content;
});
},
};
|
@ -6,8 +6,6 @@ const clientId = GOOGLE_CLIENT_ID;
const apiKey = 'AIzaSyC_M4RA9pY6XmM9pmFxlT59UPMO7aHr9kk';
const appsDomain = null;
const tokenExpirationMargin = 5 * 60 * 1000; // 5 min (Google tokens expire after 1h)
let gapi;
let google;

const driveAppDataScopes = ['https://www.googleapis.com/auth/drive.appdata'];
const getDriveScopes = token => [token.driveFullAccess
@ -17,8 +15,6 @@ const getDriveScopes = token => [token.driveFullAccess
const bloggerScopes = ['https://www.googleapis.com/auth/blogger'];
const photosScopes = ['https://www.googleapis.com/auth/photos'];

const libraries = ['picker'];

const checkIdToken = (idToken) => {
try {
const token = idToken.split('.');
@ -43,15 +39,16 @@ export default {
driveState,
driveActionFolder: null,
driveActionFiles: [],
request(token, options) {
return networkSvc.request({
async $request(token, options) {
try {
return (await networkSvc.request({
...options,
headers: {
...options.headers || {},
Authorization: `Bearer ${token.accessToken}`,
},
}, true)
.catch((err) => {
}, true)).body;
} catch (err) {
const { reason } = (((err.body || {}).error || {}).errors || [])[0] || {};
if (reason === 'authError') {
// Mark the token as revoked and get a new one
@ -60,13 +57,172 @@ export default {
expiresOn: 0,
});
// Refresh token and retry
return this.refreshToken(token, token.scopes)
.then(refreshedToken => this.request(refreshedToken, options));
const refreshedToken = await this.refreshToken(token, token.scopes);
return this.$request(refreshedToken, options);
}
throw err;
});
}
},
uploadFileInternal(

/**
* https://developers.google.com/identity/protocols/OpenIDConnect
*/
async startOauth2(scopes, sub = null, silent = false) {
const { accessToken, expiresIn, idToken } = await networkSvc.startOauth2(
'https://accounts.google.com/o/oauth2/v2/auth',
{
client_id: clientId,
response_type: 'token id_token',
scope: ['openid', ...scopes].join(' '),
hd: appsDomain,
login_hint: sub,
prompt: silent ? 'none' : null,
nonce: utils.uid(),
},
silent,
);

// Call the token info endpoint
const { body } = await networkSvc.request({
method: 'POST',
url: 'https://www.googleapis.com/oauth2/v3/tokeninfo',
params: {
access_token: accessToken,
},
}, true);

// Check the returned client ID consistency
if (body.aud !== clientId) {
throw new Error('Client ID inconsistent.');
}
// Check the returned sub consistency
if (sub && `${body.sub}` !== sub) {
throw new Error('Google account ID not expected.');
}

// Build token object including scopes and sub
const existingToken = store.getters['data/googleTokens'][body.sub];
const token = {
scopes,
accessToken,
expiresOn: Date.now() + (expiresIn * 1000),
idToken,
sub: body.sub,
name: (existingToken || {}).name || 'Unknown',
isLogin: !store.getters['workspace/mainWorkspaceToken'] &&
scopes.indexOf('https://www.googleapis.com/auth/drive.appdata') !== -1,
isSponsor: false,
isDrive: scopes.indexOf('https://www.googleapis.com/auth/drive') !== -1 ||
scopes.indexOf('https://www.googleapis.com/auth/drive.file') !== -1,
isBlogger: scopes.indexOf('https://www.googleapis.com/auth/blogger') !== -1,
isPhotos: scopes.indexOf('https://www.googleapis.com/auth/photos') !== -1,
driveFullAccess: scopes.indexOf('https://www.googleapis.com/auth/drive') !== -1,
};

try {
// Call the user info endpoint
token.name = (await this.getUser(token.sub)).displayName;
} catch (err) {
if (err.status === 404) {
store.dispatch('notification/info', 'Please activate Google Plus to change your account name and photo.');
} else {
throw err;
}
}

if (existingToken) {
// We probably retrieved a new token with restricted scopes.
// That's no problem, token will be refreshed later with merged scopes.
// Restore flags
Object.assign(token, {
isLogin: existingToken.isLogin || token.isLogin,
isSponsor: existingToken.isSponsor,
isDrive: existingToken.isDrive || token.isDrive,
isBlogger: existingToken.isBlogger || token.isBlogger,
isPhotos: existingToken.isPhotos || token.isPhotos,
driveFullAccess: existingToken.driveFullAccess || token.driveFullAccess,
});
}

if (token.isLogin) {
try {
token.isSponsor = (await networkSvc.request({
method: 'GET',
url: 'userInfo',
params: {
idToken: token.idToken,
},
})).body.sponsorUntil > Date.now();
} catch (err) {
// Ignore
}
}

// Add token to googleTokens
store.dispatch('data/setGoogleToken', token);
return token;
},
async refreshToken(token, scopes = []) {
const { sub } = token;
const lastToken = store.getters['data/googleTokens'][sub];
const mergedScopes = [...new Set([
...scopes,
...lastToken.scopes,
])];

if (
// If we already have permissions for the requested scopes
mergedScopes.length === lastToken.scopes.length &&
// And lastToken is not expired
lastToken.expiresOn > Date.now() + tokenExpirationMargin &&
// And in case of a login token, ID token is still valid
(!lastToken.isLogin || checkIdToken(lastToken.idToken))
) {
return lastToken;
}

// New scopes are requested or existing token is about to expire.
// Try to get a new token in background
try {
return await this.startOauth2(mergedScopes, sub, true);
} catch (err) {
// If it fails try to popup a window
if (store.state.offline) {
throw err;
}
await store.dispatch('modal/providerRedirection', { providerName: 'Google' });
return this.startOauth2(mergedScopes, sub);
}
},
signin() {
return this.startOauth2(driveAppDataScopes);
},
addDriveAccount(fullAccess = false, sub = null) {
return this.startOauth2(getDriveScopes({ driveFullAccess: fullAccess }), sub);
},
addBloggerAccount() {
return this.startOauth2(bloggerScopes);
},
addPhotosAccount() {
return this.startOauth2(photosScopes);
},
async getSponsorship(token) {
const refreshedToken = await this.refreshToken(token);
return networkSvc.request({
method: 'GET',
url: 'userInfo',
params: {
idToken: refreshedToken.idToken,
},
}, true);
},

/**
* https://developers.google.com/drive/v3/reference/files/create
* https://developers.google.com/drive/v3/reference/files/update
* https://developers.google.com/drive/v3/web/simple-upload
*/
async $uploadFile({
refreshedToken,
name,
parents,
@ -76,10 +232,9 @@ export default {
fileId = null,
oldParents = null,
ifNotTooLate = cb => res => cb(res),
) {
return Promise.resolve()
}) {
// Refreshing a token can take a while if an oauth window pops up, make sure it's not too late
.then(ifNotTooLate(() => {
return ifNotTooLate(() => {
const options = {
method: 'POST',
url: 'https://www.googleapis.com/drive/v3/files',
@ -118,7 +273,7 @@ export default {
'https://www.googleapis.com/',
'https://www.googleapis.com/upload/',
);
return this.request(refreshedToken, {
return this.$request(refreshedToken, {
...options,
params: {
...params,
@ -128,41 +283,123 @@ export default {
'Content-Type': `multipart/mixed; boundary="${boundary}"`,
},
body: multipartRequestBody,
}).then(res => res.body);
});
}
if (mediaType) {
metadata.mimeType = mediaType;
}
return this.request(refreshedToken, {
return this.$request(refreshedToken, {
...options,
body: metadata,
params,
}).then(res => res.body);
}));
});
});
},
downloadFileInternal(refreshedToken, id) {
return this.request(refreshedToken, {
async uploadFile({
token,
name,
parents,
appProperties,
media,
mediaType,
fileId,
oldParents,
ifNotTooLate,
}) {
const refreshedToken = await this.refreshToken(token, getDriveScopes(token));
return this.$uploadFile({
refreshedToken,
name,
parents,
appProperties,
media,
mediaType,
fileId,
oldParents,
ifNotTooLate,
});
},
async uploadAppDataFile({
token,
name,
media,
fileId,
ifNotTooLate,
}) {
const refreshedToken = await this.refreshToken(token, driveAppDataScopes);
return this.$uploadFile({
refreshedToken,
name,
parents: ['appDataFolder'],
media,
fileId,
ifNotTooLate,
});
},

/**
* https://developers.google.com/drive/v3/reference/files/get
*/
async getFile(token, id) {
const refreshedToken = await this.refreshToken(token, getDriveScopes(token));
return this.$request(refreshedToken, {
method: 'GET',
url: `https://www.googleapis.com/drive/v3/files/${id}`,
params: {
fields: 'id,name,mimeType,appProperties,teamDriveId',
supportsTeamDrives: true,
},
});
},

/**
* https://developers.google.com/drive/v3/web/manage-downloads
*/
async $downloadFile(refreshedToken, id) {
return this.$request(refreshedToken, {
method: 'GET',
url: `https://www.googleapis.com/drive/v3/files/${id}?alt=media`,
raw: true,
}).then(res => res.body);
});
},
removeFileInternal(refreshedToken, id, ifNotTooLate = cb => res => cb(res)) {
return Promise.resolve()
async downloadFile(token, id) {
const refreshedToken = await this.refreshToken(token, getDriveScopes(token));
return this.$downloadFile(refreshedToken, id);
},
async downloadAppDataFile(token, id) {
const refreshedToken = await this.refreshToken(token, driveAppDataScopes);
return this.$downloadFile(refreshedToken, id);
},

/**
* https://developers.google.com/drive/v3/reference/files/delete
*/
async $removeFile(refreshedToken, id, ifNotTooLate = cb => res => cb(res)) {
// Refreshing a token can take a while if an oauth window pops up, so check if it's too late
.then(ifNotTooLate(() => this.request(refreshedToken, {
return ifNotTooLate(() => this.$request(refreshedToken, {
method: 'DELETE',
url: `https://www.googleapis.com/drive/v3/files/${id}`,
params: {
supportsTeamDrives: true,
},
})));
}));
},
getFileRevisionsInternal(refreshedToken, id) {
return Promise.resolve()
.then(() => {
const revisions = [];
const getPage = pageToken => this.request(refreshedToken, {
async removeFile(token, id, ifNotTooLate) {
const refreshedToken = await this.refreshToken(token, getDriveScopes(token));
return this.$removeFile(refreshedToken, id, ifNotTooLate);
},
async removeAppDataFile(token, id, ifNotTooLate = cb => res => cb(res)) {
const refreshedToken = await this.refreshToken(token, driveAppDataScopes);
return this.$removeFile(refreshedToken, id, ifNotTooLate);
},

/**
* https://developers.google.com/drive/v3/reference/revisions/list
*/
async $getFileRevisions(refreshedToken, id) {
const allRevisions = [];
const getPage = async (pageToken) => {
const { revisions, nextPageToken } = await this.$request(refreshedToken, {
method: 'GET',
url: `https://www.googleapis.com/drive/v3/files/${id}/revisions`,
params: {
@ -170,214 +407,70 @@ export default {
pageSize: 1000,
fields: 'nextPageToken,revisions(id,modifiedTime,lastModifyingUser/permissionId,lastModifyingUser/displayName,lastModifyingUser/photoLink)',
},
})
.then((res) => {
res.body.revisions.forEach((revision) => {
});
revisions.forEach((revision) => {
store.commit('userInfo/addItem', {
id: revision.lastModifyingUser.permissionId,
name: revision.lastModifyingUser.displayName,
imageUrl: revision.lastModifyingUser.photoLink,
});
revisions.push(revision);
allRevisions.push(revision);
});
if (res.body.nextPageToken) {
return getPage(res.body.nextPageToken);
if (nextPageToken) {
return getPage(nextPageToken);
}
return revisions;
});

return allRevisions;
};
return getPage();
},
async getFileRevisions(token, id) {
const refreshedToken = await this.refreshToken(token, getDriveScopes(token));
return this.$getFileRevisions(refreshedToken, id);
},
async getAppDataFileRevisions(token, id) {
const refreshedToken = await this.refreshToken(token, driveAppDataScopes);
return this.$getFileRevisions(refreshedToken, id);
},

/**
* https://developers.google.com/drive/v3/reference/revisions/get
*/
async $downloadFileRevision(refreshedToken, id, revisionId) {
return this.$request(refreshedToken, {
method: 'GET',
url: `https://www.googleapis.com/drive/v3/files/${id}/revisions/${revisionId}?alt=media`,
raw: true,
});
},
downloadFileRevisionInternal(refreshedToken, fileId, revisionId) {
return Promise.resolve()
.then(() => this.request(refreshedToken, {
method: 'GET',
url: `https://www.googleapis.com/drive/v3/files/${fileId}/revisions/${revisionId}?alt=media`,
raw: true,
}).then(res => res.body));
async downloadFileRevision(token, fileId, revisionId) {
const refreshedToken = await this.refreshToken(token, getDriveScopes(token));
return this.$downloadFileRevision(refreshedToken, fileId, revisionId);
},
getUser(userId) {
return networkSvc.request({
async downloadAppDataFileRevision(token, fileId, revisionId) {
const refreshedToken = await this.refreshToken(token, driveAppDataScopes);
return this.$downloadFileRevision(refreshedToken, fileId, revisionId);
},

/**
* https://developers.google.com/+/web/api/rest/latest/people/get
*/
async getUser(userId) {
const { body } = await networkSvc.request({
method: 'GET',
url: `https://www.googleapis.com/plus/v1/people/${userId}?key=${apiKey}`,
}, true)
.then((res) => {
}, true);
store.commit('userInfo/addItem', {
id: `go:${res.body.id}`,
name: res.body.displayName,
imageUrl: (res.body.image.url || '').replace(/\bsz?=\d+$/, 'sz=40'),
});
return res.body;
id: `go:${body.id}`,
name: body.displayName,
imageUrl: (body.image.url || '').replace(/\bsz?=\d+$/, 'sz=40'),
});
return body;
},
startOauth2(scopes, sub = null, silent = false) {
return networkSvc.startOauth2(
'https://accounts.google.com/o/oauth2/v2/auth',
{
client_id: clientId,
response_type: 'token id_token',
scope: ['openid', ...scopes].join(' '),
hd: appsDomain,
login_hint: sub,
prompt: silent ? 'none' : null,
nonce: utils.uid(),
},
silent,
)
// Call the token info endpoint
.then(data => networkSvc.request({
method: 'POST',
url: 'https://www.googleapis.com/oauth2/v3/tokeninfo',
params: {
access_token: data.accessToken,
},
}, true).then((res) => {
// Check the returned client ID consistency
if (res.body.aud !== clientId) {
throw new Error('Client ID inconsistent.');
}
// Check the returned sub consistency
if (sub && `${res.body.sub}` !== sub) {
throw new Error('Google account ID not expected.');
}
// Build token object including scopes and sub
return {
scopes,
accessToken: data.accessToken,
expiresOn: Date.now() + (data.expiresIn * 1000),
idToken: data.idToken,
sub: `${res.body.sub}`,
isLogin: !store.getters['workspace/mainWorkspaceToken'] &&
scopes.indexOf('https://www.googleapis.com/auth/drive.appdata') !== -1,
isSponsor: false,
isDrive: scopes.indexOf('https://www.googleapis.com/auth/drive') !== -1 ||
scopes.indexOf('https://www.googleapis.com/auth/drive.file') !== -1,
isBlogger: scopes.indexOf('https://www.googleapis.com/auth/blogger') !== -1,
isPhotos: scopes.indexOf('https://www.googleapis.com/auth/photos') !== -1,
driveFullAccess: scopes.indexOf('https://www.googleapis.com/auth/drive') !== -1,
};
}))
// Call the user info endpoint
.then(token => this.getUser(token.sub)
.catch((err) => {
if (err.status === 404) {
store.dispatch('notification/info', 'Please activate Google Plus to change your account name and photo!');
} else {
throw err;
}
})
.then((user = {}) => {
const existingToken = store.getters['data/googleTokens'][token.sub];
// Add name to token
token.name = user.displayName || (existingToken && existingToken.name) || 'Unknown';
if (existingToken) {
// We probably retrieved a new token with restricted scopes.
// That's no problem, token will be refreshed later with merged scopes.
// Restore flags
Object.assign(token, {
isLogin: existingToken.isLogin || token.isLogin,
isSponsor: existingToken.isSponsor,
isDrive: existingToken.isDrive || token.isDrive,
isBlogger: existingToken.isBlogger || token.isBlogger,
isPhotos: existingToken.isPhotos || token.isPhotos,
driveFullAccess: existingToken.driveFullAccess || token.driveFullAccess,
});
}
return token.isLogin && networkSvc.request({
method: 'GET',
url: 'userInfo',
params: {
idToken: token.idToken,
},
})
.then((res) => {
token.isSponsor = res.body.sponsorUntil > Date.now();
}, () => {
// Ignore error
});
})
.then(() => {
// Add token to googleTokens
store.dispatch('data/setGoogleToken', token);
return token;
}));
},
refreshToken(token, scopes = []) {
const { sub } = token;
const lastToken = store.getters['data/googleTokens'][sub];
const mergedScopes = [...new Set([
...scopes,
...lastToken.scopes,
])];

return Promise.resolve()
.then(() => {
if (
// If we already have permissions for the requested scopes
mergedScopes.length === lastToken.scopes.length &&
// And lastToken is not expired
lastToken.expiresOn > Date.now() + tokenExpirationMargin &&
// And in case of a login token, ID token is still valid
(!lastToken.isLogin || checkIdToken(lastToken.idToken))
) {
return lastToken;
}
// New scopes are requested or existing token is going to expire.
// Try to get a new token in background
return this.startOauth2(mergedScopes, sub, true)
// If it fails try to popup a window
.catch((err) => {
if (store.state.offline) {
throw err;
}
return store.dispatch('modal/providerRedirection', {
providerName: 'Google',
onResolve: () => this.startOauth2(mergedScopes, sub),
});
});
});
},
loadClientScript() {
if (gapi) {
return Promise.resolve();
}
return networkSvc.loadScript('https://apis.google.com/js/api.js')
.then(() => Promise.all(libraries
.map(library => new Promise((resolve, reject) => window.gapi.load(library, {
callback: resolve,
onerror: reject,
timeout: 30000,
ontimeout: reject,
})))))
.then(() => {
({ gapi } = window);
({ google } = window);
});
},
getSponsorship(token) {
return this.refreshToken(token)
.then(refreshedToken => networkSvc.request({
method: 'GET',
url: 'userInfo',
params: {
idToken: refreshedToken.idToken,
},
}, true));
},
signin() {
return this.startOauth2(driveAppDataScopes);
},
addDriveAccount(fullAccess = false, sub = null) {
return this.startOauth2(getDriveScopes({ driveFullAccess: fullAccess }), sub);
},
addBloggerAccount() {
return this.startOauth2(bloggerScopes);
},
addPhotosAccount() {
return this.startOauth2(photosScopes);
},
getChanges(token, startPageToken, isAppData, teamDriveId = null) {
/**
* https://developers.google.com/drive/v3/reference/changes/list
*/
async getChanges(token, startPageToken, isAppData, teamDriveId = null) {
const result = {
changes: [],
};
@ -385,9 +478,13 @@ export default {
if (!isAppData) {
fileFields += ',file/parents,file/mimeType,file/appProperties';
}
return this.refreshToken(token, isAppData ? driveAppDataScopes : getDriveScopes(token))
.then((refreshedToken) => {
const getPage = (pageToken = '1') => this.request(refreshedToken, {
const refreshedToken = await this.refreshToken(
token,
isAppData ? driveAppDataScopes : getDriveScopes(token),
);

const getPage = async (pageToken = '1') => {
const { changes, nextPageToken, newStartPageToken } = await this.$request(refreshedToken, {
method: 'GET',
url: 'https://www.googleapis.com/drive/v3/changes',
params: {
@ -399,104 +496,23 @@ export default {
includeTeamDriveItems: !!teamDriveId,
teamDriveId,
},
})
.then((res) => {
result.changes = result.changes.concat(res.body.changes.filter(item => item.fileId));
if (res.body.nextPageToken) {
return getPage(res.body.nextPageToken);
});
result.changes = [...result.changes, ...changes.filter(item => item.fileId)];
if (nextPageToken) {
return getPage(nextPageToken);
}
result.startPageToken = res.body.newStartPageToken;
result.startPageToken = newStartPageToken;
return result;
});

};
return getPage(startPageToken);
});
},
uploadFile(
token,
name,
parents,
appProperties,
media,
mediaType,
fileId,
oldParents,
ifNotTooLate,
) {
return this.refreshToken(token, getDriveScopes(token))
.then(refreshedToken => this.uploadFileInternal(
refreshedToken,
name,
parents,
appProperties,
media,
mediaType,
fileId,
oldParents,
ifNotTooLate,
));
},
uploadAppDataFile(token, name, media, fileId, ifNotTooLate) {
return this.refreshToken(token, driveAppDataScopes)
.then(refreshedToken => this.uploadFileInternal(
refreshedToken,
name,
['appDataFolder'],
undefined,
media,
undefined,
fileId,
undefined,
ifNotTooLate,
));
},
getFile(token, id) {
return this.refreshToken(token, getDriveScopes(token))
.then(refreshedToken => this.request(refreshedToken, {
method: 'GET',
url: `https://www.googleapis.com/drive/v3/files/${id}`,
params: {
fields: 'id,name,mimeType,appProperties,teamDriveId',
supportsTeamDrives: true,
},
}))
.then(res => res.body);
},
downloadFile(token, id) {
return this.refreshToken(token, getDriveScopes(token))
.then(refreshedToken => this.downloadFileInternal(refreshedToken, id));
},
downloadAppDataFile(token, id) {
return this.refreshToken(token, driveAppDataScopes)
.then(refreshedToken => this.downloadFileInternal(refreshedToken, id));
},
removeFile(token, id, ifNotTooLate) {
return this.refreshToken(token, getDriveScopes(token))
.then(refreshedToken => this.removeFileInternal(refreshedToken, id, ifNotTooLate));
},
removeAppDataFile(token, id, ifNotTooLate = cb => res => cb(res)) {
return this.refreshToken(token, driveAppDataScopes)
.then(refreshedToken => this.removeFileInternal(refreshedToken, id, ifNotTooLate));
},
getFileRevisions(token, id) {
return this.refreshToken(token, getDriveScopes(token))
.then(refreshedToken => this.getFileRevisionsInternal(refreshedToken, id));
},
getAppDataFileRevisions(token, id) {
return this.refreshToken(token, driveAppDataScopes)
.then(refreshedToken => this.getFileRevisionsInternal(refreshedToken, id));
},
downloadFileRevision(token, fileId, revisionId) {
return this.refreshToken(token, getDriveScopes(token))
.then(refreshedToken => this
.downloadFileRevisionInternal(refreshedToken, fileId, revisionId));
},
downloadAppDataFileRevision(token, fileId, revisionId) {
return this.refreshToken(token, driveAppDataScopes)
.then(refreshedToken => this
.downloadFileRevisionInternal(refreshedToken, fileId, revisionId));
},
uploadBlogger(

/**
|
||||
* https://developers.google.com/blogger/docs/3.0/reference/blogs/getByUrl
|
||||
* https://developers.google.com/blogger/docs/3.0/reference/posts/insert
|
||||
* https://developers.google.com/blogger/docs/3.0/reference/posts/update
|
||||
*/
|
||||
async uploadBlogger({
|
||||
token,
|
||||
blogUrl,
|
||||
blogId,
|
||||
@ -507,30 +523,28 @@ export default {
|
||||
isDraft,
|
||||
published,
|
||||
isPage,
|
||||
) {
|
||||
return this.refreshToken(token, bloggerScopes)
|
||||
.then(refreshedToken => Promise.resolve()
|
||||
.then(() => {
|
||||
if (blogId) {
|
||||
return blogId;
|
||||
}
|
||||
return this.request(refreshedToken, {
|
||||
}) {
|
||||
const refreshedToken = await this.refreshToken(token, bloggerScopes);
|
||||
|
||||
// Get the blog ID
|
||||
const blog = { id: blogId };
|
||||
if (!blog.id) {
|
||||
blog.id = (await this.$request(refreshedToken, {
|
||||
url: 'https://www.googleapis.com/blogger/v3/blogs/byurl',
|
||||
params: {
|
||||
url: blogUrl,
|
||||
},
|
||||
}).then(res => res.body.id);
|
||||
})
|
||||
.then((resolvedBlogId) => {
|
||||
})).id;
|
||||
}
|
||||
|
||||
// Create/update the post/page
|
||||
const path = isPage ? 'pages' : 'posts';
|
||||
const options = {
|
||||
let options = {
|
||||
method: 'POST',
|
||||
url: `https://www.googleapis.com/blogger/v3/blogs/${resolvedBlogId}/${path}/`,
|
||||
url: `https://www.googleapis.com/blogger/v3/blogs/${blog.id}/${path}/`,
|
||||
body: {
|
||||
kind: isPage ? 'blogger#page' : 'blogger#post',
|
||||
blog: {
|
||||
id: resolvedBlogId,
|
||||
},
|
||||
blog,
|
||||
title,
|
||||
content,
|
||||
},
|
||||
@ -547,14 +561,13 @@ export default {
|
||||
options.url += postId;
|
||||
options.body.id = postId;
|
||||
}
|
||||
return this.request(refreshedToken, options)
|
||||
.then(res => res.body);
|
||||
})
|
||||
.then((post) => {
|
||||
const post = await this.$request(refreshedToken, options);
|
||||
if (isPage) {
|
||||
return post;
|
||||
}
|
||||
const options = {

// Revert/publish post
options = {
method: 'POST',
url: `https://www.googleapis.com/blogger/v3/blogs/${post.blog.id}/posts/${post.id}/`,
params: {},
@ -567,15 +580,26 @@ export default {
options.params.publishDate = published.toISOString();
}
}
return this.request(refreshedToken, options)
.then(res => res.body);
}));
return this.$request(refreshedToken, options);
},
|
||||
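uploadBlogger now takes a single options object instead of positional arguments. A hedged usage sketch, assuming the helper is imported as googleHelper; values are illustrative, and blogId can be omitted so that it is resolved from blogUrl as the code above does:

// Usage sketch, to be called from an async context.
const post = await googleHelper.uploadBlogger({
  token,
  blogUrl: 'https://example.blogspot.com/',
  title: 'Hello',
  content: '<p>Hello world</p>',
  isDraft: true,
  isPage: false,
});
console.log(post.id);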
openPicker(token, type = 'doc') {

/**
* https://developers.google.com/picker/docs/
*/
async openPicker(token, type = 'doc') {
const scopes = type === 'img' ? photosScopes : getDriveScopes(token);
return this.loadClientScript()
.then(() => this.refreshToken(token, scopes))
.then(refreshedToken => new Promise((resolve) => {
if (!window.google) {
await networkSvc.loadScript('https://apis.google.com/js/api.js');
await new Promise((resolve, reject) => window.gapi.load('picker', {
callback: resolve,
onerror: reject,
timeout: 30000,
ontimeout: reject,
}));
}
const refreshedToken = await this.refreshToken(token, scopes);
const { google } = window;
return new Promise((resolve) => {
let picker;
const pickerBuilder = new google.picker.PickerBuilder()
.setOAuthToken(refreshedToken.accessToken)
@ -636,6 +660,6 @@ export default {
}
picker = pickerBuilder.build();
picker.setVisible(true);
}));
});
},
};
|
||||
|
@ -10,11 +10,15 @@ const request = (token, options) => networkSvc.request({
...options.headers || {},
Authorization: `Bearer ${token.accessToken}`,
},
});
})
.then(res => res.body);
|
||||
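Since the request helper now resolves with res.body, callers await the parsed body directly. A sketch using the /me endpoint that appears below, inside an async function:

// Sketch: `request` resolves with the parsed body, not the raw response.
const body = await request({ accessToken }, {
  url: 'https://public-api.wordpress.com/rest/v1.1/me',
});
console.log(body.display_name, body.ID);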
|
||||
export default {
|
||||
startOauth2(sub = null, silent = false) {
|
||||
return networkSvc.startOauth2(
|
||||
/**
|
||||
* https://developer.wordpress.com/docs/oauth2/
|
||||
*/
|
||||
async startOauth2(sub = null, silent = false) {
|
||||
const { accessToken, expiresIn } = await networkSvc.startOauth2(
|
||||
'https://public-api.wordpress.com/oauth2/authorize',
|
||||
{
|
||||
client_id: clientId,
|
||||
@ -22,49 +26,49 @@ export default {
|
||||
scope: 'global',
|
||||
},
|
||||
silent,
|
||||
)
|
||||
);
|
||||
|
||||
// Call the user info endpoint
|
||||
.then(data => request({ accessToken: data.accessToken }, {
|
||||
const body = await request({ accessToken }, {
|
||||
url: 'https://public-api.wordpress.com/rest/v1.1/me',
|
||||
})
|
||||
.then((res) => {
|
||||
});
|
||||
|
||||
// Check the returned sub consistency
|
||||
if (sub && `${res.body.ID}` !== sub) {
|
||||
if (sub && `${body.ID}` !== sub) {
|
||||
throw new Error('WordPress account ID not expected.');
|
||||
}
|
||||
// Build token object including scopes and sub
|
||||
const token = {
|
||||
accessToken: data.accessToken,
|
||||
expiresOn: Date.now() + (data.expiresIn * 1000),
|
||||
name: res.body.display_name,
|
||||
sub: `${res.body.ID}`,
|
||||
accessToken,
|
||||
expiresOn: Date.now() + (expiresIn * 1000),
|
||||
name: body.display_name,
|
||||
sub: `${body.ID}`,
|
||||
};
|
||||
// Add token to wordpressTokens
|
||||
store.dispatch('data/setWordpressToken', token);
|
||||
return token;
|
||||
}));
|
||||
},
|
||||
refreshToken(token) {
|
||||
async refreshToken(token) {
|
||||
const { sub } = token;
|
||||
const lastToken = store.getters['data/wordpressTokens'][sub];
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
if (lastToken.expiresOn > Date.now() + tokenExpirationMargin) {
|
||||
return lastToken;
|
||||
}
|
||||
// Existing token is going to expire.
|
||||
// Try to get a new token in the background
|
||||
return store.dispatch('modal/providerRedirection', {
|
||||
providerName: 'WordPress',
|
||||
onResolve: () => this.startOauth2(sub),
|
||||
});
|
||||
});
|
||||
await store.dispatch('modal/providerRedirection', { providerName: 'WordPress' });
|
||||
return this.startOauth2(sub);
|
||||
},
|
||||
addAccount(fullAccess = false) {
|
||||
return this.startOauth2(fullAccess);
|
||||
},
|
||||
uploadPost(
|
||||
|
||||
/**
|
||||
* https://developer.wordpress.com/docs/api/1.2/post/sites/%24site/posts/new/
|
||||
* https://developer.wordpress.com/docs/api/1.2/post/sites/%24site/posts/%24post_ID/
|
||||
*/
|
||||
async uploadPost({
|
||||
token,
|
||||
domain,
|
||||
siteId,
|
||||
@ -78,9 +82,9 @@ export default {
|
||||
featuredImage,
|
||||
status,
|
||||
date,
|
||||
) {
|
||||
return this.refreshToken(token)
|
||||
.then(refreshedToken => request(refreshedToken, {
|
||||
}) {
|
||||
const refreshedToken = await this.refreshToken(token);
|
||||
await request(refreshedToken, {
|
||||
method: 'POST',
|
||||
url: `https://public-api.wordpress.com/rest/v1.2/sites/${siteId || domain}/posts/${postId || 'new'}`,
|
||||
body: {
|
||||
@ -94,7 +98,6 @@ export default {
|
||||
status,
|
||||
date: date && date.toISOString(),
|
||||
},
|
||||
})
|
||||
.then(res => res.body));
|
||||
});
|
||||
},
|
||||
};
|
||||
|
@ -7,11 +7,16 @@ const request = (token, options) => networkSvc.request({
|
||||
...options.headers || {},
|
||||
Authorization: `Bearer ${token.accessToken}`,
|
||||
},
|
||||
});
|
||||
})
|
||||
.then(res => res.body);
|
||||
|
||||
|
||||
export default {
|
||||
startOauth2(subdomain, clientId, sub = null, silent = false) {
|
||||
return networkSvc.startOauth2(
|
||||
/**
|
||||
* https://support.zendesk.com/hc/en-us/articles/203663836-Using-OAuth-authentication-with-your-application
|
||||
*/
|
||||
async startOauth2(subdomain, clientId, sub = null, silent = false) {
|
||||
const { accessToken } = await networkSvc.startOauth2(
|
||||
`https://${subdomain}.zendesk.com/oauth/authorizations/new`,
|
||||
{
|
||||
client_id: clientId,
|
||||
@ -19,33 +24,39 @@ export default {
|
||||
scope: 'read hc:write',
|
||||
},
|
||||
silent,
|
||||
)
|
||||
);
|
||||
|
||||
// Call the user info endpoint
|
||||
.then(({ accessToken }) => request({ accessToken }, {
|
||||
const { user } = await request({ accessToken }, {
|
||||
url: `https://${subdomain}.zendesk.com/api/v2/users/me.json`,
|
||||
})
|
||||
.then((res) => {
|
||||
const uniqueSub = `${subdomain}/${res.body.user.id}`;
|
||||
});
|
||||
const uniqueSub = `${subdomain}/${user.id}`;
|
||||
|
||||
// Check the returned sub consistency
|
||||
if (sub && uniqueSub !== sub) {
|
||||
throw new Error('Zendesk account ID not expected.');
|
||||
}
|
||||
|
||||
// Build token object including scopes and sub
|
||||
const token = {
|
||||
accessToken,
|
||||
name: res.body.user.name,
|
||||
name: user.name,
|
||||
subdomain,
|
||||
sub: uniqueSub,
|
||||
};
|
||||
|
||||
// Add token to zendeskTokens
|
||||
store.dispatch('data/setZendeskToken', token);
|
||||
return token;
|
||||
}));
|
||||
},
|
||||
addAccount(subdomain, clientId) {
|
||||
return this.startOauth2(subdomain, clientId);
|
||||
},
|
||||
uploadArticle(
|
||||
|
||||
/**
|
||||
* https://developer.zendesk.com/rest_api/docs/help_center/articles
|
||||
*/
|
||||
async uploadArticle({
|
||||
token,
|
||||
sectionId,
|
||||
articleId,
|
||||
@ -54,20 +65,25 @@ export default {
|
||||
labels,
|
||||
locale,
|
||||
isDraft,
|
||||
) {
|
||||
}) {
|
||||
const article = {
|
||||
title,
|
||||
body: content,
|
||||
locale,
|
||||
draft: isDraft,
|
||||
};
|
||||
|
||||
if (articleId) {
|
||||
return request(token, {
|
||||
// Update article
|
||||
await request(token, {
|
||||
method: 'PUT',
|
||||
url: `https://${token.subdomain}.zendesk.com/api/v2/help_center/articles/${articleId}/translations/${locale}.json`,
|
||||
body: { translation: article },
|
||||
})
|
||||
.then(() => labels && request(token, {
|
||||
});
|
||||
|
||||
// Add labels
|
||||
if (labels) {
|
||||
await request(token, {
|
||||
method: 'PUT',
|
||||
url: `https://${token.subdomain}.zendesk.com/api/v2/help_center/articles/${articleId}.json`,
|
||||
body: {
|
||||
@ -75,17 +91,20 @@ export default {
|
||||
label_names: labels,
|
||||
},
|
||||
},
|
||||
}))
|
||||
.then(() => articleId);
|
||||
});
|
||||
}
|
||||
return articleId;
|
||||
}
|
||||
|
||||
// Create new article
|
||||
if (labels) {
|
||||
article.label_names = labels;
|
||||
}
|
||||
return request(token, {
const body = await request(token, {
method: 'POST',
url: `https://${token.subdomain}.zendesk.com/api/v2/help_center/sections/${sectionId}/articles.json`,
body: { article },
})
.then(res => `${res.body.article.id}`);
});
return `${body.article.id}`;
},
};
|
||||
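uploadArticle likewise takes a single options object now; zendeskProvider spreads publishLocation and metadata into it further down. A condensed usage sketch with illustrative values, to be called from an async context:

// Usage sketch only; ids and values are made up.
const savedArticleId = await zendeskHelper.uploadArticle({
  token,
  sectionId: '115000000000',
  articleId: undefined, // set it to update an existing article instead
  title: 'Getting started',
  content: '<p>Hello</p>',
  labels: ['markdown'],
  locale: 'en-us',
  isDraft: true,
});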
|
@ -14,27 +14,18 @@ export default new Provider({
|
||||
const token = this.getToken(location);
|
||||
return `${location.postId} — ${location.domain} — ${token.name}`;
|
||||
},
|
||||
publish(token, html, metadata, publishLocation) {
|
||||
return wordpressHelper.uploadPost(
|
||||
async publish(token, html, metadata, publishLocation) {
|
||||
const post = await wordpressHelper.uploadPost({
|
||||
...publishLocation,
|
||||
...metadata,
|
||||
token,
|
||||
publishLocation.domain,
|
||||
publishLocation.siteId,
|
||||
publishLocation.postId,
|
||||
metadata.title,
|
||||
html,
|
||||
metadata.tags,
|
||||
metadata.categories,
|
||||
metadata.excerpt,
|
||||
metadata.author,
|
||||
metadata.featuredImage,
|
||||
metadata.status,
|
||||
metadata.date,
|
||||
)
|
||||
.then(post => ({
|
||||
content: html,
|
||||
});
|
||||
return {
|
||||
...publishLocation,
|
||||
siteId: `${post.site_ID}`,
|
||||
postId: `${post.ID}`,
|
||||
}));
|
||||
};
|
||||
},
|
||||
makeLocation(token, domain, postId) {
|
||||
const location = {
|
||||
|
@ -15,21 +15,19 @@ export default new Provider({
|
||||
const token = this.getToken(location);
|
||||
return `${location.articleId} — ${token.name} — ${token.subdomain}`;
|
||||
},
|
||||
publish(token, html, metadata, publishLocation) {
|
||||
return zendeskHelper.uploadArticle(
|
||||
async publish(token, html, metadata, publishLocation) {
|
||||
const articleId = await zendeskHelper.uploadArticle({
|
||||
...publishLocation,
|
||||
token,
|
||||
publishLocation.sectionId,
|
||||
publishLocation.articleId,
|
||||
metadata.title,
|
||||
html,
|
||||
metadata.tags,
|
||||
publishLocation.locale,
|
||||
metadata.status === 'draft',
|
||||
)
|
||||
.then(articleId => ({
|
||||
title: metadata.title,
|
||||
content: html,
|
||||
labels: metadata.tags,
|
||||
isDraft: metadata.status === 'draft',
|
||||
});
|
||||
return {
|
||||
...publishLocation,
|
||||
articleId,
|
||||
}));
|
||||
};
|
||||
},
|
||||
makeLocation(token, sectionId, locale, articleId) {
|
||||
const location = {
|
||||
|
@ -38,12 +38,11 @@ const ensureDate = (value, defaultValue) => {
|
||||
return new Date(`${value}`);
|
||||
};
|
||||
|
||||
function publish(publishLocation) {
|
||||
const publish = async (publishLocation) => {
|
||||
const { fileId } = publishLocation;
|
||||
const template = store.getters['data/allTemplates'][publishLocation.templateId];
|
||||
return exportSvc.applyTemplate(fileId, template)
|
||||
.then(html => localDbSvc.loadItem(`${fileId}/content`)
|
||||
.then((content) => {
|
||||
const html = await exportSvc.applyTemplate(fileId, template);
|
||||
const content = await localDbSvc.loadItem(`${fileId}/content`);
|
||||
const file = store.state.file.itemMap[fileId];
|
||||
const properties = utils.computeProperties(content.properties);
|
||||
const provider = providerRegistry.providers[publishLocation.providerId];
|
||||
@ -59,25 +58,21 @@ function publish(publishLocation) {
|
||||
date: ensureDate(properties.date, new Date()),
|
||||
};
|
||||
return provider.publish(token, html, metadata, publishLocation);
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
function publishFile(fileId) {
|
||||
const publishFile = async (fileId) => {
|
||||
let counter = 0;
|
||||
return loadContent(fileId)
|
||||
.then(() => {
|
||||
await loadContent(fileId);
|
||||
const publishLocations = [
|
||||
...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
|
||||
];
|
||||
const publishOneContentLocation = () => {
|
||||
const publishLocation = publishLocations.shift();
|
||||
if (!publishLocation) {
|
||||
return null;
|
||||
}
|
||||
return store.dispatch('queue/doWithLocation', {
|
||||
try {
|
||||
await utils.awaitSequence(publishLocations, async (publishLocation) => {
|
||||
await store.dispatch('queue/doWithLocation', {
|
||||
location: publishLocation,
|
||||
promise: publish(publishLocation)
|
||||
.then((publishLocationToStore) => {
|
||||
action: async () => {
|
||||
const publishLocationToStore = await publish(publishLocation);
|
||||
try {
|
||||
// Replace publish location if modified
|
||||
if (utils.serializeObject(publishLocation) !==
|
||||
utils.serializeObject(publishLocationToStore)
|
||||
@ -85,33 +80,24 @@ function publishFile(fileId) {
|
||||
store.commit('publishLocation/patchItem', publishLocationToStore);
|
||||
}
|
||||
counter += 1;
|
||||
return publishOneContentLocation();
|
||||
}, (err) => {
|
||||
} catch (err) {
|
||||
if (store.state.offline) {
|
||||
throw err;
|
||||
}
|
||||
console.error(err); // eslint-disable-line no-console
|
||||
store.dispatch('notification/error', err);
|
||||
return publishOneContentLocation();
|
||||
}),
|
||||
}
|
||||
},
|
||||
});
|
||||
});
|
||||
};
return publishOneContentLocation();
})
.then(() => {
const file = store.state.file.itemMap[fileId];
store.dispatch('notification/info', `"${file.name}" was published to ${counter} location(s).`);
})
.then(
() => localDbSvc.unloadContents(),
err => localDbSvc.unloadContents()
.then(() => {
throw err;
}),
);
}
} finally {
await localDbSvc.unloadContents();
}
};
|
||||
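publishFile relies on utils.awaitSequence to process the locations one at a time; its implementation is outside this hunk. A sketch of the contract it is called with here (the real helper may differ):

// Sketch only: run an async function over values sequentially.
const awaitSequence = async (values, asyncFn) => {
  const results = [];
  // eslint-disable-next-line no-restricted-syntax
  for (const value of values) {
    results.push(await asyncFn(value)); // eslint-disable-line no-await-in-loop
  }
  return results;
};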
|
||||
function requestPublish() {
|
||||
const requestPublish = () => {
|
||||
// No publish in light mode
|
||||
if (store.state.light) {
|
||||
return;
|
||||
@ -135,21 +121,21 @@ function requestPublish() {
|
||||
intervalId = utils.setInterval(() => attempt(), 1000);
|
||||
attempt();
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
function createPublishLocation(publishLocation) {
|
||||
const createPublishLocation = (publishLocation) => {
|
||||
publishLocation.id = utils.uid();
|
||||
const currentFile = store.getters['file/current'];
|
||||
publishLocation.fileId = currentFile.id;
|
||||
store.dispatch(
|
||||
'queue/enqueue',
|
||||
() => publish(publishLocation)
|
||||
.then((publishLocationToStore) => {
|
||||
async () => {
|
||||
const publishLocationToStore = await publish(publishLocation);
|
||||
store.commit('publishLocation/setItem', publishLocationToStore);
|
||||
store.dispatch('notification/info', `A new publication location was added to "${currentFile.name}".`);
|
||||
}),
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
export default {
|
||||
requestPublish,
|
||||
|
@ -1,11 +1,12 @@
|
||||
function SectionDimension(startOffset, endOffset) {
|
||||
class SectionDimension {
|
||||
constructor(startOffset, endOffset) {
|
||||
this.startOffset = startOffset;
|
||||
this.endOffset = endOffset;
|
||||
this.height = endOffset - startOffset;
|
||||
}
|
||||
}
|
||||
|
||||
function dimensionNormalizer(dimensionName) {
|
||||
return (editorSvc) => {
|
||||
const dimensionNormalizer = dimensionName => (editorSvc) => {
|
||||
const dimensionList = editorSvc.previewCtx.sectionDescList
|
||||
.map(sectionDesc => sectionDesc[dimensionName]);
|
||||
let dimension;
|
||||
@ -33,14 +34,13 @@ function dimensionNormalizer(dimensionName) {
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const normalizeEditorDimensions = dimensionNormalizer('editorDimension');
|
||||
const normalizePreviewDimensions = dimensionNormalizer('previewDimension');
|
||||
const normalizeTocDimensions = dimensionNormalizer('tocDimension');
|
||||
|
||||
function measureSectionDimensions(editorSvc) {
|
||||
const measureSectionDimensions = (editorSvc) => {
|
||||
let editorSectionOffset = 0;
|
||||
let previewSectionOffset = 0;
|
||||
let tocSectionOffset = 0;
|
||||
@ -106,7 +106,7 @@ function measureSectionDimensions(editorSvc) {
|
||||
normalizeEditorDimensions(editorSvc);
|
||||
normalizePreviewDimensions(editorSvc);
|
||||
normalizeTocDimensions(editorSvc);
|
||||
}
|
||||
};
|
||||
|
||||
export default {
|
||||
measureSectionDimensions,
|
||||
|
@ -8,20 +8,19 @@ let lastCheck = 0;
const appId = 'ESTHdCYOi18iLhhO';
let monetize;

const getMonetize = () => Promise.resolve()
.then(() => networkSvc.loadScript('https://cdn.monetizejs.com/api/js/latest/monetize.min.js'))
.then(() => {
const getMonetize = async () => {
await networkSvc.loadScript('https://cdn.monetizejs.com/api/js/latest/monetize.min.js');
monetize = monetize || new window.MonetizeJS({
applicationID: appId,
});
});
};

const isGoogleSponsor = () => {
const sponsorToken = store.getters['workspace/sponsorToken'];
return sponsorToken && sponsorToken.isSponsor;
};

const checkPayment = () => {
const checkPayment = async () => {
const currentDate = Date.now();
if (!isGoogleSponsor()
&& networkSvc.isUserActive()
@ -30,15 +29,15 @@ const checkPayment = () => {
&& lastCheck + checkPaymentEvery < currentDate
) {
lastCheck = currentDate;
getMonetize()
.then(() => monetize.getPaymentsImmediate((err, payments) => {
await getMonetize();
monetize.getPaymentsImmediate((err, payments) => {
const isSponsor = payments && payments.app === appId && (
(payments.chargeOption && payments.chargeOption.alias === 'once') ||
(payments.subscriptionOption && payments.subscriptionOption.alias === 'yearly'));
if (isSponsor !== store.state.monetizeSponsor) {
store.commit('setMonetizeSponsor', isSponsor);
}
}));
});
}
};

@ -46,12 +45,11 @@ export default {
init: () => {
utils.setInterval(checkPayment, 2000);
},
getToken() {
async getToken() {
if (isGoogleSponsor() || store.state.offline) {
return Promise.resolve();
return null;
}
return getMonetize()
.then(() => new Promise(resolve =>
monetize.getTokenImmediate((err, result) => resolve(result))));
await getMonetize();
return new Promise(resolve => monetize.getTokenImmediate((err, result) => resolve(result)));
},
};
|
||||
|
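getToken above bridges MonetizeJS's callback API into async/await by wrapping it in a Promise; note that it deliberately resolves even when err is set, so callers simply get undefined when no token is available. The generic form of that pattern, for reference (the helper name is illustrative):

// Adapt a Node-style callback API so it can be awaited.
const promisify = fn => (...args) =>
  new Promise((resolve, reject) =>
    fn(...args, (err, result) => (err ? reject(err) : resolve(result))));

// e.g. const getPayments = promisify(monetize.getPaymentsImmediate.bind(monetize));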
@ -50,36 +50,36 @@ const isSyncPossible = () => !store.state.offline &&
/**
* Return true if we are the main window, i.e. we have the lastSyncActivity lock.
*/
function isSyncWindow() {
const isSyncWindow = () => {
const storedLastSyncActivity = getLastStoredSyncActivity();
return lastSyncActivity === storedLastSyncActivity ||
Date.now() > inactivityThreshold + storedLastSyncActivity;
}
};

/**
* Return true if auto sync can start, i.e. if lastSyncActivity is old enough.
*/
function isAutoSyncReady() {
const isAutoSyncReady = () => {
let { autoSyncEvery } = store.getters['data/computedSettings'];
if (autoSyncEvery < minAutoSyncEvery) {
autoSyncEvery = minAutoSyncEvery;
}
return Date.now() > autoSyncEvery + getLastStoredSyncActivity();
}
};

/**
* Update the lastSyncActivity, assuming we have the lock.
*/
function setLastSyncActivity() {
const setLastSyncActivity = () => {
const currentDate = Date.now();
lastSyncActivity = currentDate;
localStorage.setItem(store.getters['workspace/lastSyncActivityKey'], currentDate);
}
};
|
||||
|
||||
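The lock referred to above is a heartbeat value written to localStorage: whichever window wrote the last heartbeat owns sync, and a stale heartbeat can be taken over. A minimal sketch of the idea (key name and threshold are illustrative, not the constants used here):

// Minimal heartbeat-lock sketch; not the actual keys or constants of syncSvc.
const KEY = 'lastSyncActivity';
const INACTIVITY_THRESHOLD = 3000;
let lastBeat = 0;

const beat = () => {
  lastBeat = Date.now();
  localStorage.setItem(KEY, lastBeat);
};

const ownsLock = () => {
  const stored = parseInt(localStorage.getItem(KEY), 10) || 0;
  // We own the lock if we wrote the last heartbeat, or if it went stale.
  return stored === lastBeat || Date.now() > stored + INACTIVITY_THRESHOLD;
};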
/**
|
||||
* Upgrade hashes if syncedContent is from an old version
|
||||
*/
|
||||
function upgradeSyncedContent(syncedContent) {
|
||||
const upgradeSyncedContent = (syncedContent) => {
|
||||
if (syncedContent.v) {
|
||||
return syncedContent;
|
||||
}
|
||||
@ -100,12 +100,12 @@ function upgradeSyncedContent(syncedContent) {
|
||||
syncHistory,
|
||||
v: 1,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Clean a syncedContent.
|
||||
*/
|
||||
function cleanSyncedContent(syncedContent) {
|
||||
const cleanSyncedContent = (syncedContent) => {
|
||||
// Clean syncHistory from removed syncLocations
|
||||
Object.keys(syncedContent.syncHistory).forEach((syncLocationId) => {
|
||||
if (syncLocationId !== 'main' && !store.state.syncLocation.itemMap[syncLocationId]) {
|
||||
@ -123,12 +123,12 @@ function cleanSyncedContent(syncedContent) {
|
||||
delete syncedContent.historyData[hash];
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Apply changes retrieved from the main provider. Update sync data accordingly.
|
||||
*/
|
||||
function applyChanges(changes) {
|
||||
const applyChanges = (changes) => {
|
||||
const storeItemMap = { ...store.getters.allItemMap };
|
||||
const syncData = { ...store.getters['data/syncData'] };
|
||||
let saveSyncData = false;
|
||||
@ -170,12 +170,12 @@ function applyChanges(changes) {
|
||||
if (saveSyncData) {
|
||||
store.dispatch('data/setSyncData', syncData);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Create a sync location by uploading the current file content.
|
||||
*/
|
||||
function createSyncLocation(syncLocation) {
|
||||
const createSyncLocation = (syncLocation) => {
|
||||
syncLocation.id = utils.uid();
|
||||
const currentFile = store.getters['file/current'];
|
||||
const fileId = currentFile.id;
|
||||
@ -184,15 +184,14 @@ function createSyncLocation(syncLocation) {
|
||||
const content = utils.deepCopy(store.getters['content/current']);
|
||||
store.dispatch(
|
||||
'queue/enqueue',
|
||||
() => {
|
||||
async () => {
|
||||
const provider = providerRegistry.providers[syncLocation.providerId];
|
||||
const token = provider.getToken(syncLocation);
|
||||
return provider.uploadContent(token, {
|
||||
const syncLocationToStore = await provider.uploadContent(token, {
|
||||
...content,
|
||||
history: [content.hash],
|
||||
}, syncLocation)
|
||||
.then(syncLocationToStore => localDbSvc.loadSyncedContent(fileId)
|
||||
.then(() => {
|
||||
}, syncLocation);
|
||||
await localDbSvc.loadSyncedContent(fileId);
|
||||
const newSyncedContent = utils.deepCopy(upgradeSyncedContent(store.state.syncedContent.itemMap[`${fileId}/syncedContent`]));
|
||||
const newSyncHistoryItem = [];
|
||||
newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
|
||||
@ -203,10 +202,9 @@ function createSyncLocation(syncLocation) {
|
||||
store.commit('syncedContent/patchItem', newSyncedContent);
|
||||
store.commit('syncLocation/setItem', syncLocationToStore);
|
||||
store.dispatch('notification/info', `A new synchronized location was added to "${currentFile.name}".`);
|
||||
}));
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
// Prevent sending new data too long after old data has been fetched
const tooLateChecker = (timeout) => {
@ -219,33 +217,13 @@ const tooLateChecker = (timeout) => {
};
};
|
||||
|
||||
class SyncContext {
|
||||
restart = false;
|
||||
attempted = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync one file with all its locations.
|
||||
*/
|
||||
function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
syncContext.attempted[`${fileId}/content`] = true;
|
||||
|
||||
return localDbSvc.loadSyncedContent(fileId)
|
||||
.then(() => localDbSvc.loadItem(`${fileId}/content`)
|
||||
.catch(() => {})) // Item may not exist if content has not been downloaded yet
|
||||
.then(() => {
|
||||
const getFile = () => store.state.file.itemMap[fileId];
|
||||
const getContent = () => store.state.content.itemMap[`${fileId}/content`];
|
||||
const getSyncedContent = () => upgradeSyncedContent(store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
|
||||
const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];
|
||||
|
||||
const isTempFile = () => {
|
||||
const isTempFile = (fileId) => {
|
||||
if (store.getters['data/syncDataByItemId'][`${fileId}/content`]) {
|
||||
// If file has already been synced, it's not a temp file
|
||||
return false;
|
||||
}
|
||||
const file = getFile();
|
||||
const content = getContent();
|
||||
const file = store.state.file.itemMap[fileId];
|
||||
const content = store.state.content.itemMap[`${fileId}/content`];
|
||||
if (!file || !content) {
|
||||
return false;
|
||||
}
|
||||
@ -257,7 +235,7 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
|
||||
];
|
||||
if (locations.length) {
|
||||
// If file has explicit sync/publish locations, it's not a temp file
|
||||
// If file has sync/publish locations, it's not a temp file
|
||||
return false;
|
||||
}
|
||||
// Return true if it's a welcome file that has no discussion
|
||||
@ -265,41 +243,59 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
const hash = utils.hash(content.text);
|
||||
const hasDiscussions = Object.keys(content.discussions).length;
|
||||
return file.name === 'Welcome file' && welcomeFileHashes[hash] && !hasDiscussions;
|
||||
};
|
||||
};
|
||||
|
||||
if (isTempFile()) {
|
||||
return null;
|
||||
class SyncContext {
|
||||
restart = false;
|
||||
attempted = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync one file with all its locations.
|
||||
*/
|
||||
const syncFile = async (fileId, syncContext = new SyncContext()) => {
|
||||
syncContext.attempted[`${fileId}/content`] = true;
|
||||
|
||||
await localDbSvc.loadSyncedContent(fileId);
|
||||
try {
|
||||
await localDbSvc.loadItem(`${fileId}/content`);
|
||||
} catch (e) {
|
||||
// Item may not exist if content has not been downloaded yet
|
||||
}
|
||||
const getContent = () => store.state.content.itemMap[`${fileId}/content`];
|
||||
const getSyncedContent = () => upgradeSyncedContent(store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
|
||||
const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];
|
||||
|
||||
try {
|
||||
if (isTempFile(fileId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const attemptedLocations = {};
|
||||
const syncOneContentLocation = () => {
|
||||
const syncLocations = [
|
||||
...store.getters['syncLocation/filteredGroupedByFileId'][fileId] || [],
|
||||
];
|
||||
if (isWorkspaceSyncPossible()) {
|
||||
syncLocations.unshift({ id: 'main', providerId: workspaceProvider.id, fileId });
|
||||
}
|
||||
let result;
|
||||
syncLocations.some((syncLocation) => {
|
||||
|
||||
await utils.awaitSequence(syncLocations, async (syncLocation) => {
|
||||
const provider = providerRegistry.providers[syncLocation.providerId];
|
||||
if (
|
||||
// Skip if it has been attempted already
|
||||
!attemptedLocations[syncLocation.id] &&
|
||||
// Skip temp file
|
||||
!isTempFile()
|
||||
) {
|
||||
attemptedLocations[syncLocation.id] = true;
|
||||
const token = provider && provider.getToken(syncLocation);
|
||||
result = token && store.dispatch('queue/doWithLocation', {
|
||||
location: syncLocation,
|
||||
promise: provider.downloadContent(token, syncLocation)
|
||||
.then((serverContent = null) => {
|
||||
if (!provider) {
|
||||
return;
|
||||
}
|
||||
const token = provider.getToken(syncLocation);
|
||||
if (!token) {
|
||||
return;
|
||||
}
|
||||
|
||||
const doSyncLocation = async () => {
|
||||
const serverContent = await provider.downloadContent(token, syncLocation);
|
||||
const syncedContent = getSyncedContent();
|
||||
const syncHistoryItem = getSyncHistoryItem(syncLocation.id);
|
||||
let mergedContent = (() => {
|
||||
const clientContent = utils.deepCopy(getContent());
|
||||
if (!clientContent) {
|
||||
return utils.deepCopy(serverContent);
|
||||
return utils.deepCopy(serverContent || null);
|
||||
}
|
||||
if (!serverContent) {
|
||||
// Sync location has not been created yet
|
||||
@ -314,11 +310,10 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
return clientContent;
|
||||
}
|
||||
// Perform a merge with last merged content if any, or a simple fusion otherwise
|
||||
let lastMergedContent;
|
||||
serverContent.history.some((hash) => {
|
||||
lastMergedContent = syncedContent.historyData[hash];
|
||||
return lastMergedContent;
|
||||
});
|
||||
let lastMergedContent = utils.someResult(
|
||||
serverContent.history,
|
||||
hash => syncedContent.historyData[hash],
|
||||
);
|
||||
if (!lastMergedContent && syncHistoryItem) {
|
||||
lastMergedContent = syncedContent.historyData[syncHistoryItem[LAST_MERGED]];
|
||||
}
|
||||
@ -326,7 +321,7 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
})();
|
||||
|
||||
if (!mergedContent) {
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
// Update or set content in store
|
||||
@ -338,7 +333,7 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
comments: mergedContent.comments,
|
||||
});
|
||||
|
||||
// Retrieve content with new `hash` and freeze it
|
||||
// Retrieve content with its new hash value and freeze it
|
||||
mergedContent = utils.deepCopy(getContent());
|
||||
|
||||
// Make merged content history
|
||||
@ -385,15 +380,15 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
|
||||
if (skipUpload) {
|
||||
// Server content and merged content are equal, skip content upload
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
// Upload merged content
|
||||
return provider.uploadContent(token, {
|
||||
const syncLocationToStore = await provider.uploadContent(token, {
|
||||
...mergedContent,
|
||||
history: mergedContentHistory.slice(0, maxContentHistory),
|
||||
}, syncLocation, tooLateChecker(restartContentSyncAfter))
|
||||
.then((syncLocationToStore) => {
|
||||
}, syncLocation, tooLateChecker(restartContentSyncAfter));
|
||||
|
||||
// Replace sync location if modified
|
||||
if (utils.serializeObject(syncLocation) !==
|
||||
utils.serializeObject(syncLocationToStore)
|
||||
@ -407,57 +402,49 @@ function syncFile(fileId, syncContext = new SyncContext()) {
|
||||
) {
|
||||
syncContext.restart = true;
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
};
|
||||
|
||||
await store.dispatch('queue/doWithLocation', {
|
||||
location: syncLocation,
|
||||
action: async () => {
|
||||
try {
|
||||
await doSyncLocation();
|
||||
} catch (err) {
|
||||
if (store.state.offline || (err && err.message === 'TOO_LATE')) {
|
||||
throw err;
|
||||
}
|
||||
console.error(err); // eslint-disable-line no-console
|
||||
store.dispatch('notification/error', err);
|
||||
}),
|
||||
})
|
||||
.then(() => syncOneContentLocation());
|
||||
}
|
||||
return result;
|
||||
},
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
return syncOneContentLocation();
|
||||
})
|
||||
.then(
|
||||
() => localDbSvc.unloadContents(),
|
||||
err => localDbSvc.unloadContents()
|
||||
.then(() => {
|
||||
throw err;
|
||||
}),
|
||||
)
|
||||
.catch((err) => {
|
||||
});
|
||||
} catch (err) {
|
||||
if (err && err.message === 'TOO_LATE') {
|
||||
// Restart sync
|
||||
return syncFile(fileId, syncContext);
|
||||
await syncFile(fileId, syncContext);
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
await localDbSvc.unloadContents();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Sync a data item, typically settings, workspaces and templates.
|
||||
*/
|
||||
function syncDataItem(dataId) {
|
||||
const syncDataItem = async (dataId) => {
|
||||
const getItem = () => store.state.data.itemMap[dataId]
|
||||
|| store.state.data.lsItemMap[dataId];
|
||||
|
||||
const item = getItem();
|
||||
const syncData = store.getters['data/syncDataByItemId'][dataId];
|
||||
// Sync if item hash and syncData hash are inconsistent
|
||||
// Sync if item hash and syncData hash are out of sync
|
||||
if (syncData && item && item.hash === syncData.hash) {
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
return workspaceProvider.downloadData(dataId)
|
||||
.then((serverItem = null) => {
|
||||
const serverItem = await workspaceProvider.downloadData(dataId);
|
||||
const dataSyncData = store.getters['data/dataSyncData'][dataId];
|
||||
let mergedItem = (() => {
|
||||
const clientItem = utils.deepCopy(getItem());
|
||||
@ -485,7 +472,7 @@ function syncDataItem(dataId) {
|
||||
})();
|
||||
|
||||
if (!mergedItem) {
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
|
||||
// Update item in store
|
||||
@ -497,28 +484,23 @@ function syncDataItem(dataId) {
|
||||
// Retrieve item with new `hash` and freeze it
|
||||
mergedItem = utils.deepCopy(getItem());
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
if (serverItem && serverItem.hash === mergedItem.hash) {
|
||||
return null;
|
||||
return;
|
||||
}
|
||||
return workspaceProvider.uploadData(mergedItem, tooLateChecker(restartContentSyncAfter));
|
||||
})
|
||||
.then(() => store.dispatch('data/patchDataSyncData', {
|
||||
await workspaceProvider.uploadData(mergedItem, tooLateChecker(restartContentSyncAfter));
|
||||
store.dispatch('data/patchDataSyncData', {
|
||||
[dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
|
||||
}));
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Sync the whole workspace with the main provider and the current file explicit locations.
|
||||
*/
|
||||
function syncWorkspace() {
|
||||
const syncWorkspace = async () => {
|
||||
try {
|
||||
const workspace = store.getters['workspace/currentWorkspace'];
|
||||
const syncContext = new SyncContext();
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
// Store the sub in the DB since it's not safely stored in the token
|
||||
const syncToken = store.getters['workspace/syncToken'];
|
||||
const localSettings = store.getters['data/localSettings'];
|
||||
@ -529,9 +511,8 @@ function syncWorkspace() {
|
||||
} else if (localSettings.syncSub !== syncToken.sub) {
|
||||
throw new Error('Synchronization failed due to token inconsistency.');
|
||||
}
|
||||
})
|
||||
.then(() => workspaceProvider.getChanges())
|
||||
.then((changes) => {
|
||||
|
||||
const changes = await workspaceProvider.getChanges();
|
||||
// Apply changes
|
||||
applyChanges(changes);
|
||||
if (workspaceProvider.onChangesApplied) {
|
||||
@ -542,7 +523,7 @@ function syncWorkspace() {
|
||||
const ifNotTooLate = tooLateChecker(restartSyncAfter);
|
||||
|
||||
// Called until no item to save
|
||||
const saveNextItem = ifNotTooLate(() => {
|
||||
const saveNextItem = ifNotTooLate(async () => {
|
||||
const storeItemMap = {
|
||||
...store.state.file.itemMap,
|
||||
...store.state.folder.itemMap,
|
||||
@ -551,8 +532,9 @@ function syncWorkspace() {
|
||||
// Deal with contents and data later
|
||||
};
|
||||
const syncDataByItemId = store.getters['data/syncDataByItemId'];
|
||||
let promise;
|
||||
Object.entries(storeItemMap).some(([id, item]) => {
|
||||
const [changedItem, syncDataToUpdate] = utils.someResult(
|
||||
Object.entries(storeItemMap),
|
||||
([id, item]) => {
|
||||
const existingSyncData = syncDataByItemId[id];
|
||||
if ((!existingSyncData || existingSyncData.hash !== item.hash)
|
||||
// Add file/folder if parent has been added
|
||||
@ -560,25 +542,30 @@ function syncWorkspace() {
|
||||
// Add file if content has been added
|
||||
&& (item.type !== 'file' || syncDataByItemId[`${id}/content`])
|
||||
) {
|
||||
promise = workspaceProvider
|
||||
return [item, existingSyncData];
|
||||
}
|
||||
return null;
|
||||
},
|
||||
) || [];
|
||||
|
||||
if (changedItem) {
|
||||
const resultSyncData = await workspaceProvider
|
||||
.saveSimpleItem(
|
||||
// Use deepCopy to freeze objects
|
||||
utils.deepCopy(item),
|
||||
utils.deepCopy(existingSyncData),
|
||||
utils.deepCopy(changedItem),
|
||||
utils.deepCopy(syncDataToUpdate),
|
||||
ifNotTooLate,
|
||||
)
|
||||
.then(resultSyncData => store.dispatch('data/patchSyncData', {
|
||||
);
|
||||
store.dispatch('data/patchSyncData', {
|
||||
[resultSyncData.id]: resultSyncData,
|
||||
}))
|
||||
.then(() => saveNextItem());
|
||||
});
|
||||
await saveNextItem();
|
||||
}
|
||||
return promise;
|
||||
});
|
||||
return promise;
|
||||
});
|
||||
await saveNextItem();
|
||||
|
||||
// Called until no item to remove
|
||||
const removeNextItem = ifNotTooLate(() => {
|
||||
const removeNextItem = ifNotTooLate(async () => {
|
||||
const storeItemMap = {
|
||||
...store.state.file.itemMap,
|
||||
...store.state.folder.itemMap,
|
||||
@ -587,37 +574,41 @@ function syncWorkspace() {
|
||||
...store.state.content.itemMap,
|
||||
};
|
||||
const syncData = store.getters['data/syncData'];
|
||||
let promise;
|
||||
Object.entries(syncData).some(([, existingSyncData]) => {
|
||||
if (!storeItemMap[existingSyncData.itemId] &&
|
||||
const syncDataToRemove = utils.deepCopy(utils.someResult(
|
||||
Object.values(syncData),
|
||||
existingSyncData => !storeItemMap[existingSyncData.itemId]
|
||||
// We don't want to delete data items, especially on first sync
|
||||
existingSyncData.type !== 'data' &&
|
||||
&& existingSyncData.type !== 'data'
|
||||
// Remove content only if file has been removed
|
||||
(existingSyncData.type !== 'content' || !storeItemMap[existingSyncData.itemId.split('/')[0]])
|
||||
) {
|
||||
&& (existingSyncData.type !== 'content'
|
||||
|| !storeItemMap[existingSyncData.itemId.split('/')[0]])
|
||||
&& existingSyncData,
|
||||
));
|
||||
|
||||
if (syncDataToRemove) {
|
||||
// Use deepCopy to freeze objects
|
||||
const syncDataToRemove = utils.deepCopy(existingSyncData);
|
||||
promise = workspaceProvider
|
||||
.removeItem(syncDataToRemove, ifNotTooLate)
|
||||
.then(() => {
|
||||
await workspaceProvider.removeItem(syncDataToRemove, ifNotTooLate);
|
||||
const syncDataCopy = { ...store.getters['data/syncData'] };
|
||||
delete syncDataCopy[syncDataToRemove.id];
|
||||
store.dispatch('data/setSyncData', syncDataCopy);
|
||||
})
|
||||
.then(() => removeNextItem());
|
||||
await removeNextItem();
|
||||
}
|
||||
return promise;
|
||||
});
|
||||
return promise;
|
||||
});
|
||||
await removeNextItem();
|
||||
|
||||
// Sync settings and workspaces only in the main workspace
|
||||
if (workspace.id === 'main') {
|
||||
await syncDataItem('settings');
|
||||
await syncDataItem('workspaces');
|
||||
}
|
||||
await syncDataItem('templates');
|
||||
|
||||
const getOneFileIdToSync = () => {
|
||||
const contentIds = [...new Set([
|
||||
...Object.keys(localDbSvc.hashMap.content),
|
||||
...store.getters['file/items'].map(file => `${file.id}/content`),
|
||||
])];
|
||||
let fileId;
|
||||
contentIds.some((contentId) => {
|
||||
return utils.someResult(contentIds, (contentId) => {
|
||||
// Get content hash from itemMap or from localDbSvc if not loaded
|
||||
const loadedContent = store.state.content.itemMap[contentId];
|
||||
const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
|
||||
@ -628,88 +619,64 @@ function syncWorkspace() {
|
||||
// And if syncData does not exist or if content hash and syncData hash are inconsistent
|
||||
(!syncData || syncData.hash !== hash)
|
||||
) {
|
||||
[fileId] = contentId.split('/');
|
||||
const [fileId] = contentId.split('/');
|
||||
return fileId;
|
||||
}
|
||||
return fileId;
|
||||
});
|
||||
return fileId;
|
||||
};
|
||||
|
||||
const syncNextFile = () => {
|
||||
const fileId = getOneFileIdToSync();
|
||||
if (!fileId) {
|
||||
return null;
|
||||
}
|
||||
return syncFile(fileId, syncContext)
|
||||
.then(() => syncNextFile());
|
||||
});
|
||||
};
|
||||
|
||||
const onSyncEnd = () => Promise.resolve(workspaceProvider.onSyncEnd
|
||||
&& workspaceProvider.onSyncEnd());
|
||||
const syncNextFile = async () => {
|
||||
const fileId = getOneFileIdToSync();
|
||||
if (fileId) {
|
||||
await syncFile(fileId, syncContext);
|
||||
await syncNextFile();
|
||||
}
|
||||
};
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => saveNextItem())
|
||||
.then(() => removeNextItem())
|
||||
// Sync settings only in the main workspace
|
||||
.then(() => workspace.id === 'main' && syncDataItem('settings'))
|
||||
// Sync workspaces only in the main workspace
|
||||
.then(() => workspace.id === 'main' && syncDataItem('workspaces'))
|
||||
.then(() => syncDataItem('templates'))
|
||||
.then(() => {
|
||||
const currentFileId = store.getters['file/current'].id;
|
||||
if (currentFileId) {
|
||||
// Sync current file first
|
||||
return syncFile(currentFileId, syncContext)
|
||||
.then(() => syncNextFile());
|
||||
await syncFile(currentFileId, syncContext);
|
||||
}
|
||||
return syncNextFile();
|
||||
})
|
||||
.then(
|
||||
() => onSyncEnd(),
|
||||
err => onSyncEnd().then(() => {
|
||||
throw err;
|
||||
}, () => {
|
||||
throw err;
|
||||
}),
|
||||
)
|
||||
.then(
|
||||
() => {
|
||||
await syncNextFile();
|
||||
|
||||
if (syncContext.restart) {
|
||||
// Restart sync
|
||||
return syncWorkspace();
|
||||
await syncWorkspace();
|
||||
}
|
||||
return null;
|
||||
},
|
||||
(err) => {
|
||||
} catch (err) {
|
||||
if (err && err.message === 'TOO_LATE') {
|
||||
// Restart sync
|
||||
return syncWorkspace();
|
||||
}
|
||||
await syncWorkspace();
|
||||
} else {
|
||||
throw err;
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
if (workspaceProvider.onSyncEnd) {
|
||||
workspaceProvider.onSyncEnd();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Enqueue a sync task, if possible.
|
||||
*/
|
||||
function requestSync() {
|
||||
const requestSync = () => {
|
||||
// No sync in light mode
|
||||
if (store.state.light) {
|
||||
return;
|
||||
}
|
||||
|
||||
store.dispatch('queue/enqueueSyncRequest', () => new Promise((resolve, reject) => {
|
||||
store.dispatch('queue/enqueueSyncRequest', async () => {
|
||||
let intervalId;
|
||||
const attempt = () => {
|
||||
const attempt = async () => {
|
||||
// Only start syncing when these conditions are met
|
||||
if (networkSvc.isUserActive() && isSyncWindow()) {
|
||||
clearInterval(intervalId);
|
||||
if (!isSyncPossible()) {
|
||||
// Cancel sync
|
||||
reject(new Error('Sync not possible.'));
|
||||
return;
|
||||
throw new Error('Sync not possible.');
|
||||
}
|
||||
|
||||
// Determine if we have to clean files
|
||||
@ -729,25 +696,17 @@ function requestSync() {
|
||||
// Call setLastSyncActivity periodically
|
||||
intervalId = utils.setInterval(() => setLastSyncActivity(), 1000);
|
||||
setLastSyncActivity();
|
||||
const cleaner = cb => (res) => {
|
||||
clearInterval(intervalId);
|
||||
cb(res);
|
||||
};
|
||||
|
||||
Promise.resolve()
|
||||
.then(() => {
|
||||
try {
|
||||
if (isWorkspaceSyncPossible()) {
|
||||
return syncWorkspace();
|
||||
}
|
||||
if (hasCurrentFileSyncLocations()) {
|
||||
await syncWorkspace();
|
||||
} else if (hasCurrentFileSyncLocations()) {
|
||||
// Only sync current file if workspace sync is unavailable.
|
||||
// We could also sync files that are out-of-sync but it would
|
||||
// We could sync all files that are out-of-sync but it would
|
||||
// require loading all the syncedContent objects from the DB.
|
||||
return syncFile(store.getters['file/current'].id);
|
||||
await syncFile(store.getters['file/current'].id);
|
||||
}
|
||||
return null;
|
||||
})
|
||||
.then(() => {
|
||||
|
||||
// Clean files
|
||||
Object.entries(fileHashesToClean).forEach(([fileId, fileHash]) => {
|
||||
const file = store.state.file.itemMap[fileId];
|
||||
@ -755,44 +714,48 @@ function requestSync() {
|
||||
fileSvc.deleteFile(fileId);
|
||||
}
|
||||
});
|
||||
})
|
||||
.then(cleaner(resolve), cleaner(reject));
|
||||
} finally {
|
||||
clearInterval(intervalId);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
intervalId = utils.setInterval(() => attempt(), 1000);
|
||||
attempt();
|
||||
}));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export default {
|
||||
init() {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
async init() {
|
||||
// Load workspaces and tokens from localStorage
|
||||
localDbSvc.syncLocalStorage();
|
||||
|
||||
// Try to find a suitable action provider
|
||||
actionProvider = providerRegistry.providers[utils.queryParams.providerId];
|
||||
return actionProvider && actionProvider.initAction && actionProvider.initAction();
|
||||
})
|
||||
.then(() => {
|
||||
if (actionProvider && actionProvider.initAction) {
|
||||
await actionProvider.initAction();
|
||||
}
|
||||
|
||||
// Try to find a suitable workspace sync provider
|
||||
workspaceProvider = providerRegistry.providers[utils.queryParams.providerId];
|
||||
if (!workspaceProvider || !workspaceProvider.initWorkspace) {
|
||||
workspaceProvider = googleDriveAppDataProvider;
|
||||
}
|
||||
return workspaceProvider.initWorkspace();
|
||||
})
|
||||
.then(workspace => store.dispatch('workspace/setCurrentWorkspaceId', workspace.id))
|
||||
.then(() => localDbSvc.init())
|
||||
.then(() => {
|
||||
const workspace = await workspaceProvider.initWorkspace();
|
||||
store.dispatch('workspace/setCurrentWorkspaceId', workspace.id);
|
||||
await localDbSvc.init();
|
||||
|
||||
// Try to find a suitable action provider
|
||||
actionProvider = providerRegistry.providers[utils.queryParams.providerId] || actionProvider;
|
||||
return actionProvider && actionProvider.performAction && actionProvider.performAction()
|
||||
.then(newSyncLocation => newSyncLocation && this.createSyncLocation(newSyncLocation));
|
||||
})
|
||||
.then(() => tempFileSvc.init())
|
||||
.then(() => {
|
||||
if (actionProvider && actionProvider.performAction) {
|
||||
const newSyncLocation = await actionProvider.performAction();
|
||||
if (newSyncLocation) {
|
||||
this.createSyncLocation(newSyncLocation);
|
||||
}
|
||||
}
|
||||
|
||||
await tempFileSvc.init();
|
||||
|
||||
if (!store.state.light) {
|
||||
// Sync periodically
|
||||
utils.setInterval(() => {
|
||||
@ -813,7 +776,6 @@ export default {
|
||||
}
|
||||
}, 5000);
|
||||
}
|
||||
});
|
||||
},
|
||||
isSyncPossible,
|
||||
requestSync,
|
||||
|
@ -25,20 +25,19 @@ export default {
|
||||
}
|
||||
this.closed = true;
|
||||
},
|
||||
init() {
|
||||
async init() {
|
||||
if (!origin || !window.parent) {
|
||||
return Promise.resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
store.commit('setLight', true);
|
||||
|
||||
return fileSvc.createFile({
|
||||
const file = await fileSvc.createFile({
|
||||
name: fileName || utils.getHostname(origin),
|
||||
text: contentText || '\n',
|
||||
properties: contentProperties,
|
||||
parentId: 'temp',
|
||||
}, true)
|
||||
.then((file) => {
|
||||
}, true);
|
||||
|
||||
const fileItemMap = store.state.file.itemMap;
|
||||
|
||||
// Sanitize file creations
|
||||
@ -94,6 +93,5 @@ export default {
|
||||
// Watch preview refresh and file name changes
|
||||
editorSvc.$on('previewCtx', onChange);
|
||||
store.watch(() => store.getters['file/current'].name, onChange);
|
||||
});
|
||||
},
|
||||
};
|
||||
|
@ -16,7 +16,7 @@ export default {
|
||||
promised[id] = true;
|
||||
store.commit('userInfo/addItem', { id, name, imageUrl });
|
||||
},
|
||||
getInfo(userId) {
|
||||
async getInfo(userId) {
|
||||
if (!promised[userId]) {
|
||||
const [type, sub] = parseUserId(userId);
|
||||
|
||||
@ -33,27 +33,26 @@ export default {
|
||||
if (!store.state.offline) {
|
||||
promised[userId] = true;
|
||||
switch (type) {
|
||||
case 'github': {
|
||||
return githubHelper.getUser(sub)
|
||||
.catch((err) => {
|
||||
case 'github':
|
||||
try {
|
||||
await githubHelper.getUser(sub);
|
||||
} catch (err) {
|
||||
if (err.status !== 404) {
|
||||
promised[userId] = false;
|
||||
}
|
||||
});
|
||||
}
|
||||
break;
|
||||
case 'google':
|
||||
default: {
|
||||
return googleHelper.getUser(sub)
|
||||
.catch((err) => {
|
||||
default:
|
||||
try {
|
||||
await googleHelper.getUser(sub);
|
||||
} catch (err) {
|
||||
if (err.status !== 404) {
|
||||
promised[userId] = false;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
};
@ -224,6 +224,14 @@ export default {
};
return runWithNextValue();
},
someResult(values, func) {
let result;
values.some((value) => {
result = func(value);
return result;
});
return result;
},
parseQueryParams,
addQueryParams(url = '', params = {}, hash = false) {
const keys = Object.keys(params).filter(key => params[key] != null);
|
||||
|
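someResult is a small helper: it runs func over values and returns the first truthy result (Array#some, but keeping the mapped value). syncSvc uses it above to find the last merged content by hash. Example with made-up values:

// Returns the first truthy mapped value, or undefined if none matches.
const historyData = { 111: { text: 'v1' }, 222: { text: 'v2' } };
const lastMerged = utils.someResult([333, 222, 111], hash => historyData[hash]);
// lastMerged === { text: 'v2' }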
@ -31,11 +31,11 @@ module.mutations = {
|
||||
|
||||
module.getters = {
|
||||
...module.getters,
|
||||
current: (state, getters, rootState, rootGetters) => {
|
||||
if (state.revisionContent) {
|
||||
return state.revisionContent;
|
||||
current: ({ itemMap, revisionContent }, getters, rootState, rootGetters) => {
|
||||
if (revisionContent) {
|
||||
return revisionContent;
|
||||
}
|
||||
return state.itemMap[`${rootGetters['file/current'].id}/content`] || empty();
|
||||
return itemMap[`${rootGetters['file/current'].id}/content`] || empty();
|
||||
},
|
||||
currentChangeTrigger: (state, getters) => {
|
||||
const { current } = getters;
|
||||
@ -45,11 +45,9 @@ module.getters = {
|
||||
current.hash,
|
||||
]);
|
||||
},
|
||||
currentProperties: (state, getters) => utils.computeProperties(getters.current.properties),
|
||||
isCurrentEditable: (state, getters, rootState, rootGetters) =>
|
||||
!state.revisionContent &&
|
||||
getters.current.id &&
|
||||
rootGetters['layout/styles'].showEditor,
|
||||
currentProperties: (state, { current }) => utils.computeProperties(current.properties),
|
||||
isCurrentEditable: ({ revisionContent }, { current }, rootState, rootGetters) =>
|
||||
!revisionContent && current.id && rootGetters['layout/styles'].showEditor,
|
||||
};
|
||||
|
||||
module.actions = {
|
||||
@ -76,7 +74,7 @@ module.actions = {
|
||||
});
|
||||
}
|
||||
},
|
||||
restoreRevision({
|
||||
async restoreRevision({
|
||||
state,
|
||||
getters,
|
||||
commit,
|
||||
@ -84,8 +82,7 @@ module.actions = {
|
||||
}) {
|
||||
const { revisionContent } = state;
|
||||
if (revisionContent) {
|
||||
dispatch('modal/fileRestoration', null, { root: true })
|
||||
.then(() => {
|
||||
await dispatch('modal/fileRestoration', null, { root: true });
|
||||
// Close revision
|
||||
commit('setRevisionContent');
|
||||
const currentContent = utils.deepCopy(getters.current);
|
||||
@ -108,7 +105,6 @@ module.actions = {
|
||||
text: revisionContent.originalText,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
@ -5,8 +5,8 @@ const module = moduleTemplate(empty, true);
|
||||
|
||||
module.getters = {
|
||||
...module.getters,
|
||||
current: (state, getters, rootState, rootGetters) =>
|
||||
state.itemMap[`${rootGetters['file/current'].id}/contentState`] || empty(),
|
||||
current: ({ itemMap }, getters, rootState, rootGetters) =>
|
||||
itemMap[`${rootGetters['file/current'].id}/contentState`] || empty(),
|
||||
};
|
||||
|
||||
module.actions = {
|
||||
|
@ -104,7 +104,7 @@ export default {
lsItemMap: {},
},
mutations: {
setItem: (state, value) => {
setItem: ({ itemMap, lsItemMap }, value) => {
// Create an empty item and override its data field
const emptyItem = empty(value.id);
const data = typeof value.data === 'object'
@ -118,19 +118,19 @@ export default {
});

// Store item in itemMap or lsItemMap if it's stored in localStorage
Vue.set(lsItemIdSet.has(item.id) ? state.lsItemMap : state.itemMap, item.id, item);
Vue.set(lsItemIdSet.has(item.id) ? lsItemMap : itemMap, item.id, item);
},
deleteItem(state, id) {
deleteItem({ itemMap }, id) {
// Only used by localDbSvc to clean itemMap from objects moved to localStorage
Vue.delete(state.itemMap, id);
Vue.delete(itemMap, id);
},
},
|
||||
getters: {
|
||||
workspaces: getter('workspaces'),
|
||||
sanitizedWorkspaces: (state, getters, rootState, rootGetters) => {
|
||||
sanitizedWorkspaces: (state, { workspaces }, rootState, rootGetters) => {
|
||||
const sanitizedWorkspaces = {};
|
||||
const mainWorkspaceToken = rootGetters['workspace/mainWorkspaceToken'];
|
||||
Object.entries(getters.workspaces).forEach(([id, workspace]) => {
|
||||
Object.entries(workspaces).forEach(([id, workspace]) => {
|
||||
const sanitizedWorkspace = {
|
||||
id,
|
||||
providerId: mainWorkspaceToken && 'googleDriveAppData',
|
||||
@ -146,9 +146,9 @@ export default {
|
||||
return sanitizedWorkspaces;
|
||||
},
|
||||
settings: getter('settings'),
|
||||
computedSettings: (state, getters) => {
|
||||
const customSettings = yaml.safeLoad(getters.settings);
|
||||
const settings = yaml.safeLoad(defaultSettings);
|
||||
computedSettings: (state, { settings }) => {
|
||||
const customSettings = yaml.safeLoad(settings);
|
||||
const parsedSettings = yaml.safeLoad(defaultSettings);
|
||||
const override = (obj, opt) => {
|
||||
const objType = Object.prototype.toString.call(obj);
|
||||
const optType = Object.prototype.toString.call(opt);
|
||||
@ -166,44 +166,44 @@ export default {
|
||||
});
|
||||
return obj;
|
||||
};
|
||||
return override(settings, customSettings);
|
||||
return override(parsedSettings, customSettings);
|
||||
},
|
||||
localSettings: getter('localSettings'),
|
||||
layoutSettings: getter('layoutSettings'),
|
||||
templates: getter('templates'),
|
||||
allTemplates: (state, getters) => ({
|
||||
...getters.templates,
|
||||
allTemplates: (state, { templates }) => ({
|
||||
...templates,
|
||||
...additionalTemplates,
|
||||
}),
|
||||
lastCreated: getter('lastCreated'),
|
||||
lastOpened: getter('lastOpened'),
|
||||
lastOpenedIds: (state, getters, rootState) => {
|
||||
const lastOpened = {
|
||||
...getters.lastOpened,
|
||||
lastOpenedIds: (state, { lastOpened }, rootState) => {
|
||||
const result = {
|
||||
...lastOpened,
|
||||
};
|
||||
const currentFileId = rootState.file.currentId;
|
||||
if (currentFileId && !lastOpened[currentFileId]) {
|
||||
lastOpened[currentFileId] = Date.now();
|
||||
if (currentFileId && !result[currentFileId]) {
|
||||
result[currentFileId] = Date.now();
|
||||
}
|
||||
return Object.keys(lastOpened)
|
||||
return Object.keys(result)
|
||||
.filter(id => rootState.file.itemMap[id])
|
||||
.sort((id1, id2) => lastOpened[id2] - lastOpened[id1])
|
||||
.sort((id1, id2) => result[id2] - result[id1])
|
||||
.slice(0, 20);
|
||||
},
|
||||
syncData: getter('syncData'),
|
||||
syncDataByItemId: (state, getters) => {
|
||||
syncDataByItemId: (state, { syncData }) => {
|
||||
const result = {};
|
||||
Object.entries(getters.syncData).forEach(([, value]) => {
|
||||
Object.entries(syncData).forEach(([, value]) => {
|
||||
result[value.itemId] = value;
|
||||
});
|
||||
return result;
|
||||
},
|
||||
syncDataByType: (state, getters) => {
|
||||
syncDataByType: (state, { syncData }) => {
|
||||
const result = {};
|
||||
utils.types.forEach((type) => {
|
||||
result[type] = {};
|
||||
});
|
||||
Object.entries(getters.syncData).forEach(([, item]) => {
|
||||
Object.entries(syncData).forEach(([, item]) => {
|
||||
if (result[item.type]) {
|
||||
result[item.type][item.itemId] = item;
|
||||
}
|
||||
@ -212,12 +212,12 @@ export default {
|
||||
},
|
||||
dataSyncData: getter('dataSyncData'),
|
||||
tokens: getter('tokens'),
|
||||
googleTokens: (state, getters) => getters.tokens.google || {},
|
||||
couchdbTokens: (state, getters) => getters.tokens.couchdb || {},
|
||||
dropboxTokens: (state, getters) => getters.tokens.dropbox || {},
|
||||
githubTokens: (state, getters) => getters.tokens.github || {},
|
||||
wordpressTokens: (state, getters) => getters.tokens.wordpress || {},
|
||||
zendeskTokens: (state, getters) => getters.tokens.zendesk || {},
|
||||
googleTokens: (state, { tokens }) => tokens.google || {},
|
||||
couchdbTokens: (state, { tokens }) => tokens.couchdb || {},
|
||||
dropboxTokens: (state, { tokens }) => tokens.dropbox || {},
|
||||
githubTokens: (state, { tokens }) => tokens.github || {},
|
||||
wordpressTokens: (state, { tokens }) => tokens.wordpress || {},
|
||||
zendeskTokens: (state, { tokens }) => tokens.zendesk || {},
|
||||
},
|
||||
actions: {
|
||||
setWorkspaces: setter('workspaces'),
|
||||
|
@ -59,8 +59,8 @@ export default {
},
},
getters: {
newDiscussion: state =>
state.currentDiscussionId === state.newDiscussionId && state.newDiscussion,
newDiscussion: ({ currentDiscussionId, newDiscussionId, newDiscussion }) =>
currentDiscussionId === newDiscussionId && newDiscussion,
currentFileDiscussionLastComments: (state, getters, rootState, rootGetters) => {
const { discussions, comments } = rootGetters['content/current'];
const discussionLastComments = {};
@ -74,14 +74,18 @@ export default {
});
return discussionLastComments;
},
currentFileDiscussions: (state, getters, rootState, rootGetters) => {
currentFileDiscussions: (
{ newDiscussionId },
{ newDiscussion, currentFileDiscussionLastComments },
rootState,
rootGetters,
) => {
const currentFileDiscussions = {};
const { newDiscussion } = getters;
if (newDiscussion) {
currentFileDiscussions[state.newDiscussionId] = newDiscussion;
currentFileDiscussions[newDiscussionId] = newDiscussion;
}
const { discussions } = rootGetters['content/current'];
Object.entries(getters.currentFileDiscussionLastComments)
Object.entries(currentFileDiscussionLastComments)
.sort(([, lastComment1], [, lastComment2]) =>
lastComment1.created - lastComment2.created)
.forEach(([discussionId]) => {
@ -89,17 +93,22 @@ export default {
});
return currentFileDiscussions;
},
currentDiscussion: (state, getters) =>
getters.currentFileDiscussions[state.currentDiscussionId],
currentDiscussion: ({ currentDiscussionId }, { currentFileDiscussions }) =>
currentFileDiscussions[currentDiscussionId],
previousDiscussionId: idShifter(-1),
nextDiscussionId: idShifter(1),
currentDiscussionComments: (state, getters, rootState, rootGetters) => {
currentDiscussionComments: (
{ currentDiscussionId },
{ currentDiscussion },
rootState,
rootGetters,
) => {
const comments = {};
if (getters.currentDiscussion) {
if (currentDiscussion) {
const contentComments = rootGetters['content/current'].comments;
Object.entries(contentComments)
.filter(([, comment]) =>
comment.discussionId === state.currentDiscussionId)
comment.discussionId === currentDiscussionId)
.sort(([, comment1], [, comment2]) =>
comment1.created - comment2.created)
.forEach(([commentId, comment]) => {
@ -108,10 +117,12 @@ export default {
}
return comments;
},
currentDiscussionLastCommentId: (state, getters) =>
Object.keys(getters.currentDiscussionComments).pop(),
currentDiscussionLastComment: (state, getters) =>
getters.currentDiscussionComments[getters.currentDiscussionLastCommentId],
currentDiscussionLastCommentId: (state, { currentDiscussionComments }) =>
Object.keys(currentDiscussionComments).pop(),
currentDiscussionLastComment: (
state,
{ currentDiscussionComments, currentDiscussionLastCommentId },
) => currentDiscussionComments[currentDiscussionLastCommentId],
},
actions: {
cancelNewComment({ commit, getters }) {
@ -120,15 +131,15 @@ export default {
commit('setCurrentDiscussionId', getters.nextDiscussionId);
}
},
createNewDiscussion({ commit, dispatch, rootGetters }, selection) {
async createNewDiscussion({ commit, dispatch, rootGetters }, selection) {
const loginToken = rootGetters['workspace/loginToken'];
if (!loginToken) {
dispatch('modal/signInForComment', {
onResolve: () => googleHelper.signin()
.then(() => syncSvc.requestSync())
.then(() => dispatch('createNewDiscussion', selection)),
}, { root: true })
.catch(() => { /* Cancel */ });
try {
await dispatch('modal/signInForComment', null, { root: true });
await googleHelper.signin();
syncSvc.requestSync();
await dispatch('createNewDiscussion', selection);
} catch (e) { /* cancel */ }
} else if (selection) {
let text = rootGetters['content/current'].text.slice(selection.start, selection.end).trim();
const maxLength = 80;

@ -44,11 +44,11 @@ const fakeFileNode = new Node(emptyFile());
fakeFileNode.item.id = 'fake';
fakeFileNode.noDrag = true;

function getParent(node, getters) {
if (node.isNil) {
function getParent({ item, isNil }, { nodeMap, rootNode }) {
if (isNil) {
return nilFileNode;
}
return getters.nodeMap[node.item.parentId] || getters.rootNode;
return nodeMap[item.parentId] || rootNode;
}

function getFolder(node, getters) {
@ -67,6 +67,21 @@ export default {
newChildNode: nilFileNode,
openNodes: {},
},
mutations: {
setSelectedId: setter('selectedId'),
setEditingId: setter('editingId'),
setDragSourceId: setter('dragSourceId'),
setDragTargetId: setter('dragTargetId'),
setNewItem(state, item) {
state.newChildNode = item ? new Node(item, [], item.type === 'folder') : nilFileNode;
},
setNewItemName(state, name) {
state.newChildNode.item.name = name;
},
toggleOpenNode(state, id) {
Vue.set(state.openNodes, id, !state.openNodes[id]);
},
},
getters: {
nodeStructure: (state, getters, rootState, rootGetters) => {
const rootNode = new Node(emptyFolder(), [], true, true);
@ -138,41 +153,26 @@ export default {
rootNode,
};
},
nodeMap: (state, getters) => getters.nodeStructure.nodeMap,
rootNode: (state, getters) => getters.nodeStructure.rootNode,
nodeMap: (state, { nodeStructure }) => nodeStructure.nodeMap,
rootNode: (state, { nodeStructure }) => nodeStructure.rootNode,
newChildNodeParent: (state, getters) => getParent(state.newChildNode, getters),
selectedNode: (state, getters) => getters.nodeMap[state.selectedId] || nilFileNode,
selectedNode: ({ selectedId }, { nodeMap }) => nodeMap[selectedId] || nilFileNode,
selectedNodeFolder: (state, getters) => getFolder(getters.selectedNode, getters),
editingNode: (state, getters) => getters.nodeMap[state.editingId] || nilFileNode,
dragSourceNode: (state, getters) => getters.nodeMap[state.dragSourceId] || nilFileNode,
dragTargetNode: (state, getters) => {
if (state.dragTargetId === 'fake') {
editingNode: ({ editingId }, { nodeMap }) => nodeMap[editingId] || nilFileNode,
dragSourceNode: ({ dragSourceId }, { nodeMap }) => nodeMap[dragSourceId] || nilFileNode,
dragTargetNode: ({ dragTargetId }, { nodeMap }) => {
if (dragTargetId === 'fake') {
return fakeFileNode;
}
return getters.nodeMap[state.dragTargetId] || nilFileNode;
return nodeMap[dragTargetId] || nilFileNode;
},
dragTargetNodeFolder: (state, getters) => {
if (state.dragTargetId === 'fake') {
dragTargetNodeFolder: ({ dragTargetId }, getters) => {
if (dragTargetId === 'fake') {
return getters.rootNode;
}
return getFolder(getters.dragTargetNode, getters);
},
},
mutations: {
setSelectedId: setter('selectedId'),
setEditingId: setter('editingId'),
setDragSourceId: setter('dragSourceId'),
setDragTargetId: setter('dragTargetId'),
setNewItem(state, item) {
state.newChildNode = item ? new Node(item, [], item.type === 'folder') : nilFileNode;
},
setNewItemName(state, name) {
state.newChildNode.item.name = name;
},
toggleOpenNode(state, id) {
Vue.set(state.openNodes, id, !state.openNodes[id]);
},
},
actions: {
openNode({
state,

@ -10,10 +10,10 @@ module.state = {

module.getters = {
...module.getters,
current: state => state.itemMap[state.currentId] || empty(),
isCurrentTemp: (state, getters) => getters.current.parentId === 'temp',
lastOpened: (state, getters, rootState, rootGetters) =>
state.itemMap[rootGetters['data/lastOpenedIds'][0]] || getters.items[0] || empty(),
current: ({ itemMap, currentId }) => itemMap[currentId] || empty(),
isCurrentTemp: (state, { current }) => current.parentId === 'temp',
lastOpened: ({ itemMap }, { items }, rootState, rootGetters) =>
itemMap[rootGetters['data/lastOpenedIds'][0]] || items[0] || empty(),
};

module.mutations = {

@ -54,13 +54,33 @@ const store = new Vuex.Store({
minuteCounter: 0,
monetizeSponsor: false,
},
mutations: {
setLight: (state, value) => {
state.light = value;
},
setOffline: (state, value) => {
state.offline = value;
},
updateLastOfflineCheck: (state) => {
state.lastOfflineCheck = Date.now();
},
updateMinuteCounter: (state) => {
state.minuteCounter += 1;
},
setMonetizeSponsor: (state, value) => {
state.monetizeSponsor = value;
},
setGoogleSponsor: (state, value) => {
state.googleSponsor = value;
},
},
getters: {
allItemMap: (state) => {
const result = {};
utils.types.forEach(type => Object.assign(result, state[type].itemMap));
return result;
},
itemPaths: (state) => {
itemPaths: (state, getters) => {
const result = {};
const folderMap = state.folder.itemMap;
const getPath = (item) => {
@ -84,8 +104,10 @@ const store = new Vuex.Store({
result[item.id] = itemPath;
return itemPath;
};

[...state.folder.items, ...state.file.items].forEach(item => getPath(item));
[
...getters['folder/items'],
...getters['file/items'],
].forEach(item => getPath(item));
return result;
},
pathItems: (state, { allItemMap, itemPaths }) => {
@ -97,29 +119,9 @@ const store = new Vuex.Store({
});
return result;
},
isSponsor: (state, getters) => {
isSponsor: ({ light, monetizeSponsor }, getters) => {
const sponsorToken = getters['workspace/sponsorToken'];
return state.light || state.monetizeSponsor || (sponsorToken && sponsorToken.isSponsor);
},
},
mutations: {
setLight: (state, value) => {
state.light = value;
},
setOffline: (state, value) => {
state.offline = value;
},
updateLastOfflineCheck: (state) => {
state.lastOfflineCheck = Date.now();
},
updateMinuteCounter: (state) => {
state.minuteCounter += 1;
},
setMonetizeSponsor: (state, value) => {
state.monetizeSponsor = value;
},
setGoogleSponsor: (state, value) => {
state.googleSponsor = value;
return light || monetizeSponsor || (sponsorToken && sponsorToken.isSponsor);
},
},
actions: {

@ -12,22 +12,22 @@ export default (empty) => {

module.getters = {
...module.getters,
groupedByFileId: (state, getters) => {
groupedByFileId: (state, { items }) => {
const groups = {};
getters.items.forEach(item => addToGroup(groups, item));
items.forEach(item => addToGroup(groups, item));
return groups;
},
filteredGroupedByFileId: (state, getters) => {
filteredGroupedByFileId: (state, { items }) => {
const groups = {};
getters.items.filter((item) => {
items.filter((item) => {
// Filter items that we can't use
const provider = providerRegistry.providers[item.providerId];
return provider && provider.getToken(item);
}).forEach(item => addToGroup(groups, item));
return groups;
},
current: (state, getters, rootState, rootGetters) => {
const locations = getters.filteredGroupedByFileId[rootGetters['file/current'].id] || [];
current: (state, { filteredGroupedByFileId }, rootState, rootGetters) => {
const locations = filteredGroupedByFileId[rootGetters['file/current'].id] || [];
return locations.map((location) => {
const provider = providerRegistry.providers[location.providerId];
return {

@ -13,39 +13,28 @@ export default {
},
},
getters: {
config: state => !state.hidden && state.stack[0],
config: ({ hidden, stack }) => !hidden && stack[0],
},
actions: {
open({ commit, state }, param) {
return new Promise((resolve, reject) => {
async open({ commit, state }, param) {
const config = typeof param === 'object' ? { ...param } : { type: param };
const clean = () => commit('setStack', state.stack.filter((otherConfig => otherConfig !== config)));
config.resolve = (result) => {
clean();
if (config.onResolve) {
// Call onResolve immediately (mostly to prevent browsers from blocking popup windows)
config.onResolve(result)
.then(res => resolve(res));
} else {
resolve(result);
}
};
config.reject = (error) => {
clean();
reject(error);
};
try {
return await new Promise((resolve, reject) => {
config.resolve = resolve;
config.reject = reject;
commit('setStack', [config, ...state.stack]);
});
} finally {
commit('setStack', state.stack.filter((otherConfig => otherConfig !== config)));
}
},
hideUntil({ commit }, promise) {
async hideUntil({ commit }, promise) {
try {
commit('setHidden', true);
return promise.then((res) => {
return await promise;
} finally {
commit('setHidden', false);
return res;
}, (err) => {
commit('setHidden', false);
throw err;
});
}
},
folderDeletion: ({ dispatch }, item) => dispatch('open', {
content: `<p>You are about to delete the folder <b>${item.name}</b>. Its files will be moved to Trash. Are you sure?</p>`,
@ -105,39 +94,34 @@ export default {
resolveText: 'Yes, clean',
rejectText: 'No',
}),
providerRedirection: ({ dispatch }, { providerName, onResolve }) => dispatch('open', {
providerRedirection: ({ dispatch }, { providerName }) => dispatch('open', {
content: `<p>You are about to navigate to the <b>${providerName}</b> authorization page.</p>`,
resolveText: 'Ok, go on',
rejectText: 'Cancel',
onResolve,
}),
workspaceGoogleRedirection: ({ dispatch }, { onResolve }) => dispatch('open', {
workspaceGoogleRedirection: ({ dispatch }) => dispatch('open', {
content: '<p>StackEdit needs full Google Drive access to open this workspace.</p>',
resolveText: 'Ok, grant',
rejectText: 'Cancel',
onResolve,
}),
signInForSponsorship: ({ dispatch }, { onResolve }) => dispatch('open', {
signInForSponsorship: ({ dispatch }) => dispatch('open', {
type: 'signInForSponsorship',
content: `<p>You have to sign in with Google to sponsor.</p>
<div class="modal__info"><b>Note:</b> This will sync your main workspace.</div>`,
resolveText: 'Ok, sign in',
rejectText: 'Cancel',
onResolve,
}),
signInForComment: ({ dispatch }, { onResolve }) => dispatch('open', {
signInForComment: ({ dispatch }) => dispatch('open', {
content: `<p>You have to sign in with Google to start commenting.</p>
<div class="modal__info"><b>Note:</b> This will sync your main workspace.</div>`,
resolveText: 'Ok, sign in',
rejectText: 'Cancel',
onResolve,
}),
signInForHistory: ({ dispatch }, { onResolve }) => dispatch('open', {
signInForHistory: ({ dispatch }) => dispatch('open', {
content: `<p>You have to sign in with Google to enable revision history.</p>
<div class="modal__info"><b>Note:</b> This will sync your main workspace.</div>`,
resolveText: 'Ok, sign in',
rejectText: 'Cancel',
onResolve,
}),
sponsorOnly: ({ dispatch }) => dispatch('open', {
content: '<p>This feature is restricted to sponsors as it relies on server resources.</p>',

@ -11,7 +11,7 @@ export default (empty, simpleHash = false) => {
itemMap: {},
},
getters: {
items: state => Object.values(state.itemMap),
items: ({ itemMap }) => Object.values(itemMap),
},
mutations: {
setItem(state, value) {

@ -71,16 +71,13 @@ export default {
}));
}
},
doWithLocation({ commit }, { location, promise }) {
async doWithLocation({ commit }, { location, action }) {
try {
commit('setCurrentLocation', location);
return promise
.then((res) => {
return await action();
} finally {
commit('setCurrentLocation', {});
return res;
}, (err) => {
commit('setCurrentLocation', {});
throw err;
});
}
},
},
};

@ -5,8 +5,8 @@ const module = moduleTemplate(empty, true);

module.getters = {
...module.getters,
current: (state, getters, rootState, rootGetters) =>
state.itemMap[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
current: ({ itemMap }, getters, rootState, rootGetters) =>
itemMap[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
};

export default module;

@ -6,8 +6,8 @@ export default {
itemMap: {},
},
mutations: {
addItem: (state, item) => {
Vue.set(state.itemMap, item.id, item);
addItem: ({ itemMap }, item) => {
Vue.set(itemMap, item.id, item);
},
},
};

@ -19,54 +19,50 @@ export default {
const workspaces = rootGetters['data/sanitizedWorkspaces'];
return workspaces.main;
},
currentWorkspace: (state, getters, rootState, rootGetters) => {
currentWorkspace: ({ currentWorkspaceId }, { mainWorkspace }, rootState, rootGetters) => {
const workspaces = rootGetters['data/sanitizedWorkspaces'];
return workspaces[state.currentWorkspaceId] || getters.mainWorkspace;
return workspaces[currentWorkspaceId] || mainWorkspace;
},
hasUniquePaths: (state, getters) => {
const workspace = getters.currentWorkspace;
return workspace.providerId === 'githubWorkspace';
},
lastSyncActivityKey: (state, getters) => `${getters.currentWorkspace.id}/lastSyncActivity`,
lastFocusKey: (state, getters) => `${getters.currentWorkspace.id}/lastWindowFocus`,
hasUniquePaths: (state, { currentWorkspace }) =>
currentWorkspace.providerId === 'githubWorkspace',
lastSyncActivityKey: (state, { currentWorkspace }) => `${currentWorkspace.id}/lastSyncActivity`,
lastFocusKey: (state, { currentWorkspace }) => `${currentWorkspace.id}/lastWindowFocus`,
mainWorkspaceToken: (state, getters, rootState, rootGetters) => {
const googleTokens = rootGetters['data/googleTokens'];
const loginSubs = Object.keys(googleTokens)
.filter(sub => googleTokens[sub].isLogin);
return googleTokens[loginSubs[0]];
},
syncToken: (state, getters, rootState, rootGetters) => {
const workspace = getters.currentWorkspace;
switch (workspace.providerId) {
syncToken: (state, { currentWorkspace, mainWorkspaceToken }, rootState, rootGetters) => {
switch (currentWorkspace.providerId) {
case 'googleDriveWorkspace': {
const googleTokens = rootGetters['data/googleTokens'];
return googleTokens[workspace.sub];
return googleTokens[currentWorkspace.sub];
}
case 'githubWorkspace': {
const githubTokens = rootGetters['data/githubTokens'];
return githubTokens[workspace.sub];
return githubTokens[currentWorkspace.sub];
}
case 'couchdbWorkspace': {
const couchdbTokens = rootGetters['data/couchdbTokens'];
return couchdbTokens[workspace.id];
return couchdbTokens[currentWorkspace.id];
}
default:
return getters.mainWorkspaceToken;
return mainWorkspaceToken;
}
},
loginToken: (state, getters, rootState, rootGetters) => {
const workspace = getters.currentWorkspace;
switch (workspace.providerId) {
loginToken: (state, { currentWorkspace, mainWorkspaceToken }, rootState, rootGetters) => {
switch (currentWorkspace.providerId) {
case 'googleDriveWorkspace': {
const googleTokens = rootGetters['data/googleTokens'];
return googleTokens[workspace.sub];
return googleTokens[currentWorkspace.sub];
}
case 'githubWorkspace': {
const githubTokens = rootGetters['data/githubTokens'];
return githubTokens[workspace.sub];
return githubTokens[currentWorkspace.sub];
}
default:
return getters.mainWorkspaceToken;
return mainWorkspaceToken;
}
},
userId: (state, { loginToken }, rootState, rootGetters) => {
@ -82,7 +78,7 @@ export default {
});
return prefix ? `${prefix}:${loginToken.sub}` : loginToken.sub;
},
sponsorToken: (state, getters) => getters.mainWorkspaceToken,
sponsorToken: (state, { mainWorkspaceToken }) => mainWorkspaceToken,
},
actions: {
setCurrentWorkspaceId: ({ commit, getters }, value) => {