Use of async/await

Benoit Schweblin 2018-05-13 13:27:33 +00:00
parent e971082768
commit 597c747b00
69 changed files with 4194 additions and 4208 deletions
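Nearly every hunk below applies the same transformation: a `.then()`/`.catch()` promise chain becomes an `async` method whose body uses `await` inside `try`/`catch`, and modal cancellations (surfaced as promise rejections) are still swallowed by an empty catch block. The before/after sketch below illustrates that pattern; it is not taken verbatim from any file in this commit, and `someModal` and `doWork()` are placeholder names.

// Before: promise chain; a rejected modal dispatch means the user cancelled.
save() {
  return this.$store.dispatch('modal/open', 'someModal')
    .then(result => doWork(result), () => { /* Cancel */ });
},

// After: the same method with async/await; cancellation is still silently ignored.
async save() {
  try {
    const result = await this.$store.dispatch('modal/open', 'someModal');
    doWork(result);
  } catch (e) {
    // Cancel
  }
},

The same shape recurs throughout the diff with fileSvc, googleHelper, and the various provider helpers standing in for the modal dispatch.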

View File

@ -92,20 +92,19 @@ export default {
return !!this.$store.getters['modal/config'];
},
},
created() {
syncSvc.init()
.then(() => {
networkSvc.init();
sponsorSvc.init();
this.ready = true;
tempFileSvc.setReady();
})
.catch((err) => {
if (err && err.message !== 'reload') {
console.error(err); // eslint-disable-line no-console
this.$store.dispatch('notification/error', err);
}
});
async created() {
try {
await syncSvc.init();
await networkSvc.init();
await sponsorSvc.init();
this.ready = true;
tempFileSvc.setReady();
} catch (err) {
if (err && err.message !== 'reload') {
console.error(err); // eslint-disable-line no-console
this.$store.dispatch('notification/error', err);
}
}
},
};
</script>

View File

@ -97,29 +97,36 @@ export default {
}
return true;
},
submitNewChild(cancel) {
async submitNewChild(cancel) {
const { newChildNode } = this.$store.state.explorer;
if (!cancel && !newChildNode.isNil && newChildNode.item.name) {
if (newChildNode.isFolder) {
fileSvc.storeItem(newChildNode.item)
.then(item => this.select(item.id), () => { /* cancel */ });
} else {
fileSvc.createFile(newChildNode.item)
.then(item => this.select(item.id), () => { /* cancel */ });
try {
if (newChildNode.isFolder) {
const item = await fileSvc.storeItem(newChildNode.item);
this.select(item.id);
} else {
const item = await fileSvc.createFile(newChildNode.item);
this.select(item.id);
}
} catch (e) {
// Cancel
}
}
this.$store.commit('explorer/setNewItem', null);
},
submitEdit(cancel) {
async submitEdit(cancel) {
const { item } = this.$store.getters['explorer/editingNode'];
const value = this.editingValue;
this.setEditingId(null);
if (!cancel && item.id && value) {
fileSvc.storeItem({
...item,
name: value,
})
.catch(() => { /* cancel */ });
try {
await fileSvc.storeItem({
...item,
name: value,
});
} catch (e) {
// Cancel
}
}
},
setDragSourceId(evt) {
@ -140,22 +147,17 @@ export default {
&& !targetNode.isNil
&& sourceNode.item.id !== targetNode.item.id
) {
const patch = {
id: sourceNode.item.id,
fileSvc.storeItem({
...sourceNode.item,
parentId: targetNode.item.id,
};
if (sourceNode.isFolder) {
this.$store.commit('folder/patchItem', patch);
} else {
this.$store.commit('file/patchItem', patch);
}
});
}
},
onContextMenu(evt) {
async onContextMenu(evt) {
if (this.select(undefined, false)) {
evt.preventDefault();
evt.stopPropagation();
this.$store.dispatch('contextMenu/open', {
const item = await this.$store.dispatch('contextMenu/open', {
coordinates: {
left: evt.clientX,
top: evt.clientY,
@ -178,8 +180,8 @@ export default {
name: 'Delete',
perform: () => explorerSvc.deleteItem(),
}],
})
.then(item => item.perform());
});
item.perform();
}
},
},

View File

@ -175,10 +175,6 @@ export default {
background-color: rgba(160, 160, 160, 0.5);
overflow: auto;
hr {
margin: 0.5em 0;
}
p {
line-height: 1.5;
}

View File

@ -192,7 +192,7 @@ export default {
editorSvc.pagedownEditor.uiManager.doClick(name);
}
},
editTitle(toggle) {
async editTitle(toggle) {
this.titleFocus = toggle;
if (toggle) {
this.titleInputElt.setSelectionRange(0, this.titleInputElt.value.length);
@ -200,11 +200,14 @@ export default {
const title = this.title.trim();
this.title = this.$store.getters['file/current'].name;
if (title) {
fileSvc.storeItem({
...this.$store.getters['file/current'],
name: title,
})
.catch(() => { /* Cancel */ });
try {
await fileSvc.storeItem({
...this.$store.getters['file/current'],
name: title,
});
} catch (e) {
// Cancel
}
}
}
},

View File

@ -47,12 +47,13 @@ export default {
...mapMutations('discussion', [
'setIsCommenting',
]),
removeComment() {
this.$store.dispatch('modal/commentDeletion')
.then(
() => this.$store.dispatch('discussion/cleanCurrentFile', { filterComment: this.comment }),
() => { /* Cancel */ },
);
async removeComment() {
try {
await this.$store.dispatch('modal/commentDeletion');
this.$store.dispatch('discussion/cleanCurrentFile', { filterComment: this.comment });
} catch (e) {
// Cancel
}
},
},
mounted() {

View File

@ -93,14 +93,15 @@ export default {
.start();
}
},
removeDiscussion() {
this.$store.dispatch('modal/discussionDeletion')
.then(
() => this.$store.dispatch('discussion/cleanCurrentFile', {
filterDiscussion: this.currentDiscussion,
}),
() => { /* Cancel */ },
);
async removeDiscussion() {
try {
await this.$store.dispatch('modal/discussionDeletion');
this.$store.dispatch('discussion/cleanCurrentFile', {
filterDiscussion: this.currentDiscussion,
});
} catch (e) {
// Cancel
}
},
},
};

View File

@ -96,12 +96,13 @@ export default {
...mapMutations('content', [
'setRevisionContent',
]),
signin() {
return googleHelper.signin()
.then(
() => syncSvc.requestSync(),
() => { /* Cancel */ },
);
async signin() {
try {
await googleHelper.signin();
syncSvc.requestSync();
} catch (e) {
// Cancel
}
},
close() {
this.$store.dispatch('data/setSideBarPanel', 'menu');
@ -117,10 +118,15 @@ export default {
const currentFile = this.$store.getters['file/current'];
this.$store.dispatch(
'queue/enqueue',
() => Promise.resolve()
.then(() => this.workspaceProvider
.getRevisionContent(syncToken, currentFile.id, revision.id))
.then(resolve, reject),
async () => {
try {
const content = await this.workspaceProvider
.getRevisionContent(syncToken, currentFile.id, revision.id);
resolve(content);
} catch (e) {
reject(e);
}
},
);
});
revisionContentPromises[revision.id] = revisionContentPromise;
@ -181,9 +187,15 @@ export default {
revisionsPromise = new Promise((resolve, reject) => {
this.$store.dispatch(
'queue/enqueue',
() => Promise.resolve()
.then(() => this.workspaceProvider.listRevisions(syncToken, currentFile.id))
.then(resolve, reject),
async () => {
try {
const revisions = await this.workspaceProvider
.listRevisions(syncToken, currentFile.id);
resolve(revisions);
} catch (e) {
reject(e);
}
},
);
})
.catch(() => {

View File

@ -104,16 +104,20 @@ export default {
...mapActions('data', {
setPanel: 'setSideBarPanel',
}),
signin() {
return googleHelper.signin()
.then(
() => syncSvc.requestSync(),
() => { /* Cancel */ },
);
async signin() {
try {
await googleHelper.signin();
syncSvc.requestSync();
} catch (e) {
// Cancel
}
},
fileProperties() {
return this.$store.dispatch('modal/open', 'fileProperties')
.catch(() => { /* Cancel */ });
async fileProperties() {
try {
await this.$store.dispatch('modal/open', 'fileProperties');
} catch (e) {
// Cancel
}
},
print() {
window.print();

View File

@ -78,29 +78,33 @@ export default {
document.body.removeChild(iframeElt);
}, 60000);
},
settings() {
return this.$store.dispatch('modal/open', 'settings')
.then(
settings => this.$store.dispatch('data/setSettings', settings),
() => { /* Cancel */ },
);
async settings() {
try {
const settings = await this.$store.dispatch('modal/open', 'settings');
this.$store.dispatch('data/setSettings', settings);
} catch (e) {
// Cancel
}
},
templates() {
return this.$store.dispatch('modal/open', 'templates')
.then(
({ templates }) => this.$store.dispatch('data/setTemplates', templates),
() => { /* Cancel */ },
);
async templates() {
try {
const { templates } = await this.$store.dispatch('modal/open', 'templates');
this.$store.dispatch('data/setTemplates', templates);
} catch (e) {
// Cancel
}
},
reset() {
return this.$store.dispatch('modal/reset')
.then(() => {
window.location.href = '#reset=true';
window.location.reload();
});
async reset() {
try {
await this.$store.dispatch('modal/reset');
window.location.href = '#reset=true';
window.location.reload();
} catch (e) {
// Cancel
}
},
about() {
return this.$store.dispatch('modal/open', 'about');
this.$store.dispatch('modal/open', 'about');
},
},
};

View File

@ -118,12 +118,15 @@ const tokensToArray = (tokens, filter = () => true) => Object.keys(tokens)
.filter(token => filter(token))
.sort((token1, token2) => token1.name.localeCompare(token2.name));
const openPublishModal = (token, type) => store.dispatch('modal/open', {
type,
token,
}).then(publishLocation => publishSvc.createPublishLocation(publishLocation));
const onCancel = () => {};
const publishModalOpener = type => async (token) => {
try {
const publishLocation = await store.dispatch('modal/open', {
type,
token,
});
publishSvc.createPublishLocation(publishLocation);
} catch (e) { /* cancel */ }
};
export default {
components: {
@ -178,74 +181,48 @@ export default {
managePublish() {
return this.$store.dispatch('modal/open', 'publishManagement');
},
addGoogleDriveAccount() {
return this.$store.dispatch('modal/open', {
type: 'googleDriveAccount',
onResolve: () => googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess),
})
.catch(onCancel);
async addGoogleDriveAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'googleDriveAccount' });
await googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess);
} catch (e) { /* cancel */ }
},
addDropboxAccount() {
return this.$store.dispatch('modal/open', {
type: 'dropboxAccount',
onResolve: () => dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess),
})
.catch(onCancel);
async addDropboxAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'dropboxAccount' });
await dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess);
} catch (e) { /* cancel */ }
},
addGithubAccount() {
return this.$store.dispatch('modal/open', {
type: 'githubAccount',
onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
})
.catch(onCancel);
async addGithubAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'githubAccount' });
await githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess);
} catch (e) { /* cancel */ }
},
addWordpressAccount() {
return wordpressHelper.addAccount()
.catch(onCancel);
async addWordpressAccount() {
try {
await wordpressHelper.addAccount();
} catch (e) { /* cancel */ }
},
addBloggerAccount() {
return googleHelper.addBloggerAccount()
.catch(onCancel);
async addBloggerAccount() {
try {
await googleHelper.addBloggerAccount();
} catch (e) { /* cancel */ }
},
addZendeskAccount() {
return this.$store.dispatch('modal/open', {
type: 'zendeskAccount',
onResolve: ({ subdomain, clientId }) => zendeskHelper.addAccount(subdomain, clientId),
})
.catch(onCancel);
},
publishGoogleDrive(token) {
return openPublishModal(token, 'googleDrivePublish')
.catch(onCancel);
},
publishDropbox(token) {
return openPublishModal(token, 'dropboxPublish')
.catch(onCancel);
},
publishGithub(token) {
return openPublishModal(token, 'githubPublish')
.catch(onCancel);
},
publishGist(token) {
return openPublishModal(token, 'gistPublish')
.catch(onCancel);
},
publishWordpress(token) {
return openPublishModal(token, 'wordpressPublish')
.catch(onCancel);
},
publishBlogger(token) {
return openPublishModal(token, 'bloggerPublish')
.catch(onCancel);
},
publishBloggerPage(token) {
return openPublishModal(token, 'bloggerPagePublish')
.catch(onCancel);
},
publishZendesk(token) {
return openPublishModal(token, 'zendeskPublish')
.catch(onCancel);
async addZendeskAccount() {
try {
const { subdomain, clientId } = await this.$store.dispatch('modal/open', { type: 'zendeskAccount' });
await zendeskHelper.addAccount(subdomain, clientId);
} catch (e) { /* cancel */ }
},
publishGoogleDrive: publishModalOpener('googleDrivePublish'),
publishDropbox: publishModalOpener('dropboxPublish'),
publishGithub: publishModalOpener('githubPublish'),
publishGist: publishModalOpener('gistPublish'),
publishWordpress: publishModalOpener('wordpressPublish'),
publishBlogger: publishModalOpener('bloggerPublish'),
publishBloggerPage: publishModalOpener('bloggerPagePublish'),
publishZendesk: publishModalOpener('zendeskPublish'),
},
};
</script>

View File

@ -101,8 +101,6 @@ const openSyncModal = (token, type) => store.dispatch('modal/open', {
token,
}).then(syncLocation => syncSvc.createSyncLocation(syncLocation));
const onCancel = () => {};
export default {
components: {
MenuEntry,
@ -147,66 +145,79 @@ export default {
manageSync() {
return this.$store.dispatch('modal/open', 'syncManagement');
},
addGoogleDriveAccount() {
return this.$store.dispatch('modal/open', {
type: 'googleDriveAccount',
onResolve: () => googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess),
})
.catch(onCancel);
async addGoogleDriveAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'googleDriveAccount' });
await googleHelper.addDriveAccount(!store.getters['data/localSettings'].googleDriveRestrictedAccess);
} catch (e) { /* cancel */ }
},
addDropboxAccount() {
return this.$store.dispatch('modal/open', {
type: 'dropboxAccount',
onResolve: () => dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess),
})
.catch(onCancel);
async addDropboxAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'dropboxAccount' });
await dropboxHelper.addAccount(!store.getters['data/localSettings'].dropboxRestrictedAccess);
} catch (e) { /* cancel */ }
},
addGithubAccount() {
return this.$store.dispatch('modal/open', {
type: 'githubAccount',
onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
})
.catch(onCancel);
async addGithubAccount() {
try {
await this.$store.dispatch('modal/open', { type: 'githubAccount' });
await githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess);
} catch (e) { /* cancel */ }
},
openGoogleDrive(token) {
return googleHelper.openPicker(token, 'doc')
.then(files => this.$store.dispatch(
'queue/enqueue',
() => googleDriveProvider.openFiles(token, files),
));
async openGoogleDrive(token) {
const files = await googleHelper.openPicker(token, 'doc');
this.$store.dispatch(
'queue/enqueue',
() => googleDriveProvider.openFiles(token, files),
);
},
openDropbox(token) {
return dropboxHelper.openChooser(token)
.then(paths => this.$store.dispatch(
'queue/enqueue',
() => dropboxProvider.openFiles(token, paths),
));
async openDropbox(token) {
const paths = await dropboxHelper.openChooser(token);
this.$store.dispatch(
'queue/enqueue',
() => dropboxProvider.openFiles(token, paths),
);
},
saveGoogleDrive(token) {
return openSyncModal(token, 'googleDriveSave')
.catch(onCancel);
async saveGoogleDrive(token) {
try {
await openSyncModal(token, 'googleDriveSave');
} catch (e) {
// Cancel
}
},
saveDropbox(token) {
return openSyncModal(token, 'dropboxSave')
.catch(onCancel);
async saveDropbox(token) {
try {
await openSyncModal(token, 'dropboxSave');
} catch (e) {
// Cancel
}
},
openGithub(token) {
return store.dispatch('modal/open', {
type: 'githubOpen',
token,
})
.then(syncLocation => this.$store.dispatch(
async openGithub(token) {
try {
const syncLocation = await store.dispatch('modal/open', {
type: 'githubOpen',
token,
});
this.$store.dispatch(
'queue/enqueue',
() => githubProvider.openFile(token, syncLocation),
));
);
} catch (e) {
// Cancel
}
},
saveGithub(token) {
return openSyncModal(token, 'githubSave')
.catch(onCancel);
async saveGithub(token) {
try {
await openSyncModal(token, 'githubSave');
} catch (e) {
// Cancel
}
},
saveGist(token) {
return openSyncModal(token, 'gistSync')
.catch(onCancel);
async saveGist(token) {
try {
await openSyncModal(token, 'gistSync');
} catch (e) {
// Cancel
}
},
},
};

View File

@ -31,8 +31,6 @@ import { mapGetters } from 'vuex';
import MenuEntry from './common/MenuEntry';
import googleHelper from '../../services/providers/helpers/googleHelper';
const onCancel = () => {};
export default {
components: {
MenuEntry,
@ -46,28 +44,37 @@ export default {
]),
},
methods: {
addCouchdbWorkspace() {
return this.$store.dispatch('modal/open', {
type: 'couchdbWorkspace',
})
.catch(onCancel);
async addCouchdbWorkspace() {
try {
await this.$store.dispatch('modal/open', {
type: 'couchdbWorkspace',
});
} catch (e) {
// Cancel
}
},
addGithubWorkspace() {
return this.$store.dispatch('modal/open', {
type: 'githubWorkspace',
})
.catch(onCancel);
async addGithubWorkspace() {
try {
await this.$store.dispatch('modal/open', {
type: 'githubWorkspace',
});
} catch (e) {
// Cancel
}
},
addGoogleDriveWorkspace() {
return googleHelper.addDriveAccount(true)
.then(token => this.$store.dispatch('modal/open', {
async addGoogleDriveWorkspace() {
try {
const token = await googleHelper.addDriveAccount(true);
await this.$store.dispatch('modal/open', {
type: 'googleDriveWorkspace',
token,
}))
.catch(onCancel);
});
} catch (e) {
// Cancel
}
},
manageWorkspaces() {
return this.$store.dispatch('modal/open', 'workspaceManagement');
this.$store.dispatch('modal/open', 'workspaceManagement');
},
},
};

View File

@ -2,7 +2,7 @@
<modal-inner class="modal__inner-1--about-modal" aria-label="About">
<div class="modal__content">
<div class="logo-background"></div>
<small>v{{version}}<br>© 2013-2018 Dock5 Software</small>
<small>© 2013-2018 Dock5 Software<br>v{{version}}</small>
<hr>
StackEdit on <a target="_blank" href="https://github.com/benweet/stackedit/">GitHub</a>
<br>
@ -59,11 +59,12 @@ export default {
.logo-background {
height: 75px;
margin: 0.5rem 0;
margin: 0;
}
small {
display: block;
font-size: 0.75em;
}
hr {

View File

@ -41,6 +41,9 @@
</form-entry>
<form-entry label="Status">
<input slot="field" class="textfield" type="text" v-model.trim="status" @keydown.enter="resolve()">
<div class="form-entry__info">
<b>Example:</b> draft
</div>
</form-entry>
<form-entry label="Date" info="YYYY-MM-DD">
<input slot="field" class="textfield" type="text" v-model.trim="date" @keydown.enter="resolve()">

View File

@ -37,12 +37,13 @@ export default modalTemplate({
let timeoutId;
this.$watch('selectedTemplate', (selectedTemplate) => {
clearTimeout(timeoutId);
timeoutId = setTimeout(() => {
timeoutId = setTimeout(async () => {
const currentFile = this.$store.getters['file/current'];
exportSvc.applyTemplate(currentFile.id, this.allTemplates[selectedTemplate])
.then((html) => {
this.result = html;
});
const html = await exportSvc.applyTemplate(
currentFile.id,
this.allTemplates[selectedTemplate],
);
this.result = html;
}, 10);
}, {
immediate: true,

View File

@ -61,15 +61,17 @@ export default modalTemplate({
addGooglePhotosAccount() {
return googleHelper.addPhotosAccount();
},
openGooglePhotos(token) {
async openGooglePhotos(token) {
const { callback } = this.config;
this.config.reject();
googleHelper.openPicker(token, 'img')
.then(res => res[0] && this.$store.dispatch('modal/open', {
const res = await googleHelper.openPicker(token, 'img');
if (res[0]) {
this.$store.dispatch('modal/open', {
type: 'googlePhoto',
url: res[0].url,
callback,
}));
});
}
},
},
});

View File

@ -38,19 +38,20 @@ export default modalTemplate({
selectedFormat: 'pandocExportFormat',
},
methods: {
resolve() {
async resolve() {
this.config.resolve();
const currentFile = this.$store.getters['file/current'];
const currentContent = this.$store.getters['content/current'];
const { selectedFormat } = this;
this.$store.dispatch('queue/enqueue', () => Promise.all([
const [sponsorToken, token] = await this.$store.dispatch('queue/enqueue', () => Promise.all([
Promise.resolve().then(() => {
const sponsorToken = this.$store.getters['workspace/sponsorToken'];
return sponsorToken && googleHelper.refreshToken(sponsorToken);
const tokenToRefresh = this.$store.getters['workspace/sponsorToken'];
return tokenToRefresh && googleHelper.refreshToken(tokenToRefresh);
}),
sponsorSvc.getToken(),
])
.then(([sponsorToken, token]) => networkSvc.request({
]));
try {
const { body } = await networkSvc.request({
method: 'POST',
url: 'pandocExport',
params: {
@ -63,20 +64,16 @@ export default modalTemplate({
body: JSON.stringify(editorSvc.getPandocAst()),
blob: true,
timeout: 60000,
})
.then((res) => {
FileSaver.saveAs(res.body, `${currentFile.name}.${selectedFormat}`);
}, (err) => {
if (err.status !== 401) {
throw err;
}
this.$store.dispatch('modal/sponsorOnly')
.catch(() => { /* Cancel */ });
}))
.catch((err) => {
});
FileSaver.saveAs(body, `${currentFile.name}.${selectedFormat}`);
} catch (err) {
if (err.status === 401) {
this.$store.dispatch('modal/sponsorOnly');
} else {
console.error(err); // eslint-disable-line no-console
this.$store.dispatch('notification/error', err);
}));
}
}
},
},
});

View File

@ -33,22 +33,24 @@ export default modalTemplate({
selectedTemplate: 'pdfExportTemplate',
},
methods: {
resolve() {
async resolve() {
this.config.resolve();
const currentFile = this.$store.getters['file/current'];
this.$store.dispatch('queue/enqueue', () => Promise.all([
Promise.resolve().then(() => {
const sponsorToken = this.$store.getters['workspace/sponsorToken'];
return sponsorToken && googleHelper.refreshToken(sponsorToken);
}),
sponsorSvc.getToken(),
exportSvc.applyTemplate(
currentFile.id,
this.allTemplates[this.selectedTemplate],
true,
),
])
.then(([sponsorToken, token, html]) => networkSvc.request({
const [sponsorToken, token, html] = await this.$store
.dispatch('queue/enqueue', () => Promise.all([
Promise.resolve().then(() => {
const tokenToRefresh = this.$store.getters['workspace/sponsorToken'];
return tokenToRefresh && googleHelper.refreshToken(tokenToRefresh);
}),
sponsorSvc.getToken(),
exportSvc.applyTemplate(
currentFile.id,
this.allTemplates[this.selectedTemplate],
true,
),
]));
try {
const { body } = await networkSvc.request({
method: 'POST',
url: 'pdfExport',
params: {
@ -59,20 +61,16 @@ export default modalTemplate({
body: html,
blob: true,
timeout: 60000,
})
.then((res) => {
FileSaver.saveAs(res.body, `${currentFile.name}.pdf`);
}, (err) => {
if (err.status !== 401) {
throw err;
}
this.$store.dispatch('modal/sponsorOnly')
.catch(() => { /* Cancel */ });
}))
.catch((err) => {
});
FileSaver.saveAs(body, `${currentFile.name}.pdf`);
} catch (err) {
if (err.status === 401) {
this.$store.dispatch('modal/sponsorOnly');
} else {
console.error(err); // eslint-disable-line no-console
this.$store.dispatch('notification/error', err);
}));
}
}
},
},
});

View File

@ -75,12 +75,13 @@ export default {
}
this.editedId = null;
},
remove(id) {
return this.$store.dispatch('modal/removeWorkspace')
.then(
() => localDbSvc.removeWorkspace(id),
() => { /* Cancel */ },
);
async remove(id) {
try {
await this.$store.dispatch('modal/removeWorkspace');
localDbSvc.removeWorkspace(id);
} catch (e) {
// Cancel
}
},
},
};

View File

@ -29,20 +29,18 @@ export default {
},
},
methods: {
sponsor() {
Promise.resolve()
.then(() => !this.$store.getters['workspace/sponsorToken'] &&
// If user has to sign in
this.$store.dispatch('modal/signInForSponsorship', {
onResolve: () => googleHelper.signin()
.then(() => syncSvc.requestSync()),
}))
.then(() => {
if (!this.$store.getters.isSponsor) {
this.$store.dispatch('modal/open', 'sponsor');
}
})
.catch(() => { /* Cancel */ });
async sponsor() {
try {
if (!this.$store.getters['workspace/sponsorToken']) {
// User has to sign in
await this.$store.dispatch('modal/signInForSponsorship');
await googleHelper.signin();
syncSvc.requestSync();
}
if (!this.$store.getters.isSponsor) {
await this.$store.dispatch('modal/open', 'sponsor');
}
} catch (e) { /* cancel */ }
},
},
};

View File

@ -63,17 +63,15 @@ export default (desc) => {
return sortedTemplates;
};
// Make use of `function` to have `this` bound to the component
component.methods.configureTemplates = function () { // eslint-disable-line func-names
store.dispatch('modal/open', {
component.methods.configureTemplates = async function () { // eslint-disable-line func-names
const { templates, selectedId } = await store.dispatch('modal/open', {
type: 'templates',
selectedId: this.selectedTemplate,
})
.then(({ templates, selectedId }) => {
store.dispatch('data/setTemplates', templates);
store.dispatch('data/patchLocalSettings', {
[id]: selectedId,
});
});
});
store.dispatch('data/setTemplates', templates);
store.dispatch('data/patchLocalSettings', {
[id]: selectedId,
});
};
}
});

View File

@ -18,14 +18,12 @@ OfflinePluginRuntime.install({
// Tells the new SW to take control immediately
OfflinePluginRuntime.applyUpdate();
},
onUpdated: () => {
onUpdated: async () => {
if (!store.state.light) {
localDbSvc.sync()
.then(() => {
localStorage.updated = true;
// Reload the webpage to load into the new version
window.location.reload();
});
await localDbSvc.sync();
localStorage.updated = true;
// Reload the webpage to load into the new version
window.location.reload();
}
},
});

View File

@ -49,20 +49,26 @@ export default {
}
});
await utils.awaitSequence(Object.keys(folderNameMap), async externalId => fileSvc.storeItem({
id: folderIdMap[externalId],
type: 'folder',
name: folderNameMap[externalId],
parentId: folderIdMap[parentIdMap[externalId]],
}, true));
await utils.awaitSequence(
Object.keys(folderNameMap),
async externalId => fileSvc.setOrPatchItem({
id: folderIdMap[externalId],
type: 'folder',
name: folderNameMap[externalId],
parentId: folderIdMap[parentIdMap[externalId]],
}),
);
await utils.awaitSequence(Object.keys(fileNameMap), async externalId => fileSvc.createFile({
name: fileNameMap[externalId],
parentId: folderIdMap[parentIdMap[externalId]],
text: textMap[externalId],
properties: propertiesMap[externalId],
discussions: discussionsMap[externalId],
comments: commentsMap[externalId],
}, true));
await utils.awaitSequence(
Object.keys(fileNameMap),
async externalId => fileSvc.createFile({
name: fileNameMap[externalId],
parentId: folderIdMap[parentIdMap[externalId]],
text: textMap[externalId],
properties: propertiesMap[externalId],
discussions: discussionsMap[externalId],
comments: commentsMap[externalId],
}, true),
);
},
};

View File

@ -120,7 +120,7 @@ const editorSvc = Object.assign(new Vue(), editorSvcDiscussions, editorSvcUtils,
/**
* Refresh the preview with the result of `convert()`
*/
refreshPreview() {
async refreshPreview() {
const sectionDescList = [];
let sectionPreviewElt;
let sectionTocElt;
@ -222,10 +222,10 @@ const editorSvc = Object.assign(new Vue(), editorSvcDiscussions, editorSvcUtils,
img.onerror = resolve;
img.src = imgElt.src;
}));
await Promise.all(loadedPromises);
Promise.all(loadedPromises)
// Debounce if sections have already been measured
.then(() => this.measureSectionDimensions(!!this.previewCtxMeasured));
// Debounce if sections have already been measured
this.measureSectionDimensions(!!this.previewCtxMeasured);
},
/**

View File

@ -15,71 +15,74 @@ export default {
parentId,
});
},
deleteItem() {
async deleteItem() {
const selectedNode = store.getters['explorer/selectedNode'];
if (selectedNode.isNil) {
return Promise.resolve();
return;
}
if (selectedNode.isTrash || selectedNode.item.parentId === 'trash') {
return store.dispatch('modal/trashDeletion').catch(() => { /* Cancel */ });
try {
await store.dispatch('modal/trashDeletion');
} catch (e) {
// Cancel
}
return;
}
// See if we have a dialog to show
let modalAction;
// See if we have a confirmation dialog to show
let moveToTrash = true;
if (selectedNode.isTemp) {
modalAction = 'modal/tempFolderDeletion';
moveToTrash = false;
} else if (selectedNode.item.parentId === 'temp') {
modalAction = 'modal/tempFileDeletion';
moveToTrash = false;
} else if (selectedNode.isFolder) {
modalAction = 'modal/folderDeletion';
try {
if (selectedNode.isTemp) {
await store.dispatch('modal/tempFolderDeletion', selectedNode.item);
moveToTrash = false;
} else if (selectedNode.item.parentId === 'temp') {
await store.dispatch('modal/tempFileDeletion', selectedNode.item);
moveToTrash = false;
} else if (selectedNode.isFolder) {
await store.dispatch('modal/folderDeletion', selectedNode.item);
}
} catch (e) {
return; // cancel
}
return (modalAction
? store.dispatch(modalAction, selectedNode.item)
: Promise.resolve())
.then(() => {
const deleteFile = (id) => {
if (moveToTrash) {
store.commit('file/patchItem', {
id,
parentId: 'trash',
});
} else {
fileSvc.deleteFile(id);
}
};
const deleteFile = (id) => {
if (moveToTrash) {
store.commit('file/patchItem', {
id,
parentId: 'trash',
});
} else {
fileSvc.deleteFile(id);
}
};
if (selectedNode === store.getters['explorer/selectedNode']) {
const currentFileId = store.getters['file/current'].id;
let doClose = selectedNode.item.id === currentFileId;
if (selectedNode.isFolder) {
const recursiveDelete = (folderNode) => {
folderNode.folders.forEach(recursiveDelete);
folderNode.files.forEach((fileNode) => {
doClose = doClose || fileNode.item.id === currentFileId;
deleteFile(fileNode.item.id);
});
store.commit('folder/deleteItem', folderNode.item.id);
};
recursiveDelete(selectedNode);
} else {
deleteFile(selectedNode.item.id);
if (selectedNode === store.getters['explorer/selectedNode']) {
const currentFileId = store.getters['file/current'].id;
let doClose = selectedNode.item.id === currentFileId;
if (selectedNode.isFolder) {
const recursiveDelete = (folderNode) => {
folderNode.folders.forEach(recursiveDelete);
folderNode.files.forEach((fileNode) => {
doClose = doClose || fileNode.item.id === currentFileId;
deleteFile(fileNode.item.id);
});
store.commit('folder/deleteItem', folderNode.item.id);
};
recursiveDelete(selectedNode);
} else {
deleteFile(selectedNode.item.id);
}
if (doClose) {
// Close the current file by opening the last opened, not deleted one
store.getters['data/lastOpenedIds'].some((id) => {
const file = store.state.file.itemMap[id];
if (file.parentId === 'trash') {
return false;
}
if (doClose) {
// Close the current file by opening the last opened, not deleted one
store.getters['data/lastOpenedIds'].some((id) => {
const file = store.state.file.itemMap[id];
if (file.parentId === 'trash') {
return false;
}
store.commit('file/setCurrentId', id);
return true;
});
}
}
}, () => { /* Cancel */ });
store.commit('file/setCurrentId', id);
return true;
});
}
}
},
};

View File

@ -42,86 +42,83 @@ export default {
/**
* Apply the template to the file content
*/
applyTemplate(fileId, template = {
async applyTemplate(fileId, template = {
value: '{{{files.0.content.text}}}',
helpers: '',
}, pdf = false) {
const file = store.state.file.itemMap[fileId];
return localDbSvc.loadItem(`${fileId}/content`)
.then((content) => {
const properties = utils.computeProperties(content.properties);
const options = extensionSvc.getOptions(properties);
const converter = markdownConversionSvc.createConverter(options, true);
const parsingCtx = markdownConversionSvc.parseSections(converter, content.text);
const conversionCtx = markdownConversionSvc.convert(parsingCtx);
const html = conversionCtx.htmlSectionList.map(htmlSanitizer.sanitizeHtml).join('');
containerElt.innerHTML = html;
extensionSvc.sectionPreview(containerElt, options);
const content = await localDbSvc.loadItem(`${fileId}/content`);
const properties = utils.computeProperties(content.properties);
const options = extensionSvc.getOptions(properties);
const converter = markdownConversionSvc.createConverter(options, true);
const parsingCtx = markdownConversionSvc.parseSections(converter, content.text);
const conversionCtx = markdownConversionSvc.convert(parsingCtx);
const html = conversionCtx.htmlSectionList.map(htmlSanitizer.sanitizeHtml).join('');
containerElt.innerHTML = html;
extensionSvc.sectionPreview(containerElt, options);
// Unwrap tables
containerElt.querySelectorAll('.table-wrapper').cl_each((wrapperElt) => {
while (wrapperElt.firstChild) {
wrapperElt.parentNode.insertBefore(wrapperElt.firstChild, wrapperElt.nextSibling);
}
wrapperElt.parentNode.removeChild(wrapperElt);
});
// Unwrap tables
containerElt.querySelectorAll('.table-wrapper').cl_each((wrapperElt) => {
while (wrapperElt.firstChild) {
wrapperElt.parentNode.insertBefore(wrapperElt.firstChild, wrapperElt.nextSibling);
}
wrapperElt.parentNode.removeChild(wrapperElt);
});
// Make TOC
const headings = containerElt.querySelectorAll('h1,h2,h3,h4,h5,h6').cl_map(headingElt => ({
title: headingElt.textContent,
anchor: headingElt.id,
level: parseInt(headingElt.tagName.slice(1), 10),
children: [],
}));
const toc = groupHeadings(headings);
const view = {
pdf,
files: [{
name: file.name,
content: {
text: content.text,
properties,
yamlProperties: content.properties,
html: containerElt.innerHTML,
toc,
},
}],
};
containerElt.innerHTML = '';
// Make TOC
const headings = containerElt.querySelectorAll('h1,h2,h3,h4,h5,h6').cl_map(headingElt => ({
title: headingElt.textContent,
anchor: headingElt.id,
level: parseInt(headingElt.tagName.slice(1), 10),
children: [],
}));
const toc = groupHeadings(headings);
const view = {
pdf,
files: [{
name: file.name,
content: {
text: content.text,
properties,
yamlProperties: content.properties,
html: containerElt.innerHTML,
toc,
},
}],
};
containerElt.innerHTML = '';
// Run template conversion in a Worker to prevent attacks from helpers
const worker = new TemplateWorker();
return new Promise((resolve, reject) => {
const timeoutId = setTimeout(() => {
worker.terminate();
reject(new Error('Template generation timeout.'));
}, 10000);
worker.addEventListener('message', (e) => {
clearTimeout(timeoutId);
worker.terminate();
// e.data can contain unsafe data if a helper attempts to call postMessage
const [err, result] = e.data;
if (err) {
reject(new Error(`${err}`));
} else {
resolve(`${result}`);
}
});
worker.postMessage([template.value, view, template.helpers]);
});
// Run template conversion in a Worker to prevent attacks from helpers
const worker = new TemplateWorker();
return new Promise((resolve, reject) => {
const timeoutId = setTimeout(() => {
worker.terminate();
reject(new Error('Template generation timeout.'));
}, 10000);
worker.addEventListener('message', (e) => {
clearTimeout(timeoutId);
worker.terminate();
// e.data can contain unsafe data if a helper attempts to call postMessage
const [err, result] = e.data;
if (err) {
reject(new Error(`${err}`));
} else {
resolve(`${result}`);
}
});
worker.postMessage([template.value, view, template.helpers]);
});
},
/**
* Export a file to disk.
*/
exportToDisk(fileId, type, template) {
async exportToDisk(fileId, type, template) {
const file = store.state.file.itemMap[fileId];
return this.applyTemplate(fileId, template)
.then((html) => {
const blob = new Blob([html], {
type: 'text/plain;charset=utf-8',
});
FileSaver.saveAs(blob, `${file.name}.${type}`);
});
const html = await this.applyTemplate(fileId, template);
const blob = new Blob([html], {
type: 'text/plain;charset=utf-8',
});
FileSaver.saveAs(blob, `${file.name}.${type}`);
},
};

View File

@ -7,7 +7,7 @@ export default {
/**
* Create a file in the store with the specified fields.
*/
createFile({
async createFile({
name,
parentId,
text,
@ -29,77 +29,99 @@ export default {
discussions: discussions || {},
comments: comments || {},
};
const nameStripped = file.name !== utils.defaultName && file.name !== name;
// Check if there is a path conflict
const workspaceUniquePaths = store.getters['workspace/hasUniquePaths'];
let pathConflict;
if (workspaceUniquePaths) {
const parentPath = store.getters.itemPaths[file.parentId] || '';
const path = parentPath + file.name;
pathConflict = !!store.getters.pathItems[path];
// Show warning dialogs
if (!background) {
// If name is being stripped
if (file.name !== utils.defaultName && file.name !== name) {
await store.dispatch('modal/stripName', name);
}
// Check if there is already a file with that path
if (workspaceUniquePaths) {
const parentPath = store.getters.itemPaths[file.parentId] || '';
const path = parentPath + file.name;
if (store.getters.pathItems[path]) {
await store.dispatch('modal/pathConflict', name);
}
}
}
// Show warning dialogs and then save in the store
return Promise.resolve()
.then(() => !background && nameStripped && store.dispatch('modal/stripName', name))
.then(() => !background && pathConflict && store.dispatch('modal/pathConflict', name))
.then(() => {
store.commit('content/setItem', content);
store.commit('file/setItem', file);
if (workspaceUniquePaths) {
this.makePathUnique(id);
}
return store.state.file.itemMap[id];
});
// Save file and content in the store
store.commit('content/setItem', content);
store.commit('file/setItem', file);
if (workspaceUniquePaths) {
this.makePathUnique(id);
}
// Return the new file item
return store.state.file.itemMap[id];
},
/**
* Make sanity checks and then create/update the folder/file in the store.
*/
async storeItem(item, background = false) {
async storeItem(item) {
const id = item.id || utils.uid();
const sanitizedName = utils.sanitizeName(item.name);
if (item.type === 'folder' && forbiddenFolderNameMatcher.exec(sanitizedName)) {
if (background) {
return null;
}
await store.dispatch('modal/unauthorizedName', item.name);
throw new Error('Unauthorized name.');
}
const workspaceUniquePaths = store.getters['workspace/hasUniquePaths'];
// Show warning dialogs
if (!background) {
// If name has been stripped
if (sanitizedName !== utils.defaultName && sanitizedName !== item.name) {
await store.dispatch('modal/stripName', item.name);
// If name has been stripped
if (sanitizedName !== utils.defaultName && sanitizedName !== item.name) {
await store.dispatch('modal/stripName', item.name);
}
// Check if there is a path conflict
if (store.getters['workspace/hasUniquePaths']) {
const parentPath = store.getters.itemPaths[item.parentId] || '';
const path = parentPath + sanitizedName;
const pathItems = store.getters.pathItems[path] || [];
if (pathItems.some(itemWithSamePath => itemWithSamePath.id !== id)) {
await store.dispatch('modal/pathConflict', item.name);
}
// Check if there is a path conflict
if (workspaceUniquePaths) {
const parentPath = store.getters.itemPaths[item.parentId] || '';
const path = parentPath + sanitizedName;
const pathItems = store.getters.pathItems[path] || [];
if (pathItems.some(itemWithSamePath => itemWithSamePath.id !== id)) {
await store.dispatch('modal/pathConflict', item.name);
}
}
return this.setOrPatchItem({
...item,
id,
});
},
/**
* Create/update the folder/file in the store and make sure its path is unique.
*/
setOrPatchItem(patch) {
const item = {
...store.getters.allItemMap[patch.id] || patch,
};
if (!item.id) {
return null;
}
if (patch.parentId !== undefined) {
item.parentId = patch.parentId || null;
}
if (patch.name) {
const sanitizedName = utils.sanitizeName(patch.name);
if (item.type !== 'folder' || !forbiddenFolderNameMatcher.exec(sanitizedName)) {
item.name = sanitizedName;
}
}
// Save item in the store
store.commit(`${item.type}/setItem`, {
id,
parentId: item.parentId || null,
name: sanitizedName,
});
store.commit(`${item.type}/setItem`, item);
// Ensure path uniqueness
if (workspaceUniquePaths) {
this.makePathUnique(id);
if (store.getters['workspace/hasUniquePaths']) {
this.makePathUnique(item.id);
}
return store.getters.allItemMap[id];
return store.getters.allItemMap[item.id];
},
/**

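Note the split introduced in this file: storeItem now always performs the interactive checks (name stripping and path-conflict dialogs, its former background flag being removed), while the new setOrPatchItem creates or patches an item silently and is what background callers such as the folder import above now use. A rough usage sketch follows; the variable names are placeholders, not taken from the commit.

// Interactive rename: may show the stripName / pathConflict dialogs before saving.
const renamed = await fileSvc.storeItem({ ...folder, name: userTypedName });

// Background move (e.g. during an import): no dialogs, create or patch by id.
const moved = fileSvc.setOrPatchItem({ id: folder.id, parentId: targetFolderId });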
View File

@ -136,7 +136,7 @@ const localDbSvc = {
* localDb will be finished. Effectively, open a transaction, then read and apply all changes
* from the DB since the previous transaction, then write all the changes from the store.
*/
sync() {
async sync() {
return new Promise((resolve, reject) => {
// Create the DB transaction
this.connection.createTx((tx) => {
@ -275,7 +275,7 @@ const localDbSvc = {
/**
* Retrieve an item from the DB and put it in the store.
*/
loadItem(id) {
async loadItem(id) {
// Check if item is in the store
const itemInStore = store.getters.allItemMap[id];
if (itemInStore) {
@ -307,181 +307,165 @@ const localDbSvc = {
/**
* Unload from the store contents that haven't been opened recently
*/
unloadContents() {
return this.sync()
.then(() => {
// Keep only last opened files in memory
const lastOpenedFileIdSet = new Set(store.getters['data/lastOpenedIds']);
Object.keys(contentTypes).forEach((type) => {
store.getters[`${type}/items`].forEach((item) => {
const [fileId] = item.id.split('/');
if (!lastOpenedFileIdSet.has(fileId)) {
// Remove item from the store
store.commit(`${type}/deleteItem`, item.id);
}
});
});
async unloadContents() {
await this.sync();
// Keep only last opened files in memory
const lastOpenedFileIdSet = new Set(store.getters['data/lastOpenedIds']);
Object.keys(contentTypes).forEach((type) => {
store.getters[`${type}/items`].forEach((item) => {
const [fileId] = item.id.split('/');
if (!lastOpenedFileIdSet.has(fileId)) {
// Remove item from the store
store.commit(`${type}/deleteItem`, item.id);
}
});
});
},
/**
* Drop the database and clean the localStorage for the specified workspaceId.
*/
removeWorkspace(id) {
async removeWorkspace(id) {
const workspaces = {
...store.getters['data/workspaces'],
};
delete workspaces[id];
store.dispatch('data/setWorkspaces', workspaces);
this.syncLocalStorage();
return new Promise((resolve, reject) => {
await new Promise((resolve, reject) => {
const dbName = getDbName(id);
const request = indexedDB.deleteDatabase(dbName);
request.onerror = reject;
request.onsuccess = resolve;
})
.then(() => {
localStorage.removeItem(`${id}/lastSyncActivity`);
localStorage.removeItem(`${id}/lastWindowFocus`);
});
});
localStorage.removeItem(`${id}/lastSyncActivity`);
localStorage.removeItem(`${id}/lastWindowFocus`);
},
/**
* Create the connection and start syncing.
*/
init() {
return Promise.resolve()
.then(() => {
// Reset the app if reset flag was passed
if (resetApp) {
return Promise.all(Object.keys(store.getters['data/workspaces'])
.map(workspaceId => localDbSvc.removeWorkspace(workspaceId)))
.then(() => utils.localStorageDataIds.forEach((id) => {
// Clean data stored in localStorage
localStorage.removeItem(`data/${id}`);
}))
.then(() => {
window.location.reload();
throw new Error('reload');
});
}
async init() {
// Reset the app if reset flag was passed
if (resetApp) {
await Promise.all(Object.keys(store.getters['data/workspaces'])
.map(workspaceId => localDbSvc.removeWorkspace(workspaceId)));
utils.localStorageDataIds.forEach((id) => {
// Clean data stored in localStorage
localStorage.removeItem(`data/${id}`);
});
window.location.reload();
throw new Error('reload');
}
// Create the connection
this.connection = new Connection();
// Create the connection
this.connection = new Connection();
// Load the DB
return localDbSvc.sync();
})
.then(() => {
// If exportWorkspace parameter was provided
if (exportWorkspace) {
const backup = JSON.stringify(store.getters.allItemMap);
const blob = new Blob([backup], {
type: 'text/plain;charset=utf-8',
});
FileSaver.saveAs(blob, 'StackEdit workspace.json');
return;
}
// Load the DB
await localDbSvc.sync();
// Save welcome file content hash if not done already
const hash = utils.hash(welcomeFile);
const { welcomeFileHashes } = store.getters['data/localSettings'];
if (!welcomeFileHashes[hash]) {
store.dispatch('data/patchLocalSettings', {
welcomeFileHashes: {
...welcomeFileHashes,
[hash]: 1,
},
});
}
// If exportWorkspace parameter was provided
if (exportWorkspace) {
const backup = JSON.stringify(store.getters.allItemMap);
const blob = new Blob([backup], {
type: 'text/plain;charset=utf-8',
});
FileSaver.saveAs(blob, 'StackEdit workspace.json');
return;
}
// If app was last opened 7 days ago and synchronization is off
if (!store.getters['workspace/syncToken'] &&
(store.state.workspace.lastFocus + utils.cleanTrashAfter < Date.now())
) {
// Clean files
store.getters['file/items']
.filter(file => file.parentId === 'trash') // If file is in the trash
.forEach(file => fileSvc.deleteFile(file.id));
}
// Save welcome file content hash if not done already
const hash = utils.hash(welcomeFile);
const { welcomeFileHashes } = store.getters['data/localSettings'];
if (!welcomeFileHashes[hash]) {
store.dispatch('data/patchLocalSettings', {
welcomeFileHashes: {
...welcomeFileHashes,
[hash]: 1,
},
});
}
// Enable sponsorship
if (utils.queryParams.paymentSuccess) {
window.location.hash = ''; // PaymentSuccess param is always on its own
store.dispatch('modal/paymentSuccess')
.catch(() => { /* Cancel */ });
const sponsorToken = store.getters['workspace/sponsorToken'];
// Force check sponsorship after a few seconds
const currentDate = Date.now();
if (sponsorToken && sponsorToken.expiresOn > currentDate - checkSponsorshipAfter) {
store.dispatch('data/setGoogleToken', {
...sponsorToken,
expiresOn: currentDate - checkSponsorshipAfter,
});
// If app was last opened 7 days ago and synchronization is off
if (!store.getters['workspace/syncToken'] &&
(store.state.workspace.lastFocus + utils.cleanTrashAfter < Date.now())
) {
// Clean files
store.getters['file/items']
.filter(file => file.parentId === 'trash') // If file is in the trash
.forEach(file => fileSvc.deleteFile(file.id));
}
// Enable sponsorship
if (utils.queryParams.paymentSuccess) {
window.location.hash = ''; // PaymentSuccess param is always on its own
store.dispatch('modal/paymentSuccess')
.catch(() => { /* Cancel */ });
const sponsorToken = store.getters['workspace/sponsorToken'];
// Force check sponsorship after a few seconds
const currentDate = Date.now();
if (sponsorToken && sponsorToken.expiresOn > currentDate - checkSponsorshipAfter) {
store.dispatch('data/setGoogleToken', {
...sponsorToken,
expiresOn: currentDate - checkSponsorshipAfter,
});
}
}
// Sync local DB periodically
utils.setInterval(() => localDbSvc.sync(), 1000);
// watch current file changing
store.watch(
() => store.getters['file/current'].id,
async () => {
// See if currentFile is real, ie it has an ID
const currentFile = store.getters['file/current'];
// If current file has no ID, get the most recent file
if (!currentFile.id) {
const recentFile = store.getters['file/lastOpened'];
// Set it as the current file
if (recentFile.id) {
store.commit('file/setCurrentId', recentFile.id);
} else {
// If still no ID, create a new file
const newFile = await fileSvc.createFile({
name: 'Welcome file',
text: welcomeFile,
}, true);
// Set it as the current file
store.commit('file/setCurrentId', newFile.id);
}
} else {
try {
// Load contentState from DB
await localDbSvc.loadContentState(currentFile.id);
// Load syncedContent from DB
await localDbSvc.loadSyncedContent(currentFile.id);
// Load content from DB
try {
await localDbSvc.loadItem(`${currentFile.id}/content`);
} catch (err) {
// Failure (content is not available), go back to previous file
const lastOpenedFile = store.getters['file/lastOpened'];
store.commit('file/setCurrentId', lastOpenedFile.id);
throw err;
}
// Set last opened file
store.dispatch('data/setLastOpenedId', currentFile.id);
// Cancel new discussion and open the gutter if file contains discussions
store.commit(
'discussion/setCurrentDiscussionId',
store.getters['discussion/nextDiscussionId'],
);
} catch (err) {
console.error(err); // eslint-disable-line no-console
store.dispatch('notification/error', err);
}
}
// Sync local DB periodically
utils.setInterval(() => localDbSvc.sync(), 1000);
// watch current file changing
store.watch(
() => store.getters['file/current'].id,
() => {
// See if currentFile is real, ie it has an ID
const currentFile = store.getters['file/current'];
// If current file has no ID, get the most recent file
if (!currentFile.id) {
const recentFile = store.getters['file/lastOpened'];
// Set it as the current file
if (recentFile.id) {
store.commit('file/setCurrentId', recentFile.id);
} else {
// If still no ID, create a new file
fileSvc.createFile({
name: 'Welcome file',
text: welcomeFile,
}, true)
// Set it as the current file
.then(newFile => store.commit('file/setCurrentId', newFile.id));
}
} else {
Promise.resolve()
// Load contentState from DB
.then(() => localDbSvc.loadContentState(currentFile.id))
// Load syncedContent from DB
.then(() => localDbSvc.loadSyncedContent(currentFile.id))
// Load content from DB
.then(() => localDbSvc.loadItem(`${currentFile.id}/content`))
.then(
() => {
// Set last opened file
store.dispatch('data/setLastOpenedId', currentFile.id);
// Cancel new discussion
store.commit('discussion/setCurrentDiscussionId');
// Open the gutter if file contains discussions
store.commit(
'discussion/setCurrentDiscussionId',
store.getters['discussion/nextDiscussionId'],
);
},
(err) => {
// Failure (content is not available), go back to previous file
const lastOpenedFile = store.getters['file/lastOpened'];
store.commit('file/setCurrentId', lastOpenedFile.id);
throw err;
},
)
.catch((err) => {
console.error(err); // eslint-disable-line no-console
store.dispatch('notification/error', err);
});
}
}, {
immediate: true,
},
);
});
},
{ immediate: true },
);
},
};

View File

@ -7,6 +7,27 @@ const networkTimeout = 30 * 1000; // 30 sec
let isConnectionDown = false;
const userInactiveAfter = 2 * 60 * 1000; // 2 minutes
function parseHeaders(xhr) {
const pairs = xhr.getAllResponseHeaders().trim().split('\n');
const headers = {};
pairs.forEach((header) => {
const split = header.trim().split(':');
const key = split.shift().trim().toLowerCase();
const value = split.join(':').trim();
headers[key] = value;
});
return headers;
}
function isRetriable(err) {
if (err.status === 403) {
const googleReason = ((((err.body || {}).error || {}).errors || [])[0] || {}).reason;
return googleReason === 'rateLimitExceeded' || googleReason === 'userRateLimitExceeded';
}
return err.status === 429 || (err.status >= 500 && err.status < 600);
}
export default {
init() {
// Keep track of the last user activity
@ -31,37 +52,34 @@ export default {
window.addEventListener('focus', setLastFocus);
// Check browser is online periodically
const checkOffline = () => {
const checkOffline = async () => {
const isBrowserOffline = window.navigator.onLine === false;
if (!isBrowserOffline &&
store.state.lastOfflineCheck + networkTimeout + 5000 < Date.now() &&
this.isUserActive()
) {
store.commit('updateLastOfflineCheck');
new Promise((resolve, reject) => {
const script = document.createElement('script');
let timeout;
let clean = (cb) => {
clearTimeout(timeout);
document.head.removeChild(script);
clean = () => {}; // Prevent from cleaning several times
cb();
};
script.onload = () => clean(resolve);
script.onerror = () => clean(reject);
script.src = `https://apis.google.com/js/api.js?${Date.now()}`;
try {
document.head.appendChild(script); // This can fail with bad network
timeout = setTimeout(() => clean(reject), networkTimeout);
} catch (e) {
reject(e);
}
})
.then(() => {
isConnectionDown = false;
}, () => {
isConnectionDown = true;
const script = document.createElement('script');
let timeout;
try {
await new Promise((resolve, reject) => {
script.onload = resolve;
script.onerror = reject;
script.src = `https://apis.google.com/js/api.js?${Date.now()}`;
try {
document.head.appendChild(script); // This can fail with bad network
timeout = setTimeout(reject, networkTimeout);
} catch (e) {
reject(e);
}
});
isConnectionDown = false;
} catch (e) {
isConnectionDown = true;
} finally {
clearTimeout(timeout);
document.head.removeChild(script);
}
}
const offline = isBrowserOffline || isConnectionDown;
if (store.state.offline !== offline) {
@ -88,7 +106,7 @@ export default {
isUserActive() {
return this.lastActivity > Date.now() - userInactiveAfter && this.isWindowFocused();
},
loadScript(url) {
async loadScript(url) {
if (!scriptLoadingPromises[url]) {
scriptLoadingPromises[url] = new Promise((resolve, reject) => {
const script = document.createElement('script');
@ -103,7 +121,7 @@ export default {
}
return scriptLoadingPromises[url];
},
startOauth2(url, params = {}, silent = false) {
async startOauth2(url, params = {}, silent = false) {
// Build the authorize URL
const state = utils.uid();
params.state = state;
@ -125,69 +143,66 @@ export default {
}
}
return new Promise((resolve, reject) => {
let checkClosedInterval;
let closeTimeout;
let msgHandler;
let clean = () => {
clearInterval(checkClosedInterval);
if (!silent && !wnd.closed) {
wnd.close();
let checkClosedInterval;
let closeTimeout;
let msgHandler;
try {
return await new Promise((resolve, reject) => {
if (silent) {
iframeElt.onerror = () => {
reject(new Error('Unknown error.'));
};
closeTimeout = setTimeout(() => {
isConnectionDown = true;
store.commit('setOffline', true);
store.commit('updateLastOfflineCheck');
reject(new Error('You are offline.'));
}, networkTimeout);
} else {
closeTimeout = setTimeout(() => {
reject(new Error('Timeout.'));
}, oauth2AuthorizationTimeout);
}
if (iframeElt) {
document.body.removeChild(iframeElt);
}
clearTimeout(closeTimeout);
window.removeEventListener('message', msgHandler);
clean = () => Promise.resolve(); // Prevent from cleaning several times
return Promise.resolve();
};
if (silent) {
iframeElt.onerror = () => clean()
.then(() => reject(new Error('Unknown error.')));
closeTimeout = setTimeout(
() => clean()
.then(() => {
isConnectionDown = true;
store.commit('setOffline', true);
store.commit('updateLastOfflineCheck');
reject(new Error('You are offline.'));
}),
networkTimeout,
);
} else {
closeTimeout = setTimeout(
() => clean()
.then(() => reject(new Error('Timeout.'))),
oauth2AuthorizationTimeout,
);
}
msgHandler = event => event.source === wnd && event.origin === utils.origin && clean()
.then(() => {
const data = utils.parseQueryParams(`${event.data}`.slice(1));
if (data.error || data.state !== state) {
console.error(data); // eslint-disable-line no-console
reject(new Error('Could not get required authorization.'));
} else {
resolve({
accessToken: data.access_token,
code: data.code,
idToken: data.id_token,
expiresIn: data.expires_in,
});
msgHandler = (event) => {
if (event.source === wnd && event.origin === utils.origin) {
const data = utils.parseQueryParams(`${event.data}`.slice(1));
if (data.error || data.state !== state) {
console.error(data); // eslint-disable-line no-console
reject(new Error('Could not get required authorization.'));
} else {
resolve({
accessToken: data.access_token,
code: data.code,
idToken: data.id_token,
expiresIn: data.expires_in,
});
}
}
});
};
window.addEventListener('message', msgHandler);
if (!silent) {
checkClosedInterval = setInterval(() => wnd.closed && clean()
.then(() => reject(new Error('Authorize window was closed.'))), 250);
window.addEventListener('message', msgHandler);
if (!silent) {
checkClosedInterval = setInterval(() => {
if (wnd.closed) {
reject(new Error('Authorize window was closed.'));
}
}, 250);
}
});
} finally {
clearInterval(checkClosedInterval);
if (!silent && !wnd.closed) {
wnd.close();
}
});
if (iframeElt) {
document.body.removeChild(iframeElt);
}
clearTimeout(closeTimeout);
window.removeEventListener('message', msgHandler);
}
},
request(configParam, offlineCheck = false) {
async request(configParam, offlineCheck = false) {
let retryAfter = 500; // 500 ms
const maxRetryAfter = 10 * 1000; // 10 sec
const config = Object.assign({}, configParam);
@ -198,101 +213,84 @@ export default {
config.headers['Content-Type'] = 'application/json';
}
function parseHeaders(xhr) {
const pairs = xhr.getAllResponseHeaders().trim().split('\n');
return pairs.reduce((headers, header) => {
const split = header.trim().split(':');
const key = split.shift().trim().toLowerCase();
const value = split.join(':').trim();
headers[key] = value;
return headers;
}, {});
}
function isRetriable(err) {
if (err.status === 403) {
const googleReason = ((((err.body || {}).error || {}).errors || [])[0] || {}).reason;
return googleReason === 'rateLimitExceeded' || googleReason === 'userRateLimitExceeded';
}
return err.status === 429 || (err.status >= 500 && err.status < 600);
}
const attempt =
() => new Promise((resolve, reject) => {
if (offlineCheck) {
store.commit('updateLastOfflineCheck');
}
const xhr = new window.XMLHttpRequest();
xhr.withCredentials = config.withCredentials || false;
let timeoutId;
xhr.onload = () => {
const attempt = async () => {
try {
await new Promise((resolve, reject) => {
if (offlineCheck) {
isConnectionDown = false;
store.commit('updateLastOfflineCheck');
}
clearTimeout(timeoutId);
const result = {
status: xhr.status,
headers: parseHeaders(xhr),
body: config.blob ? xhr.response : xhr.responseText,
};
if (!config.raw && !config.blob) {
try {
result.body = JSON.parse(result.body);
} catch (e) {
// ignore
const xhr = new window.XMLHttpRequest();
xhr.withCredentials = config.withCredentials || false;
let timeoutId;
xhr.onload = () => {
if (offlineCheck) {
isConnectionDown = false;
}
}
if (result.status >= 200 && result.status < 300) {
resolve(result);
return;
}
reject(result);
};
clearTimeout(timeoutId);
const result = {
status: xhr.status,
headers: parseHeaders(xhr),
body: config.blob ? xhr.response : xhr.responseText,
};
if (!config.raw && !config.blob) {
try {
result.body = JSON.parse(result.body);
} catch (e) {
// ignore
}
}
if (result.status >= 200 && result.status < 300) {
resolve(result);
return;
}
reject(result);
};
xhr.onerror = () => {
clearTimeout(timeoutId);
if (offlineCheck) {
isConnectionDown = true;
store.commit('setOffline', true);
reject(new Error('You are offline.'));
} else {
reject(new Error('Network request failed.'));
}
};
xhr.onerror = () => {
clearTimeout(timeoutId);
if (offlineCheck) {
isConnectionDown = true;
store.commit('setOffline', true);
reject(new Error('You are offline.'));
} else {
reject(new Error('Network request failed.'));
}
};
timeoutId = setTimeout(() => {
xhr.abort();
if (offlineCheck) {
isConnectionDown = true;
store.commit('setOffline', true);
reject(new Error('You are offline.'));
} else {
reject(new Error('Network request timeout.'));
}
}, config.timeout);
timeoutId = setTimeout(() => {
xhr.abort();
if (offlineCheck) {
isConnectionDown = true;
store.commit('setOffline', true);
reject(new Error('You are offline.'));
} else {
reject(new Error('Network request timeout.'));
}
}, config.timeout);
const url = utils.addQueryParams(config.url, config.params);
xhr.open(config.method || 'GET', url);
Object.entries(config.headers).forEach(([key, value]) =>
value && xhr.setRequestHeader(key, `${value}`));
if (config.blob) {
xhr.responseType = 'blob';
}
xhr.send(config.body || null);
})
.catch((err) => {
// Try again later in case of retriable error
if (isRetriable(err) && retryAfter < maxRetryAfter) {
return new Promise((resolve) => {
setTimeout(resolve, retryAfter);
// Exponential backoff
retryAfter *= 2;
})
.then(attempt);
const url = utils.addQueryParams(config.url, config.params);
xhr.open(config.method || 'GET', url);
Object.entries(config.headers).forEach(([key, value]) =>
value && xhr.setRequestHeader(key, `${value}`));
if (config.blob) {
xhr.responseType = 'blob';
}
throw err;
xhr.send(config.body || null);
});
} catch (err) {
// Try again later in case of retriable error
if (isRetriable(err) && retryAfter < maxRetryAfter) {
await new Promise((resolve) => {
setTimeout(resolve, retryAfter);
// Exponential backoff
retryAfter *= 2;
});
          return attempt();
}
throw err;
}
};
return attempt();
},
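Read in isolation, the retry logic above is the classic async/await backoff pattern: retry a failed attempt after an exponentially growing delay until a cap is reached, then rethrow the last error. A minimal sketch using the same 500 ms / 10 s bounds (performRequest and isRetriable are placeholders for the XHR call and the error predicate):

const requestWithRetry = async (performRequest, isRetriable) => {
  let retryAfter = 500; // 500 ms
  const maxRetryAfter = 10 * 1000; // 10 sec
  const attempt = async () => {
    try {
      return await performRequest();
    } catch (err) {
      if (isRetriable(err) && retryAfter < maxRetryAfter) {
        // Wait, double the delay (exponential backoff), then try again.
        await new Promise(resolve => setTimeout(resolve, retryAfter));
        retryAfter *= 2;
        return attempt();
      }
      throw err;
    }
  };
  return attempt();
};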

View File

@@ -15,24 +15,21 @@ export default new Provider({
const token = this.getToken(location);
return `${location.pageId}${location.blogUrl}${token.name}`;
},
publish(token, html, metadata, publishLocation) {
return googleHelper.uploadBlogger(
async publish(token, html, metadata, publishLocation) {
const page = await googleHelper.uploadBlogger({
token,
publishLocation.blogUrl,
publishLocation.blogId,
publishLocation.pageId,
metadata.title,
html,
null,
null,
null,
true,
)
.then(page => ({
...publishLocation,
blogId: page.blog.id,
pageId: page.id,
}));
blogUrl: publishLocation.blogUrl,
blogId: publishLocation.blogId,
postId: publishLocation.pageId,
title: metadata.title,
content: html,
isPage: true,
});
return {
...publishLocation,
blogId: page.blog.id,
pageId: page.id,
};
},
makeLocation(token, blogUrl, pageId) {
const location = {

View File

@@ -15,23 +15,21 @@ export default new Provider({
const token = this.getToken(location);
return `${location.postId}${location.blogUrl}${token.name}`;
},
publish(token, html, metadata, publishLocation) {
return googleHelper.uploadBlogger(
async publish(token, html, metadata, publishLocation) {
const post = await googleHelper.uploadBlogger({
...publishLocation,
token,
publishLocation.blogUrl,
publishLocation.blogId,
publishLocation.postId,
metadata.title,
html,
metadata.tags,
metadata.status === 'draft',
metadata.date,
)
.then(post => ({
...publishLocation,
blogId: post.blog.id,
postId: post.id,
}));
title: metadata.title,
content: html,
labels: metadata.tags,
isDraft: metadata.status === 'draft',
published: metadata.date,
});
return {
...publishLocation,
blogId: post.blog.id,
postId: post.id,
};
},
makeLocation(token, blogUrl, postId) {
const location = {

View File

@@ -2,6 +2,7 @@ import providerRegistry from './providerRegistry';
import emptyContent from '../../../data/emptyContent';
import utils from '../../utils';
import store from '../../../store';
import fileSvc from '../../fileSvc';
const dataExtractor = /<!--stackedit_data:([A-Za-z0-9+/=\s]+)-->$/;
@@ -66,6 +67,14 @@ export default class Provider {
return utils.addItemHash(result);
}
static getContentSyncData(fileId) {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
if (!syncData) {
throw new Error(); // No need for a proper error message.
}
return syncData;
}
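Since every caller of getContentSyncData in this commit is an async method, the plain throw above simply rejects the caller's promise; the per-provider getSyncData helpers that returned Promise.reject() are no longer needed. A small generic illustration of why the two are equivalent for async callers:

const requireEntry = (table, key) => {
  const entry = table[key];
  if (!entry) {
    throw new Error(`No entry for ${key}`);
  }
  return entry;
};

const readEntry = async (table, key) => {
  // A synchronous throw here surfaces as a rejection of readEntry's promise.
  const entry = requireEntry(table, key);
  return entry.value;
};

// readEntry({}, 'missing').catch(err => console.error(err.message));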
/**
* Find and open a file with location that meets the criteria
*/
@@ -73,13 +82,13 @@ export default class Provider {
const location = utils.search(allLocations, criteria);
if (location) {
// Found one, open it if it exists
const file = store.state.file.itemMap[location.fileId];
if (file) {
store.commit('file/setCurrentId', file.id);
const item = store.state.file.itemMap[location.fileId];
if (item) {
store.commit('file/setCurrentId', item.id);
// If file is in the trash, restore it
if (file.parentId === 'trash') {
store.commit('file/patchItem', {
...file,
if (item.parentId === 'trash') {
fileSvc.setOrPatchItem({
...item,
parentId: null,
});
}

View File

@@ -3,13 +3,6 @@ import couchdbHelper from './helpers/couchdbHelper';
import Provider from './common/Provider';
import utils from '../utils';
const getSyncData = (fileId) => {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
return syncData
? Promise.resolve(syncData)
: Promise.reject(); // No need for a proper error message.
};
let syncLastSeq;
export default new Provider({
@@ -17,7 +10,7 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
const dbUrl = (utils.queryParams.dbUrl || '').replace(/\/?$/, ''); // Remove trailing /
const workspaceParams = {
providerId: this.id,
@@ -35,85 +28,85 @@ export default new Provider({
});
}
return Promise.resolve()
.then(() => getWorkspace() || couchdbHelper.getDb(getToken())
.then((db) => {
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
id: workspaceId,
name: db.db_name,
providerId: this.id,
dbUrl,
},
});
return getWorkspace();
}, () => {
throw new Error(`${dbUrl} is not accessible. Make sure you have the right permissions.`);
}))
.then((workspace) => {
// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
[workspace.id]: {
...workspace,
url: window.location.href,
},
});
}
return getWorkspace();
// Create the workspace
let workspace = getWorkspace();
if (!workspace) {
// Make sure the database exists and retrieve its name
let db;
try {
db = await couchdbHelper.getDb(getToken());
} catch (e) {
throw new Error(`${dbUrl} is not accessible. Make sure you have the proper permissions.`);
}
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
id: workspaceId,
name: db.db_name,
providerId: this.id,
dbUrl,
},
});
workspace = getWorkspace();
}
// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
[workspace.id]: {
...workspace,
url: window.location.href,
},
});
}
return getWorkspace();
},
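The rewrite above turns the old .then(onSuccess, onError) pair into a try/catch that wraps only the database lookup, so the "not accessible" message cannot swallow errors thrown later in the flow. The same shape in isolation, with a hypothetical fetchDb helper:

const ensureDb = async (dbUrl, fetchDb) => {
  let db;
  try {
    db = await fetchDb(dbUrl);
  } catch (e) {
    // Only the lookup is wrapped; failures further down keep their own errors.
    throw new Error(`${dbUrl} is not accessible. Make sure you have the proper permissions.`);
  }
  return { name: db.db_name, dbUrl };
};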
getChanges() {
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const lastSeq = store.getters['data/localSettings'].syncLastSeq;
return couchdbHelper.getChanges(syncToken, lastSeq)
.then((result) => {
const changes = result.changes.filter((change) => {
if (!change.deleted && change.doc) {
change.item = change.doc.item;
if (!change.item || !change.item.id || !change.item.type) {
return false;
}
// Build sync data
change.syncData = {
id: change.id,
itemId: change.item.id,
type: change.item.type,
hash: change.item.hash,
rev: change.doc._rev, // eslint-disable-line no-underscore-dangle
};
}
change.syncDataId = change.id;
return true;
});
syncLastSeq = result.lastSeq;
return changes;
});
const result = await couchdbHelper.getChanges(syncToken, lastSeq);
const changes = result.changes.filter((change) => {
if (!change.deleted && change.doc) {
change.item = change.doc.item;
if (!change.item || !change.item.id || !change.item.type) {
return false;
}
// Build sync data
change.syncData = {
id: change.id,
itemId: change.item.id,
type: change.item.type,
hash: change.item.hash,
rev: change.doc._rev, // eslint-disable-line no-underscore-dangle
};
}
change.syncDataId = change.id;
return true;
});
syncLastSeq = result.lastSeq;
return changes;
},
onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
syncLastSeq,
});
},
saveSimpleItem(item, syncData) {
async saveSimpleItem(item, syncData) {
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.uploadDocument(
syncToken,
    const { id, rev } = await couchdbHelper.uploadDocument({
token: syncToken,
item,
undefined,
undefined,
syncData && syncData.id,
syncData && syncData.rev,
)
.then(res => ({
// Build sync data
id: res.id,
itemId: item.id,
type: item.type,
hash: item.hash,
rev: res.rev,
}));
documentId: syncData && syncData.id,
rev: syncData && syncData.rev,
});
return {
// Build sync data
id,
itemId: item.id,
type: item.type,
hash: item.hash,
rev,
};
},
removeItem(syncData) {
const syncToken = store.getters['workspace/syncToken'];
@@ -122,65 +115,61 @@ export default new Provider({
downloadContent(token, syncLocation) {
return this.downloadData(`${syncLocation.fileId}/content`);
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
}
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.retrieveDocumentWithAttachments(syncToken, syncData.id)
.then((body) => {
let item;
if (body.item.type === 'content') {
item = Provider.parseContent(body.attachments.data, body.item.id);
} else {
item = utils.addItemHash(JSON.parse(body.attachments.data));
}
const rev = body._rev; // eslint-disable-line no-underscore-dangle
if (item.hash !== syncData.hash || rev !== syncData.rev) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
rev,
},
});
}
return item;
});
},
uploadContent(token, content, syncLocation) {
return this.uploadData(content)
.then(() => syncLocation);
},
uploadData(item) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
let data;
let dataType;
if (item.type === 'content') {
data = Provider.serializeContent(item);
dataType = 'text/plain';
const body = await couchdbHelper.retrieveDocumentWithAttachments(syncToken, syncData.id);
let item;
if (body.item.type === 'content') {
item = Provider.parseContent(body.attachments.data, body.item.id);
} else {
data = JSON.stringify(item);
dataType = 'application/json';
item = utils.addItemHash(JSON.parse(body.attachments.data));
}
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.uploadDocument(
syncToken,
{
id: item.id,
type: item.type,
hash: item.hash,
},
data,
dataType,
syncData && syncData.id,
syncData && syncData.rev,
)
.then(res => store.dispatch('data/patchSyncData', {
const rev = body._rev; // eslint-disable-line no-underscore-dangle
if (item.hash !== syncData.hash || rev !== syncData.rev) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
rev,
},
});
}
return item;
},
async uploadContent(token, content, syncLocation) {
await this.uploadData(content);
return syncLocation;
},
async uploadData(item) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (!syncData || syncData.hash !== item.hash) {
let data;
let dataType;
if (item.type === 'content') {
data = Provider.serializeContent(item);
dataType = 'text/plain';
} else {
data = JSON.stringify(item);
dataType = 'application/json';
}
const syncToken = store.getters['workspace/syncToken'];
const res = await couchdbHelper.uploadDocument({
token: syncToken,
item: {
id: item.id,
type: item.type,
hash: item.hash,
},
data,
dataType,
documentId: syncData && syncData.id,
rev: syncData && syncData.rev,
});
store.dispatch('data/patchSyncData', {
[res.id]: {
// Build sync data
id: res.id,
@@ -189,37 +178,34 @@ export default new Provider({
hash: item.hash,
rev: res.rev,
},
}));
});
}
},
listRevisions(token, fileId) {
return getSyncData(fileId)
.then(syncData => couchdbHelper.retrieveDocumentWithRevisions(token, syncData.id))
.then((body) => {
const revisions = [];
body._revs_info.forEach((revInfo) => { // eslint-disable-line no-underscore-dangle
if (revInfo.status === 'available') {
revisions.push({
id: revInfo.rev,
sub: null,
created: null,
});
}
async listRevisions(token, fileId) {
const syncData = Provider.getContentSyncData(fileId);
const body = await couchdbHelper.retrieveDocumentWithRevisions(token, syncData.id);
const revisions = [];
body._revs_info.forEach((revInfo) => { // eslint-disable-line no-underscore-dangle
if (revInfo.status === 'available') {
revisions.push({
id: revInfo.rev,
sub: null,
created: null,
});
return revisions;
});
}
});
return revisions;
},
loadRevision(token, fileId, revision) {
return getSyncData(fileId)
.then(syncData => couchdbHelper.retrieveDocument(token, syncData.id, revision.id))
.then((body) => {
revision.sub = body.sub;
revision.created = body.time || 1; // Has to be truthy to prevent from loading several times
});
async loadRevision(token, fileId, revision) {
const syncData = Provider.getContentSyncData(fileId);
const body = await couchdbHelper.retrieveDocument(token, syncData.id, revision.id);
revision.sub = body.sub;
revision.created = body.time || 1; // Has to be truthy to prevent from loading several times
},
getRevisionContent(token, fileId, revisionId) {
return getSyncData(fileId)
.then(syncData => couchdbHelper
.retrieveDocumentWithAttachments(token, syncData.id, revisionId))
.then(body => Provider.parseContent(body.attachments.data, body.item.id));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const body = await couchdbHelper
.retrieveDocumentWithAttachments(token, syncData.id, revisionId);
return Provider.parseContent(body.attachments.data, body.item.id);
},
});
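Beyond async/await, a recurring change in this commit is that helpers such as uploadDocument, uploadFile, uploadGist and uploadBlogger now take a single options object instead of a long positional list, so call sites no longer pad unused slots with undefined. A generic before/after sketch (hypothetical upload helper):

// Before: every unused position must be filled with undefined.
const uploadPositional = (token, item, data, dataType, documentId, rev) =>
  ({ token, item, data, dataType, documentId, rev });

uploadPositional('tok', { id: 'item1' }, undefined, undefined, 'doc1', '3-abc');

// After: one destructured options object, with defaults where they make sense.
const upload = ({ token, item, data = null, dataType = 'application/json', documentId, rev }) =>
  ({ token, item, data, dataType, documentId, rev });

upload({
  token: 'tok',
  item: { id: 'item1' },
  documentId: 'doc1',
  rev: '3-abc',
});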

View File

@@ -34,94 +34,88 @@ export default new Provider({
checkPath(path) {
return path && path.match(/^\/[^\\<>:"|?*]+$/);
},
downloadContent(token, syncLocation) {
return dropboxHelper.downloadFile(
async downloadContent(token, syncLocation) {
const { content } = await dropboxHelper.downloadFile({
token,
makePathRelative(token, syncLocation.path),
syncLocation.dropboxFileId,
)
.then(({ content }) => Provider.parseContent(content, `${syncLocation.fileId}/content`));
path: makePathRelative(token, syncLocation.path),
fileId: syncLocation.dropboxFileId,
});
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
uploadContent(token, content, syncLocation) {
return dropboxHelper.uploadFile(
async uploadContent(token, content, syncLocation) {
const dropboxFile = await dropboxHelper.uploadFile({
token,
makePathRelative(token, syncLocation.path),
Provider.serializeContent(content),
syncLocation.dropboxFileId,
)
.then(dropboxFile => ({
...syncLocation,
path: makePathAbsolute(token, dropboxFile.path_display),
dropboxFileId: dropboxFile.id,
}));
path: makePathRelative(token, syncLocation.path),
content: Provider.serializeContent(content),
fileId: syncLocation.dropboxFileId,
});
return {
...syncLocation,
path: makePathAbsolute(token, dropboxFile.path_display),
dropboxFileId: dropboxFile.id,
};
},
publish(token, html, metadata, publishLocation) {
return dropboxHelper.uploadFile(
async publish(token, html, metadata, publishLocation) {
const dropboxFile = await dropboxHelper.uploadFile({
token,
publishLocation.path,
html,
publishLocation.dropboxFileId,
)
.then(dropboxFile => ({
...publishLocation,
path: makePathAbsolute(token, dropboxFile.path_display),
dropboxFileId: dropboxFile.id,
}));
path: publishLocation.path,
content: html,
fileId: publishLocation.dropboxFileId,
});
return {
...publishLocation,
path: makePathAbsolute(token, dropboxFile.path_display),
dropboxFileId: dropboxFile.id,
};
},
openFiles(token, paths) {
const openOneFile = () => {
const path = paths.pop();
if (!path) {
return null;
}
if (Provider.openFileWithLocation(store.getters['syncLocation/items'], {
async openFiles(token, paths) {
await utils.awaitSequence(paths, async (path) => {
// Check if the file exists and open it
if (!Provider.openFileWithLocation(store.getters['syncLocation/items'], {
providerId: this.id,
path,
})) {
// File exists and has just been opened. Next...
return openOneFile();
}
// Download content from Dropbox and create the file
const syncLocation = {
path,
providerId: this.id,
sub: token.sub,
};
return this.downloadContent(token, syncLocation)
.then((content) => {
let name = path;
const slashPos = name.lastIndexOf('/');
if (slashPos > -1 && slashPos < name.length - 1) {
name = name.slice(slashPos + 1);
}
const dotPos = name.lastIndexOf('.');
if (dotPos > 0 && slashPos < name.length) {
name = name.slice(0, dotPos);
}
return fileSvc.createFile({
name,
parentId: store.getters['file/current'].parentId,
text: content.text,
properties: content.properties,
discussions: content.discussions,
comments: content.comments,
}, true);
})
.then((item) => {
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
id: utils.uid(),
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from Dropbox.`);
})
.catch(() => {
// Download content from Dropbox
const syncLocation = {
path,
providerId: this.id,
sub: token.sub,
};
let content;
try {
content = await this.downloadContent(token, syncLocation);
} catch (e) {
store.dispatch('notification/error', `Could not open file ${path}.`);
})
.then(() => openOneFile());
};
return Promise.resolve(openOneFile());
return;
}
// Create the file
let name = path;
const slashPos = name.lastIndexOf('/');
if (slashPos > -1 && slashPos < name.length - 1) {
name = name.slice(slashPos + 1);
}
const dotPos = name.lastIndexOf('.');
if (dotPos > 0 && slashPos < name.length) {
name = name.slice(0, dotPos);
}
const item = await fileSvc.createFile({
name,
parentId: store.getters['file/current'].parentId,
text: content.text,
properties: content.properties,
discussions: content.discussions,
comments: content.comments,
}, true);
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
id: utils.uid(),
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from Dropbox.`);
}
});
},
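openFiles above depends on utils.awaitSequence to handle one path at a time; an await inside a plain forEach callback would start every download at once and discard the resulting promises. A minimal sketch of what such a sequential helper can look like, assuming it simply awaits the iteratee for each value in order:

const awaitSequence = async (values, iteratee) => {
  const results = [];
  for (const value of values) {
    // Each call finishes before the next one starts.
    results.push(await iteratee(value));
  }
  return results;
};

// Usage sketch:
// await awaitSequence(paths, async path => openOnePath(path));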
makeLocation(token, path) {
return {

View File

@@ -15,39 +15,38 @@ export default new Provider({
const token = this.getToken(location);
return `${location.filename}${location.gistId}${token.name}`;
},
downloadContent(token, syncLocation) {
return githubHelper.downloadGist(token, syncLocation.gistId, syncLocation.filename)
.then(content => Provider.parseContent(content, `${syncLocation.fileId}/content`));
async downloadContent(token, syncLocation) {
const content = await githubHelper.downloadGist({
...syncLocation,
token,
});
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
uploadContent(token, content, syncLocation) {
async uploadContent(token, content, syncLocation) {
const file = store.state.file.itemMap[syncLocation.fileId];
const description = utils.sanitizeName(file && file.name);
return githubHelper.uploadGist(
const gist = await githubHelper.uploadGist({
...syncLocation,
token,
description,
syncLocation.filename,
Provider.serializeContent(content),
syncLocation.isPublic,
syncLocation.gistId,
)
.then(gist => ({
...syncLocation,
gistId: gist.id,
}));
content: Provider.serializeContent(content),
});
return {
...syncLocation,
gistId: gist.id,
};
},
publish(token, html, metadata, publishLocation) {
return githubHelper.uploadGist(
async publish(token, html, metadata, publishLocation) {
const gist = await githubHelper.uploadGist({
...publishLocation,
token,
metadata.title,
publishLocation.filename,
html,
publishLocation.isPublic,
publishLocation.gistId,
)
.then(gist => ({
...publishLocation,
gistId: gist.id,
}));
description: metadata.title,
content: html,
});
return {
...publishLocation,
gistId: gist.id,
};
},
makeLocation(token, filename, isPublic, gistId) {
return {

View File

@@ -18,99 +18,83 @@ export default new Provider({
const token = this.getToken(location);
return `${location.path}${location.owner}/${location.repo}${token.name}`;
},
downloadContent(token, syncLocation) {
return githubHelper.downloadFile(
token,
syncLocation.owner,
syncLocation.repo,
syncLocation.branch,
syncLocation.path,
)
.then(({ sha, content }) => {
savedSha[syncLocation.id] = sha;
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
})
.catch(() => null); // Ignore error, upload is going to fail anyway
},
uploadContent(token, content, syncLocation) {
let result = Promise.resolve();
if (!savedSha[syncLocation.id]) {
result = this.downloadContent(token, syncLocation); // Get the last sha
}
return result
.then(() => {
const sha = savedSha[syncLocation.id];
delete savedSha[syncLocation.id];
return githubHelper.uploadFile(
token,
syncLocation.owner,
syncLocation.repo,
syncLocation.branch,
syncLocation.path,
Provider.serializeContent(content),
sha,
);
})
.then(() => syncLocation);
},
publish(token, html, metadata, publishLocation) {
return this.downloadContent(token, publishLocation) // Get the last sha
.then(() => {
const sha = savedSha[publishLocation.id];
delete savedSha[publishLocation.id];
return githubHelper.uploadFile(
token,
publishLocation.owner,
publishLocation.repo,
publishLocation.branch,
publishLocation.path,
html,
sha,
);
})
.then(() => publishLocation);
},
openFile(token, syncLocation) {
return Promise.resolve()
.then(() => {
if (Provider.openFileWithLocation(store.getters['syncLocation/items'], syncLocation)) {
// File exists and has just been opened. Next...
return null;
}
// Download content from GitHub and create the file
return this.downloadContent(token, syncLocation)
.then((content) => {
let name = syncLocation.path;
const slashPos = name.lastIndexOf('/');
if (slashPos > -1 && slashPos < name.length - 1) {
name = name.slice(slashPos + 1);
}
const dotPos = name.lastIndexOf('.');
if (dotPos > 0 && slashPos < name.length) {
name = name.slice(0, dotPos);
}
return fileSvc.createFile({
name,
parentId: store.getters['file/current'].parentId,
text: content.text,
properties: content.properties,
discussions: content.discussions,
comments: content.comments,
}, true);
})
.then((item) => {
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
id: utils.uid(),
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from GitHub.`);
})
.catch(() => {
store.dispatch('notification/error', `Could not open file ${syncLocation.path}.`);
});
async downloadContent(token, syncLocation) {
try {
const { sha, content } = await githubHelper.downloadFile({
...syncLocation,
token,
});
savedSha[syncLocation.id] = sha;
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
} catch (e) {
// Ignore error, upload is going to fail anyway
return null;
}
},
async uploadContent(token, content, syncLocation) {
if (!savedSha[syncLocation.id]) {
await this.downloadContent(token, syncLocation); // Get the last sha
}
const sha = savedSha[syncLocation.id];
delete savedSha[syncLocation.id];
await githubHelper.uploadFile({
...syncLocation,
token,
content: Provider.serializeContent(content),
sha,
});
return syncLocation;
},
async publish(token, html, metadata, publishLocation) {
await this.downloadContent(token, publishLocation); // Get the last sha
const sha = savedSha[publishLocation.id];
delete savedSha[publishLocation.id];
await githubHelper.uploadFile({
...publishLocation,
token,
content: html,
sha,
});
return publishLocation;
},
async openFile(token, syncLocation) {
// Check if the file exists and open it
if (!Provider.openFileWithLocation(store.getters['syncLocation/items'], syncLocation)) {
// Download content from GitHub
let content;
try {
content = await this.downloadContent(token, syncLocation);
} catch (e) {
store.dispatch('notification/error', `Could not open file ${syncLocation.path}.`);
return;
}
// Create the file
let name = syncLocation.path;
const slashPos = name.lastIndexOf('/');
if (slashPos > -1 && slashPos < name.length - 1) {
name = name.slice(slashPos + 1);
}
const dotPos = name.lastIndexOf('.');
if (dotPos > 0 && slashPos < name.length) {
name = name.slice(0, dotPos);
}
const item = await fileSvc.createFile({
name,
parentId: store.getters['file/current'].parentId,
text: content.text,
properties: content.properties,
discussions: content.discussions,
comments: content.comments,
}, true);
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
id: utils.uid(),
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from GitHub.`);
}
},
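Both the Dropbox and GitHub openers derive the new document's name from the synced path by dropping the folder prefix and the extension. Essentially the same slicing, extracted as a small helper for clarity (hypothetical name):

const nameFromPath = (path) => {
  let name = path;
  const slashPos = name.lastIndexOf('/');
  if (slashPos > -1 && slashPos < name.length - 1) {
    name = name.slice(slashPos + 1); // Drop parent folders.
  }
  const dotPos = name.lastIndexOf('.');
  if (dotPos > 0) {
    name = name.slice(0, dotPos); // Drop the extension, keep dotfiles intact.
  }
  return name;
};

// nameFromPath('/notes/2018/todo.md') === 'todo'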
parseRepoUrl(url) {
const parsedRepo = url && url.match(/([^/:]+)\/([^/]+?)(?:\.git|\/)?$/);

View File

@@ -4,15 +4,8 @@ import Provider from './common/Provider';
import utils from '../utils';
import userSvc from '../userSvc';
const getSyncData = (fileId) => {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
return syncData
? Promise.resolve(syncData)
: Promise.reject(); // No need for a proper error message.
};
const getAbsolutePath = syncData =>
(store.getters['workspace/currentWorkspace'].path || '') + syncData.id;
`${store.getters['workspace/currentWorkspace'].path || ''}${syncData.id}`;
const getWorkspaceWithOwner = () => {
const workspace = store.getters['workspace/currentWorkspace'];
@@ -38,7 +31,7 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
const [owner, repo] = (utils.queryParams.repo || '').split('/');
const { branch } = utils.queryParams;
const workspaceParams = {
@@ -55,409 +48,390 @@ export default new Provider({
const workspaceId = utils.makeWorkspaceId(workspaceParams);
let workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];
return Promise.resolve()
.then(() => {
// See if we already have a token
if (workspace) {
// Token sub is in the workspace
const token = store.getters['data/githubTokens'][workspace.sub];
if (token) {
return token;
}
}
// If no token has been found, popup an authorize window and get one
return store.dispatch('modal/open', {
type: 'githubAccount',
onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
});
})
.then((token) => {
if (!workspace) {
const pathEntries = (path || '').split('/');
const name = pathEntries[pathEntries.length - 2] || repo; // path ends with `/`
workspace = {
...workspaceParams,
id: workspaceId,
sub: token.sub,
name,
};
}
// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
...workspace,
url: window.location.href,
},
});
}
return store.getters['data/sanitizedWorkspaces'][workspaceId];
// See if we already have a token
let token;
if (workspace) {
// Token sub is in the workspace
token = store.getters['data/githubTokens'][workspace.sub];
}
if (!token) {
await store.dispatch('modal/open', { type: 'githubAccount' });
token = await githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess);
}
if (!workspace) {
const pathEntries = (path || '').split('/');
const name = pathEntries[pathEntries.length - 2] || repo; // path ends with `/`
workspace = {
...workspaceParams,
id: workspaceId,
sub: token.sub,
name,
};
}
// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
...workspace,
url: window.location.href,
},
});
}
return store.getters['data/sanitizedWorkspaces'][workspaceId];
},
getChanges() {
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.getHeadTree(syncToken, owner, repo, branch)
.then((tree) => {
const workspacePath = store.getters['workspace/currentWorkspace'].path || '';
const syncDataByPath = store.getters['data/syncData'];
const syncDataByItemId = store.getters['data/syncDataByItemId'];
const tree = await githubHelper.getTree({
token: syncToken,
owner,
repo,
branch,
});
const workspacePath = store.getters['workspace/currentWorkspace'].path || '';
const syncDataByPath = store.getters['data/syncData'];
const syncDataByItemId = store.getters['data/syncDataByItemId'];
// Store all blobs sha
treeShaMap = Object.create(null);
// Store interesting paths
treeFolderMap = Object.create(null);
treeFileMap = Object.create(null);
treeDataMap = Object.create(null);
treeSyncLocationMap = Object.create(null);
treePublishLocationMap = Object.create(null);
// Store all blobs sha
treeShaMap = Object.create(null);
// Store interesting paths
treeFolderMap = Object.create(null);
treeFileMap = Object.create(null);
treeDataMap = Object.create(null);
treeSyncLocationMap = Object.create(null);
treePublishLocationMap = Object.create(null);
tree.filter(({ type, path }) => type === 'blob' && path.indexOf(workspacePath) === 0)
.forEach((blobEntry) => {
// Make path relative
const path = blobEntry.path.slice(workspacePath.length);
// Collect blob sha
treeShaMap[path] = blobEntry.sha;
// Collect parents path
let parentPath = '';
path.split('/').slice(0, -1).forEach((folderName) => {
const folderPath = `${parentPath}${folderName}/`;
treeFolderMap[folderPath] = parentPath;
parentPath = folderPath;
});
// Collect file path
if (path.indexOf('.stackedit-data/') === 0) {
treeDataMap[path] = true;
} else if (endsWith(path, '.md')) {
treeFileMap[path] = parentPath;
} else if (endsWith(path, '.sync')) {
treeSyncLocationMap[path] = true;
} else if (endsWith(path, '.publish')) {
treePublishLocationMap[path] = true;
}
});
// Collect changes
const changes = [];
const pathIds = {};
const syncDataToIgnore = Object.create(null);
const getId = (path) => {
const syncData = syncDataByPath[path];
const id = syncData ? syncData.itemId : utils.uid();
pathIds[path] = id;
return id;
};
// Folder creations/updates
// Assume map entries are sorted from top to bottom
Object.entries(treeFolderMap).forEach(([path, parentPath]) => {
const id = getId(path);
const item = utils.addItemHash({
id,
type: 'folder',
name: path.slice(parentPath.length, -1),
parentId: pathIds[parentPath] || null,
});
changes.push({
syncDataId: path,
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
tree.filter(({ type, path }) => type === 'blob' && path.indexOf(workspacePath) === 0)
.forEach((blobEntry) => {
// Make path relative
const path = blobEntry.path.slice(workspacePath.length);
// Collect blob sha
treeShaMap[path] = blobEntry.sha;
// Collect parents path
let parentPath = '';
path.split('/').slice(0, -1).forEach((folderName) => {
const folderPath = `${parentPath}${folderName}/`;
treeFolderMap[folderPath] = parentPath;
parentPath = folderPath;
});
// File creations/updates
Object.entries(treeFileMap).forEach(([path, parentPath]) => {
const id = getId(path);
const item = utils.addItemHash({
id,
type: 'file',
name: path.slice(parentPath.length, -'.md'.length),
parentId: pathIds[parentPath] || null,
});
changes.push({
syncDataId: path,
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
// Content creations/updates
const contentSyncData = syncDataByItemId[`${id}/content`];
if (contentSyncData) {
syncDataToIgnore[contentSyncData.id] = true;
}
if (!contentSyncData || contentSyncData.sha !== treeShaMap[path]) {
// Use `/` as a prefix to get a unique syncData id
changes.push({
syncDataId: `/${path}`,
item: {
id: `${id}/content`,
type: 'content',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: `/${path}`,
itemId: `${id}/content`,
type: 'content',
// Need a truthy value to force downloading the content
hash: 1,
},
});
}
});
// Data creations/updates
Object.keys(treeDataMap).forEach((path) => {
try {
const [, id] = path.match(/^\.stackedit-data\/([\s\S]+)\.json$/);
pathIds[path] = id;
const syncData = syncDataByItemId[id];
if (syncData) {
syncDataToIgnore[syncData.id] = true;
}
if (!syncData || syncData.sha !== treeShaMap[path]) {
changes.push({
syncDataId: path,
item: {
id,
type: 'data',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: path,
itemId: id,
type: 'data',
// Need a truthy value to force downloading the content
hash: 1,
},
});
}
} catch (e) {
// Ignore parsing errors
}
});
// Location creations/updates
[{
type: 'syncLocation',
map: treeSyncLocationMap,
pathMatcher: /^([\s\S]+)\.([\w-]+)\.sync$/,
}, {
type: 'publishLocation',
map: treePublishLocationMap,
pathMatcher: /^([\s\S]+)\.([\w-]+)\.publish$/,
}]
.forEach(({ type, map, pathMatcher }) => Object.keys(map).forEach((path) => {
try {
const [, filePath, data] = path.match(pathMatcher);
// If there is a corresponding md file in the tree
const fileId = pathIds[`${filePath}.md`];
if (fileId) {
const id = getId(path);
const item = utils.addItemHash({
...JSON.parse(utils.decodeBase64(data)),
id,
type,
fileId,
});
changes.push({
syncDataId: path,
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
}
} catch (e) {
// Ignore parsing errors
}
}));
// Deletions
Object.keys(syncDataByPath).forEach((path) => {
if (!pathIds[path] && !syncDataToIgnore[path]) {
changes.push({ syncDataId: path });
}
});
return changes;
// Collect file path
if (path.indexOf('.stackedit-data/') === 0) {
treeDataMap[path] = true;
} else if (endsWith(path, '.md')) {
treeFileMap[path] = parentPath;
} else if (endsWith(path, '.sync')) {
treeSyncLocationMap[path] = true;
} else if (endsWith(path, '.publish')) {
treePublishLocationMap[path] = true;
}
});
},
saveSimpleItem(item) {
const path = store.getters.itemPaths[item.fileId || item.id];
return Promise.resolve()
.then(() => {
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
const syncData = {
itemId: item.id,
// Collect changes
const changes = [];
const pathIds = {};
const syncDataToIgnore = Object.create(null);
const getId = (path) => {
const syncData = syncDataByPath[path];
const id = syncData ? syncData.itemId : utils.uid();
pathIds[path] = id;
return id;
};
// Folder creations/updates
// Assume map entries are sorted from top to bottom
Object.entries(treeFolderMap).forEach(([path, parentPath]) => {
const id = getId(path);
const item = utils.addItemHash({
id,
type: 'folder',
name: path.slice(parentPath.length, -1),
parentId: pathIds[parentPath] || null,
});
changes.push({
syncDataId: path,
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
};
},
});
});
if (item.type === 'file') {
syncData.id = `${path}.md`;
} else if (item.type === 'folder') {
syncData.id = path;
}
if (syncData.id) {
return syncData;
}
// File creations/updates
Object.entries(treeFileMap).forEach(([path, parentPath]) => {
const id = getId(path);
const item = utils.addItemHash({
id,
type: 'file',
name: path.slice(parentPath.length, -'.md'.length),
parentId: pathIds[parentPath] || null,
});
changes.push({
syncDataId: path,
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
// locations are stored as paths, so we upload an empty file
const data = utils.encodeBase64(utils.serializeObject({
...item,
id: undefined,
type: undefined,
fileId: undefined,
}), true);
const extension = item.type === 'syncLocation' ? 'sync' : 'publish';
syncData.id = `${path}.${data}.${extension}`;
return githubHelper.uploadFile(
syncToken,
owner,
repo,
branch,
getAbsolutePath(syncData),
'',
treeShaMap[syncData.id],
).then(() => syncData);
});
},
removeItem(syncData) {
// Ignore content deletion
if (syncData.type === 'content') {
return Promise.resolve();
}
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.removeFile(
syncToken,
owner,
repo,
branch,
getAbsolutePath(syncData),
treeShaMap[syncData.id],
);
},
downloadContent(token, syncLocation) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!syncData || !contentSyncData) {
return Promise.resolve();
}
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.downloadFile(token, owner, repo, branch, getAbsolutePath(syncData))
.then(({ sha, content }) => {
const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
[contentSyncData.id]: {
...contentSyncData,
hash: item.hash,
sha,
},
});
}
return item;
});
},
downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
}
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.downloadFile(syncToken, owner, repo, branch, getAbsolutePath(syncData))
.then(({ sha, content }) => {
const item = JSON.parse(content);
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
sha,
},
});
}
return item;
});
},
uploadContent(token, content, syncLocation) {
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (contentSyncData && contentSyncData.hash === content.hash) {
return Promise.resolve(syncLocation);
}
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.uploadFile(
token,
owner,
repo,
branch,
getAbsolutePath(syncData),
Provider.serializeContent(content),
treeShaMap[syncData.id],
)
.then((res) => {
const id = `/${syncData.id}`;
store.dispatch('data/patchSyncData', {
[id]: {
// Build sync data
id,
itemId: content.id,
type: content.type,
hash: content.hash,
sha: res.content.sha,
// Content creations/updates
const contentSyncData = syncDataByItemId[`${id}/content`];
if (contentSyncData) {
syncDataToIgnore[contentSyncData.id] = true;
}
if (!contentSyncData || contentSyncData.sha !== treeShaMap[path]) {
// Use `/` as a prefix to get a unique syncData id
changes.push({
syncDataId: `/${path}`,
item: {
id: `${id}/content`,
type: 'content',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: `/${path}`,
itemId: `${id}/content`,
type: 'content',
// Need a truthy value to force downloading the content
hash: 1,
},
});
return syncLocation;
});
}
});
// Data creations/updates
Object.keys(treeDataMap).forEach((path) => {
try {
const [, id] = path.match(/^\.stackedit-data\/([\s\S]+)\.json$/);
pathIds[path] = id;
const syncData = syncDataByItemId[id];
if (syncData) {
syncDataToIgnore[syncData.id] = true;
}
if (!syncData || syncData.sha !== treeShaMap[path]) {
changes.push({
syncDataId: path,
item: {
id,
type: 'data',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: path,
itemId: id,
type: 'data',
// Need a truthy value to force downloading the content
hash: 1,
},
});
}
} catch (e) {
// Ignore parsing errors
}
});
// Location creations/updates
[{
type: 'syncLocation',
map: treeSyncLocationMap,
pathMatcher: /^([\s\S]+)\.([\w-]+)\.sync$/,
}, {
type: 'publishLocation',
map: treePublishLocationMap,
pathMatcher: /^([\s\S]+)\.([\w-]+)\.publish$/,
}]
.forEach(({ type, map, pathMatcher }) => Object.keys(map).forEach((path) => {
try {
const [, filePath, data] = path.match(pathMatcher);
// If there is a corresponding md file in the tree
const fileId = pathIds[`${filePath}.md`];
if (fileId) {
const id = getId(path);
const item = utils.addItemHash({
...JSON.parse(utils.decodeBase64(data)),
id,
type,
fileId,
});
changes.push({
syncDataId: path,
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
}
} catch (e) {
// Ignore parsing errors
}
}));
// Deletions
Object.keys(syncDataByPath).forEach((path) => {
if (!pathIds[path] && !syncDataToIgnore[path]) {
changes.push({ syncDataId: path });
}
});
return changes;
},
uploadData(item) {
const oldSyncData = store.getters['data/syncDataByItemId'][item.id];
if (oldSyncData && oldSyncData.hash === item.hash) {
return Promise.resolve();
}
async saveSimpleItem(item) {
const path = store.getters.itemPaths[item.fileId || item.id];
const syncToken = store.getters['workspace/syncToken'];
const syncData = {
id: `.stackedit-data/${item.id}.json`,
itemId: item.id,
type: item.type,
hash: item.hash,
};
if (item.type === 'file') {
syncData.id = `${path}.md`;
return syncData;
}
if (item.type === 'folder') {
syncData.id = path;
return syncData;
}
// locations are stored as paths, so we upload an empty file
const data = utils.encodeBase64(utils.serializeObject({
...item,
id: undefined,
type: undefined,
fileId: undefined,
}), true);
const extension = item.type === 'syncLocation' ? 'sync' : 'publish';
syncData.id = `${path}.${data}.${extension}`;
await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
content: '',
sha: treeShaMap[syncData.id],
});
return syncData;
},
async removeItem(syncData) {
// Ignore content deletion
if (syncData.type !== 'content') {
const syncToken = store.getters['workspace/syncToken'];
await githubHelper.removeFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
sha: treeShaMap[syncData.id],
});
}
},
async downloadContent(token, syncLocation) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!syncData || !contentSyncData) {
return null;
}
const { sha, content } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
path: getAbsolutePath(syncData),
});
const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
[contentSyncData.id]: {
...contentSyncData,
hash: item.hash,
sha,
},
});
}
return item;
},
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return null;
}
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
return githubHelper.uploadFile(
syncToken,
owner,
repo,
branch,
getAbsolutePath(syncData),
JSON.stringify(item),
oldSyncData && oldSyncData.sha,
)
.then(res => store.dispatch('data/patchSyncData', {
const { sha, content } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
});
const item = JSON.parse(content);
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
sha,
},
});
}
return item;
},
async uploadContent(token, content, syncLocation) {
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!contentSyncData || contentSyncData.hash !== content.hash) {
const path = `${store.getters.itemPaths[syncLocation.fileId]}.md`;
const absolutePath = `${store.getters['workspace/currentWorkspace'].path || ''}${path}`;
const id = `/${path}`;
const res = await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token,
path: absolutePath,
content: Provider.serializeContent(content),
sha: treeShaMap[id],
});
store.dispatch('data/patchSyncData', {
[id]: {
// Build sync data
id,
itemId: content.id,
type: content.type,
hash: content.hash,
sha: res.content.sha,
},
});
}
return syncLocation;
},
async uploadData(item) {
const oldSyncData = store.getters['data/syncDataByItemId'][item.id];
if (!oldSyncData || oldSyncData.hash !== item.hash) {
const syncData = {
id: `.stackedit-data/${item.id}.json`,
itemId: item.id,
type: item.type,
hash: item.hash,
};
const syncToken = store.getters['workspace/syncToken'];
const res = await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token: syncToken,
path: getAbsolutePath(syncData),
content: JSON.stringify(item),
sha: oldSyncData && oldSyncData.sha,
});
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
sha: res.content.sha,
},
}));
});
}
},
onSyncEnd() {
// Clean up
@@ -468,34 +442,48 @@ export default new Provider({
treeSyncLocationMap = null;
treePublishLocationMap = null;
},
listRevisions(token, fileId) {
async listRevisions(token, fileId) {
const { owner, repo, branch } = getWorkspaceWithOwner();
return getSyncData(fileId)
.then(syncData => githubHelper.getCommits(token, owner, repo, branch, syncData.id))
.then(entries => entries.map((entry) => {
let user;
if (entry.author && entry.author.login) {
user = entry.author;
} else if (entry.committer && entry.committer.login) {
user = entry.committer;
}
const sub = `gh:${user.id}`;
userSvc.addInfo({ id: sub, name: user.login, imageUrl: user.avatar_url });
const date = (entry.commit.author && entry.commit.author.date)
|| (entry.commit.committer && entry.commit.committer.date);
return {
id: entry.sha,
sub,
created: date ? new Date(date).getTime() : 1,
};
})
.sort((revision1, revision2) => revision2.created - revision1.created));
const syncData = Provider.getContentSyncData(fileId);
const entries = await githubHelper.getCommits({
token,
owner,
repo,
sha: branch,
path: syncData.id,
});
return entries.map(({
author,
committer,
commit,
sha,
}) => {
let user;
if (author && author.login) {
user = author;
} else if (committer && committer.login) {
user = committer;
}
const sub = `gh:${user.id}`;
userSvc.addInfo({ id: sub, name: user.login, imageUrl: user.avatar_url });
const date = (commit.author && commit.author.date)
|| (commit.committer && commit.committer.date);
return {
id: sha,
sub,
created: date ? new Date(date).getTime() : 1,
};
})
.sort((revision1, revision2) => revision2.created - revision1.created);
},
getRevisionContent(token, fileId, revisionId) {
const { owner, repo } = getWorkspaceWithOwner();
return getSyncData(fileId)
.then(syncData => githubHelper
.downloadFile(token, owner, repo, revisionId, getAbsolutePath(syncData)))
.then(({ content }) => Provider.parseContent(content, `${fileId}/content`));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const { content } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
branch: revisionId,
path: getAbsolutePath(syncData),
});
return Provider.parseContent(content, `${fileId}/content`);
},
});

View File

@@ -10,65 +10,59 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
// Nothing much to do since the main workspace isn't necessarily synchronized
return Promise.resolve()
.then(() => {
// Remove the URL hash
utils.setQueryParams();
// Return the main workspace
return store.getters['data/workspaces'].main;
});
// Remove the URL hash
utils.setQueryParams();
// Return the main workspace
return store.getters['data/workspaces'].main;
},
getChanges() {
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const startPageToken = store.getters['data/localSettings'].syncStartPageToken;
return googleHelper.getChanges(syncToken, startPageToken, true)
.then((result) => {
const changes = result.changes.filter((change) => {
if (change.file) {
// Parse item from file name
try {
change.item = JSON.parse(change.file.name);
} catch (e) {
return false;
}
// Build sync data
change.syncData = {
id: change.fileId,
itemId: change.item.id,
type: change.item.type,
hash: change.item.hash,
};
}
change.syncDataId = change.fileId;
return true;
});
syncStartPageToken = result.startPageToken;
return changes;
});
const result = await googleHelper.getChanges(syncToken, startPageToken, true);
const changes = result.changes.filter((change) => {
if (change.file) {
// Parse item from file name
try {
change.item = JSON.parse(change.file.name);
} catch (e) {
return false;
}
// Build sync data
change.syncData = {
id: change.fileId,
itemId: change.item.id,
type: change.item.type,
hash: change.item.hash,
};
}
change.syncDataId = change.fileId;
return true;
});
syncStartPageToken = result.startPageToken;
return changes;
},
onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
syncStartPageToken,
});
},
saveSimpleItem(item, syncData, ifNotTooLate) {
async saveSimpleItem(item, syncData, ifNotTooLate) {
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.uploadAppDataFile(
syncToken,
JSON.stringify(item),
undefined,
syncData && syncData.id,
const file = await googleHelper.uploadAppDataFile({
token: syncToken,
name: JSON.stringify(item),
fileId: syncData && syncData.id,
ifNotTooLate,
)
.then(file => ({
// Build sync data
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
}));
});
// Build sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
};
},
removeItem(syncData, ifNotTooLate) {
const syncToken = store.getters['workspace/syncToken'];
@@ -77,48 +71,44 @@ export default new Provider({
downloadContent(token, syncLocation) {
return this.downloadData(`${syncLocation.fileId}/content`);
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
return null;
}
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.downloadAppDataFile(syncToken, syncData.id)
.then((data) => {
const item = utils.addItemHash(JSON.parse(data));
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
},
});
}
return item;
const data = await googleHelper.downloadAppDataFile(syncToken, syncData.id);
const item = utils.addItemHash(JSON.parse(data));
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
},
});
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
return this.uploadData(content, ifNotTooLate)
.then(() => syncLocation);
},
uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.uploadAppDataFile(
syncToken,
JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
}),
JSON.stringify(item),
syncData && syncData.id,
ifNotTooLate,
)
.then(file => store.dispatch('data/patchSyncData', {
return item;
},
async uploadContent(token, content, syncLocation, ifNotTooLate) {
await this.uploadData(content, ifNotTooLate);
return syncLocation;
},
async uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (!syncData || syncData.hash !== item.hash) {
const syncToken = store.getters['workspace/syncToken'];
const file = await googleHelper.uploadAppDataFile({
token: syncToken,
name: JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
}),
media: JSON.stringify(item),
fileId: syncData && syncData.id,
ifNotTooLate,
});
store.dispatch('data/patchSyncData', {
[file.id]: {
// Build sync data
id: file.id,
@@ -126,27 +116,22 @@ export default new Provider({
type: item.type,
hash: item.hash,
},
}));
},
listRevisions(token, fileId) {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
if (!syncData) {
return Promise.reject(); // No need for a proper error message.
});
}
return googleHelper.getAppDataFileRevisions(token, syncData.id)
.then(revisions => revisions.map(revision => ({
id: revision.id,
sub: revision.lastModifyingUser && `go:${revision.lastModifyingUser.permissionId}`,
created: new Date(revision.modifiedTime).getTime(),
}))
.sort((revision1, revision2) => revision2.created - revision1.created));
},
getRevisionContent(token, fileId, revisionId) {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
if (!syncData) {
return Promise.reject(); // No need for a proper error message.
}
return googleHelper.downloadAppDataFileRevision(token, syncData.id, revisionId)
.then(content => JSON.parse(content));
async listRevisions(token, fileId) {
const syncData = Provider.getContentSyncData(fileId);
const revisions = await googleHelper.getAppDataFileRevisions(token, syncData.id);
return revisions.map(revision => ({
id: revision.id,
sub: revision.lastModifyingUser && `go:${revision.lastModifyingUser.permissionId}`,
created: new Date(revision.modifiedTime).getTime(),
}))
.sort((revision1, revision2) => revision2.created - revision1.created);
},
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const content = await googleHelper.downloadAppDataFileRevision(token, syncData.id, revisionId);
return JSON.parse(content);
},
});

View File

@@ -17,184 +17,167 @@ export default new Provider({
const token = this.getToken(location);
return `${location.driveFileId}${token.name}`;
},
initAction() {
async initAction() {
const state = googleHelper.driveState || {};
return state.userId && Promise.resolve()
.then(() => {
// Try to find the token corresponding to the user ID
const token = store.getters['data/googleTokens'][state.userId];
// If not found or not enough permission, popup an OAuth2 window
return token && token.isDrive ? token : store.dispatch('modal/open', {
type: 'googleDriveAccount',
onResolve: () => googleHelper.addDriveAccount(
!store.getters['data/localSettings'].googleDriveRestrictedAccess,
state.userId,
),
});
})
.then((token) => {
const openWorkspaceIfExists = (file) => {
const folderId = file
&& file.appProperties
&& file.appProperties.folderId;
if (folderId) {
// See if we have the corresponding workspace
const workspaceParams = {
providerId: 'googleDriveWorkspace',
folderId,
};
const workspaceId = utils.makeWorkspaceId(workspaceParams);
const workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];
// If we have the workspace, open it by changing the current URL
if (workspace) {
utils.setQueryParams(workspaceParams);
if (state.userId) {
// Try to find the token corresponding to the user ID
let token = store.getters['data/googleTokens'][state.userId];
// If not found or not enough permission, popup an OAuth2 window
if (!token || !token.isDrive) {
await store.dispatch('modal/open', { type: 'googleDriveAccount' });
token = await googleHelper.addDriveAccount(
!store.getters['data/localSettings'].googleDriveRestrictedAccess,
state.userId,
);
}
const openWorkspaceIfExists = (file) => {
const folderId = file
&& file.appProperties
&& file.appProperties.folderId;
if (folderId) {
// See if we have the corresponding workspace
const workspaceParams = {
providerId: 'googleDriveWorkspace',
folderId,
};
const workspaceId = utils.makeWorkspaceId(workspaceParams);
const workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];
// If we have the workspace, open it by changing the current URL
if (workspace) {
utils.setQueryParams(workspaceParams);
}
}
};
switch (state.action) {
case 'create':
default:
// See if folder is part of a workspace we can open
try {
const folder = await googleHelper.getFile(token, state.folderId);
folder.appProperties = folder.appProperties || {};
googleHelper.driveActionFolder = folder;
openWorkspaceIfExists(folder);
} catch (err) {
if (!err || err.status !== 404) {
throw err;
}
// We received an HTTP 404 meaning we have no permission to read the folder
googleHelper.driveActionFolder = { id: state.folderId };
}
};
break;
switch (state.action) {
case 'create':
default:
// See if folder is part of a workspace we can open
return googleHelper.getFile(token, state.folderId)
.then((folder) => {
folder.appProperties = folder.appProperties || {};
googleHelper.driveActionFolder = folder;
openWorkspaceIfExists(folder);
}, (err) => {
if (!err || err.status !== 404) {
throw err;
}
// We received an HTTP 404 meaning we have no permission to read the folder
googleHelper.driveActionFolder = { id: state.folderId };
});
case 'open': {
await utils.awaitSequence(state.ids || [], async (id) => {
const file = await googleHelper.getFile(token, id);
file.appProperties = file.appProperties || {};
googleHelper.driveActionFiles.push(file);
});
case 'open': {
const getOneFile = (ids = state.ids || []) => {
const id = ids.shift();
return id && googleHelper.getFile(token, id)
.then((file) => {
file.appProperties = file.appProperties || {};
googleHelper.driveActionFiles.push(file);
return getOneFile(ids);
});
};
return getOneFile()
// Check if first file is part of a workspace
.then(() => openWorkspaceIfExists(googleHelper.driveActionFiles[0]));
}
// Check if first file is part of a workspace
openWorkspaceIfExists(googleHelper.driveActionFiles[0]);
}
});
}
}
},
performAction() {
return Promise.resolve()
.then(() => {
const state = googleHelper.driveState || {};
const token = store.getters['data/googleTokens'][state.userId];
switch (token && state.action) {
case 'create':
return fileSvc.createFile({}, true)
.then((file) => {
store.commit('file/setCurrentId', file.id);
// Return a new syncLocation
return this.makeLocation(token, null, googleHelper.driveActionFolder.id);
});
case 'open':
return store.dispatch(
'queue/enqueue',
() => this.openFiles(token, googleHelper.driveActionFiles),
);
default:
return null;
}
});
async performAction() {
const state = googleHelper.driveState || {};
const token = store.getters['data/googleTokens'][state.userId];
switch (token && state.action) {
case 'create': {
const file = await fileSvc.createFile({}, true);
store.commit('file/setCurrentId', file.id);
// Return a new syncLocation
return this.makeLocation(token, null, googleHelper.driveActionFolder.id);
}
case 'open':
store.dispatch(
'queue/enqueue',
() => this.openFiles(token, googleHelper.driveActionFiles),
);
return null;
default:
return null;
}
},
downloadContent(token, syncLocation) {
return googleHelper.downloadFile(token, syncLocation.driveFileId)
.then(content => Provider.parseContent(content, `${syncLocation.fileId}/content`));
async downloadContent(token, syncLocation) {
const content = await googleHelper.downloadFile(token, syncLocation.driveFileId);
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
async uploadContent(token, content, syncLocation, ifNotTooLate) {
const file = store.state.file.itemMap[syncLocation.fileId];
const name = utils.sanitizeName(file && file.name);
const parents = [];
if (syncLocation.driveParentId) {
parents.push(syncLocation.driveParentId);
}
return googleHelper.uploadFile(
const driveFile = await googleHelper.uploadFile({
token,
name,
parents,
undefined,
Provider.serializeContent(content),
undefined,
syncLocation.driveFileId,
undefined,
media: Provider.serializeContent(content),
fileId: syncLocation.driveFileId,
ifNotTooLate,
)
.then(driveFile => ({
...syncLocation,
driveFileId: driveFile.id,
}));
});
return {
...syncLocation,
driveFileId: driveFile.id,
};
},
publish(token, html, metadata, publishLocation) {
return googleHelper.uploadFile(
async publish(token, html, metadata, publishLocation) {
const driveFile = await googleHelper.uploadFile({
token,
metadata.title,
[],
undefined,
html,
publishLocation.templateId ? 'text/html' : undefined,
publishLocation.driveFileId,
)
.then(driveFile => ({
...publishLocation,
driveFileId: driveFile.id,
}));
name: metadata.title,
parents: [],
media: html,
mediaType: publishLocation.templateId ? 'text/html' : undefined,
fileId: publishLocation.driveFileId,
});
return {
...publishLocation,
driveFileId: driveFile.id,
};
},
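// The calls above use the new keyword-argument form of googleHelper.uploadFile
// instead of the old positional one. The helper's own diff is not shown here;
// the option names below are inferred from the call sites in this commit and
// are listed for reference only:
//
// googleHelper.uploadFile({
//   token,         // OAuth token object
//   name,          // file name
//   parents,       // array of parent folder IDs
//   appProperties, // custom Drive appProperties
//   media,         // file content; omitted for metadata-only updates
//   mediaType,     // e.g. googleHelper.folderMimeType
//   fileId,        // existing Drive file ID when updating
//   oldParents,    // previous parent IDs, for moves
//   ifNotTooLate,  // guard callback passed through from the sync logic
// });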
openFiles(token, driveFiles) {
const openOneFile = () => {
const driveFile = driveFiles.shift();
if (!driveFile) {
return null;
}
if (Provider.openFileWithLocation(store.getters['syncLocation/items'], {
async openFiles(token, driveFiles) {
return utils.awaitSequence(driveFiles, async (driveFile) => {
// Check if the file exists and open it
if (!Provider.openFileWithLocation(store.getters['syncLocation/items'], {
providerId: this.id,
driveFileId: driveFile.id,
})) {
// File exists and has just been opened. Next...
return openOneFile();
}
// Download content from Google Drive and create the file
const syncLocation = {
driveFileId: driveFile.id,
providerId: this.id,
sub: token.sub,
};
return this.downloadContent(token, syncLocation)
.then(content => fileSvc.createFile({
// Download content from Google Drive
const syncLocation = {
driveFileId: driveFile.id,
providerId: this.id,
sub: token.sub,
};
let content;
try {
content = await this.downloadContent(token, syncLocation);
} catch (e) {
store.dispatch('notification/error', `Could not open file ${driveFile.id}.`);
return;
}
// Create the file
const item = await fileSvc.createFile({
name: driveFile.name,
parentId: store.getters['file/current'].parentId,
text: content.text,
properties: content.properties,
discussions: content.discussions,
comments: content.comments,
}, true))
.then((item) => {
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
id: utils.uid(),
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from Google Drive.`);
})
.catch(() => {
store.dispatch('notification/error', `Could not open file ${driveFile.id}.`);
})
.then(() => openOneFile());
};
return Promise.resolve(openOneFile());
}, true);
store.commit('file/setCurrentId', item.id);
store.commit('syncLocation/setItem', {
...syncLocation,
id: utils.uid(),
fileId: item.id,
});
store.dispatch('notification/info', `${store.getters['file/current'].name} was imported from Google Drive.`);
}
});
},
makeLocation(token, fileId, folderId) {
const location = {

View File

@ -4,13 +4,6 @@ import Provider from './common/Provider';
import utils from '../utils';
import fileSvc from '../fileSvc';
const getSyncData = (fileId) => {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
return syncData
? Promise.resolve(syncData)
: Promise.reject(); // No need for a proper error message.
};
let fileIdToOpen;
let syncStartPageToken;
@ -19,7 +12,7 @@ export default new Provider({
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
async initWorkspace() {
const makeWorkspaceParams = folderId => ({
providerId: this.id,
folderId,
@ -31,489 +24,437 @@ export default new Provider({
const getWorkspace = folderId =>
store.getters['data/sanitizedWorkspaces'][makeWorkspaceId(folderId)];
const initFolder = (token, folder) => Promise.resolve({
folderId: folder.id,
dataFolderId: folder.appProperties.dataFolderId,
trashFolderId: folder.appProperties.trashFolderId,
})
.then((properties) => {
// Make sure data folder exists
if (properties.dataFolderId) {
return properties;
}
return googleHelper.uploadFile(
token,
'.stackedit-data',
[folder.id],
{ folderId: folder.id },
undefined,
googleHelper.folderMimeType,
)
.then(dataFolder => ({
...properties,
dataFolderId: dataFolder.id,
}));
})
.then((properties) => {
// Make sure trash folder exists
if (properties.trashFolderId) {
return properties;
}
return googleHelper.uploadFile(
token,
'.stackedit-trash',
[folder.id],
{ folderId: folder.id },
undefined,
googleHelper.folderMimeType,
)
.then(trashFolder => ({
...properties,
trashFolderId: trashFolder.id,
}));
})
.then((properties) => {
// Update workspace if some properties are missing
if (properties.folderId === folder.appProperties.folderId
&& properties.dataFolderId === folder.appProperties.dataFolderId
&& properties.trashFolderId === folder.appProperties.trashFolderId
) {
return properties;
}
return googleHelper.uploadFile(
token,
undefined,
undefined,
properties,
undefined,
googleHelper.folderMimeType,
folder.id,
)
.then(() => properties);
})
.then((properties) => {
// Update workspace in the store
const workspaceId = makeWorkspaceId(folder.id);
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
id: workspaceId,
sub: token.sub,
name: folder.name,
providerId: this.id,
url: window.location.href,
folderId: folder.id,
teamDriveId: folder.teamDriveId,
dataFolderId: properties.dataFolderId,
trashFolderId: properties.trashFolderId,
},
});
const initFolder = async (token, folder) => {
const appProperties = {
folderId: folder.id,
dataFolderId: folder.appProperties.dataFolderId,
trashFolderId: folder.appProperties.trashFolderId,
};
// Return the workspace
return store.getters['data/sanitizedWorkspaces'][workspaceId];
});
return Promise.resolve()
.then(() => {
const workspace = getWorkspace(utils.queryParams.folderId);
// See if we already have a token
const googleTokens = store.getters['data/googleTokens'];
// Token sub is in the workspace or in the url if workspace is about to be created
const token = workspace ? googleTokens[workspace.sub] : googleTokens[utils.queryParams.sub];
if (token && token.isDrive && token.driveFullAccess) {
return token;
}
// If no token has been found, popup an authorize window and get one
return store.dispatch('modal/workspaceGoogleRedirection', {
onResolve: () => googleHelper.addDriveAccount(true, utils.queryParams.sub),
});
})
.then(token => Promise.resolve()
// If no folderId is provided, create one
.then(() => utils.queryParams.folderId || googleHelper.uploadFile(
// Make sure data folder exists
if (!appProperties.dataFolderId) {
appProperties.dataFolderId = (await googleHelper.uploadFile({
token,
'StackEdit workspace',
[],
undefined,
undefined,
googleHelper.folderMimeType,
)
.then(folder => initFolder(token, {
...folder,
appProperties: {},
})
.then(() => folder.id)))
// If workspace does not exist, initialize one
.then(folderId => getWorkspace(folderId) || googleHelper.getFile(token, folderId)
.then((folder) => {
folder.appProperties = folder.appProperties || {};
const folderIdProperty = folder.appProperties.folderId;
if (folderIdProperty && folderIdProperty !== folderId) {
throw new Error(`Folder ${folderId} is part of another workspace.`);
}
return initFolder(token, folder);
}, () => {
throw new Error(`Folder ${folderId} is not accessible. Make sure you have the right permissions.`);
}))
.then((workspace) => {
// Fix the URL hash
utils.setQueryParams(makeWorkspaceParams(workspace.folderId));
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
[workspace.id]: {
...workspace,
url: window.location.href,
},
});
}
return store.getters['data/sanitizedWorkspaces'][workspace.id];
}));
},
performAction() {
return Promise.resolve()
.then(() => {
const state = googleHelper.driveState || {};
const token = this.getToken();
switch (token && state.action) {
case 'create':
return Promise.resolve()
.then(() => {
const driveFolder = googleHelper.driveActionFolder;
let syncData = store.getters['data/syncData'][driveFolder.id];
if (!syncData && driveFolder.appProperties.id) {
// Create folder if not already synced
store.commit('folder/setItem', {
id: driveFolder.appProperties.id,
name: driveFolder.name,
});
const item = store.state.folder.itemMap[driveFolder.appProperties.id];
syncData = {
id: driveFolder.id,
itemId: item.id,
type: item.type,
hash: item.hash,
};
store.dispatch('data/patchSyncData', {
[syncData.id]: syncData,
});
}
return fileSvc.createFile({
parentId: syncData && syncData.itemId,
}, true)
.then((file) => {
store.commit('file/setCurrentId', file.id);
// File will be created on next workspace sync
});
});
case 'open':
return Promise.resolve()
.then(() => {
// open first file only
const firstFile = googleHelper.driveActionFiles[0];
const syncData = store.getters['data/syncData'][firstFile.id];
if (!syncData) {
fileIdToOpen = firstFile.id;
} else {
store.commit('file/setCurrentId', syncData.itemId);
}
});
default:
return null;
}
name: '.stackedit-data',
parents: [folder.id],
appProperties: { folderId: folder.id },
mediaType: googleHelper.folderMimeType,
})).id;
}
// Make sure trash folder exists
if (!appProperties.trashFolderId) {
appProperties.trashFolderId = (await googleHelper.uploadFile({
token,
name: '.stackedit-trash',
parents: [folder.id],
appProperties: { folderId: folder.id },
mediaType: googleHelper.folderMimeType,
})).id;
}
// Update workspace if some properties are missing
if (appProperties.folderId !== folder.appProperties.folderId
|| appProperties.dataFolderId !== folder.appProperties.dataFolderId
|| appProperties.trashFolderId !== folder.appProperties.trashFolderId
) {
await googleHelper.uploadFile({
token,
appProperties,
mediaType: googleHelper.folderMimeType,
fileId: folder.id,
});
}
// Update workspace in the store
const workspaceId = makeWorkspaceId(folder.id);
store.dispatch('data/patchWorkspaces', {
[workspaceId]: {
id: workspaceId,
sub: token.sub,
name: folder.name,
providerId: this.id,
url: window.location.href,
folderId: folder.id,
teamDriveId: folder.teamDriveId,
dataFolderId: appProperties.dataFolderId,
trashFolderId: appProperties.trashFolderId,
},
});
};
// Token sub is in the workspace or in the url if workspace is about to be created
const { sub } = getWorkspace(utils.queryParams.folderId) || utils.queryParams;
// See if we already have a token
let token = store.getters['data/googleTokens'][sub];
// If no token has been found, pop up an authorization window and get one
if (!token || !token.isDrive || !token.driveFullAccess) {
await store.dispatch('modal/workspaceGoogleRedirection');
token = await googleHelper.addDriveAccount(true, utils.queryParams.sub);
}
let { folderId } = utils.queryParams;
// If no folderId is provided, create one
if (!folderId) {
const folder = await googleHelper.uploadFile({
token,
name: 'StackEdit workspace',
parents: [],
mediaType: googleHelper.folderMimeType,
});
await initFolder(token, {
...folder,
appProperties: {},
});
folderId = folder.id;
}
// Init workspace
let workspace = getWorkspace(folderId);
if (!workspace) {
let folder;
try {
folder = await googleHelper.getFile(token, folderId);
} catch (err) {
throw new Error(`Folder ${folderId} is not accessible. Make sure you have the right permissions.`);
}
folder.appProperties = folder.appProperties || {};
const folderIdProperty = folder.appProperties.folderId;
if (folderIdProperty && folderIdProperty !== folderId) {
throw new Error(`Folder ${folderId} is part of another workspace.`);
}
await initFolder(token, folder);
workspace = getWorkspace(folderId);
}
// Fix the URL hash
utils.setQueryParams(makeWorkspaceParams(workspace.folderId));
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
[workspace.id]: {
...workspace,
url: window.location.href,
},
});
}
return store.getters['data/sanitizedWorkspaces'][workspace.id];
},
getChanges() {
async performAction() {
const state = googleHelper.driveState || {};
const token = this.getToken();
switch (token && state.action) {
case 'create': {
const driveFolder = googleHelper.driveActionFolder;
let syncData = store.getters['data/syncData'][driveFolder.id];
if (!syncData && driveFolder.appProperties.id) {
// Create folder if not already synced
store.commit('folder/setItem', {
id: driveFolder.appProperties.id,
name: driveFolder.name,
});
const item = store.state.folder.itemMap[driveFolder.appProperties.id];
syncData = {
id: driveFolder.id,
itemId: item.id,
type: item.type,
hash: item.hash,
};
store.dispatch('data/patchSyncData', {
[syncData.id]: syncData,
});
}
const file = await fileSvc.createFile({
parentId: syncData && syncData.itemId,
}, true);
store.commit('file/setCurrentId', file.id);
// File will be created on next workspace sync
break;
}
case 'open': {
// open first file only
const firstFile = googleHelper.driveActionFiles[0];
const syncData = store.getters['data/syncData'][firstFile.id];
if (!syncData) {
fileIdToOpen = firstFile.id;
} else {
store.commit('file/setCurrentId', syncData.itemId);
}
break;
}
default:
}
},
async getChanges() {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
const startPageToken = store.getters['data/localSettings'].syncStartPageToken;
return googleHelper.getChanges(syncToken, startPageToken, false, workspace.teamDriveId)
.then((result) => {
// Collect possible parent IDs
const parentIds = {};
Object.entries(store.getters['data/syncDataByItemId']).forEach(([id, syncData]) => {
parentIds[syncData.id] = id;
});
result.changes.forEach((change) => {
const { id } = (change.file || {}).appProperties || {};
if (id) {
parentIds[change.fileId] = id;
}
});
const lastStartPageToken = store.getters['data/localSettings'].syncStartPageToken;
const { changes, startPageToken } = await googleHelper
.getChanges(syncToken, lastStartPageToken, false, workspace.teamDriveId);
// Collect changes
const changes = [];
result.changes.forEach((change) => {
// Ignore changes on StackEdit own folders
if (change.fileId === workspace.folderId
|| change.fileId === workspace.dataFolderId
|| change.fileId === workspace.trashFolderId
) {
// Collect possible parent IDs
const parentIds = {};
Object.entries(store.getters['data/syncDataByItemId']).forEach(([id, syncData]) => {
parentIds[syncData.id] = id;
});
changes.forEach((change) => {
const { id } = (change.file || {}).appProperties || {};
if (id) {
parentIds[change.fileId] = id;
}
});
// Collect changes
const result = [];
changes.forEach((change) => {
// Ignore changes on StackEdit own folders
if (change.fileId === workspace.folderId
|| change.fileId === workspace.dataFolderId
|| change.fileId === workspace.trashFolderId
) {
return;
}
let contentChange;
if (change.file) {
// Ignore changes in files that are not in the workspace
const { appProperties } = change.file;
if (!appProperties || appProperties.folderId !== workspace.folderId
) {
return;
}
// If change is on a data item
if (change.file.parents[0] === workspace.dataFolderId) {
// Data item has a JSON filename
try {
change.item = JSON.parse(change.file.name);
} catch (e) {
return;
}
} else {
// Change on a file or folder
const type = change.file.mimeType === googleHelper.folderMimeType
? 'folder'
: 'file';
const item = {
id: appProperties.id,
type,
name: change.file.name,
parentId: null,
};
let contentChange;
if (change.file) {
// Ignore changes in files that are not in the workspace
const { appProperties } = change.file;
if (!appProperties || appProperties.folderId !== workspace.folderId
) {
return;
}
// If change is on a data item
if (change.file.parents[0] === workspace.dataFolderId) {
// Data item has a JSON filename
try {
change.item = JSON.parse(change.file.name);
} catch (e) {
return;
}
} else {
// Change on a file or folder
const type = change.file.mimeType === googleHelper.folderMimeType
? 'folder'
: 'file';
const item = {
id: appProperties.id,
type,
name: change.file.name,
parentId: null,
};
// Fill parentId
if (change.file.parents.some(parentId => parentId === workspace.trashFolderId)) {
item.parentId = 'trash';
} else {
change.file.parents.some((parentId) => {
if (!parentIds[parentId]) {
return false;
}
item.parentId = parentIds[parentId];
return true;
});
}
change.item = utils.addItemHash(item);
if (type === 'file') {
// create a fake change as a file content change
contentChange = {
item: {
id: `${appProperties.id}/content`,
type: 'content',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: `${change.fileId}/content`,
itemId: `${appProperties.id}/content`,
type: 'content',
// Need a truthy value to force downloading the content
hash: 1,
},
syncDataId: `${change.fileId}/content`,
};
}
}
// Build sync data
change.syncData = {
id: change.fileId,
parentIds: change.file.parents,
itemId: change.item.id,
type: change.item.type,
hash: change.item.hash,
};
// Fill parentId
if (change.file.parents.some(parentId => parentId === workspace.trashFolderId)) {
item.parentId = 'trash';
} else {
// Item was removed
const syncData = store.getters['data/syncData'][change.fileId];
if (syncData && syncData.type === 'file') {
// create a fake change as a file content change
contentChange = {
syncDataId: `${change.fileId}/content`,
};
}
change.file.parents.some((parentId) => {
if (!parentIds[parentId]) {
return false;
}
item.parentId = parentIds[parentId];
return true;
});
}
change.item = utils.addItemHash(item);
// Push change
change.syncDataId = change.fileId;
changes.push(change);
if (contentChange) {
changes.push(contentChange);
if (type === 'file') {
// create a fake change as a file content change
contentChange = {
item: {
id: `${appProperties.id}/content`,
type: 'content',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: `${change.fileId}/content`,
itemId: `${appProperties.id}/content`,
type: 'content',
// Need a truthy value to force downloading the content
hash: 1,
},
syncDataId: `${change.fileId}/content`,
};
}
});
syncStartPageToken = result.startPageToken;
return changes;
});
}
// Build sync data
change.syncData = {
id: change.fileId,
parentIds: change.file.parents,
itemId: change.item.id,
type: change.item.type,
hash: change.item.hash,
};
} else {
// Item was removed
const syncData = store.getters['data/syncData'][change.fileId];
if (syncData && syncData.type === 'file') {
// create a fake change as a file content change
contentChange = {
syncDataId: `${change.fileId}/content`,
};
}
}
// Push change
change.syncDataId = change.fileId;
result.push(change);
if (contentChange) {
result.push(contentChange);
}
});
syncStartPageToken = startPageToken;
return result;
},
onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
syncStartPageToken,
});
},
saveSimpleItem(item, syncData, ifNotTooLate) {
return Promise.resolve()
.then(() => {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
if (item.type !== 'file' && item.type !== 'folder') {
return googleHelper.uploadFile(
syncToken,
JSON.stringify(item),
[workspace.dataFolderId],
{
folderId: workspace.folderId,
},
undefined,
undefined,
syncData && syncData.id,
syncData && syncData.parentIds,
ifNotTooLate,
);
}
// For type `file` or `folder`
const parentSyncData = store.getters['data/syncDataByItemId'][item.parentId];
let parentId;
if (item.parentId === 'trash') {
parentId = workspace.trashFolderId;
} else if (parentSyncData) {
parentId = parentSyncData.id;
} else {
parentId = workspace.folderId;
}
return googleHelper.uploadFile(
syncToken,
item.name,
[parentId],
{
id: item.id,
folderId: workspace.folderId,
},
undefined,
item.type === 'folder' ? googleHelper.folderMimeType : undefined,
syncData && syncData.id,
syncData && syncData.parentIds,
ifNotTooLate,
);
})
.then(file => ({
// Build sync data
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
}));
},
removeItem(syncData, ifNotTooLate) {
// Ignore content deletion
if (syncData.type === 'content') {
return Promise.resolve();
}
async saveSimpleItem(item, syncData, ifNotTooLate) {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.removeFile(syncToken, syncData.id, ifNotTooLate);
let file;
if (item.type !== 'file' && item.type !== 'folder') {
// For sync/publish locations, store item as filename
file = await googleHelper.uploadFile({
token: syncToken,
name: JSON.stringify(item),
parents: [workspace.dataFolderId],
appProperties: {
folderId: workspace.folderId,
},
fileId: syncData && syncData.id,
oldParents: syncData && syncData.parentIds,
ifNotTooLate,
});
} else {
// For type `file` or `folder`
const parentSyncData = store.getters['data/syncDataByItemId'][item.parentId];
let parentId;
if (item.parentId === 'trash') {
parentId = workspace.trashFolderId;
} else if (parentSyncData) {
parentId = parentSyncData.id;
} else {
parentId = workspace.folderId;
}
file = await googleHelper.uploadFile({
token: syncToken,
name: item.name,
parents: [parentId],
appProperties: {
id: item.id,
folderId: workspace.folderId,
},
mediaType: item.type === 'folder' ? googleHelper.folderMimeType : undefined,
fileId: syncData && syncData.id,
oldParents: syncData && syncData.parentIds,
ifNotTooLate,
});
}
// Build sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
};
},
downloadContent(token, syncLocation) {
async removeItem(syncData, ifNotTooLate) {
// Ignore content deletion
if (syncData.type !== 'content') {
const syncToken = store.getters['workspace/syncToken'];
await googleHelper.removeFile(syncToken, syncData.id, ifNotTooLate);
}
},
async downloadContent(token, syncLocation) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!syncData || !contentSyncData) {
return Promise.resolve();
return null;
}
return googleHelper.downloadFile(token, syncData.id)
.then((content) => {
const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
[contentSyncData.id]: {
...contentSyncData,
hash: item.hash,
},
});
}
// Open the file requested by action if it wasn't synced yet
if (fileIdToOpen && fileIdToOpen === syncData.id) {
fileIdToOpen = null;
// Open the file once downloaded content has been stored
setTimeout(() => {
store.commit('file/setCurrentId', syncData.itemId);
}, 10);
}
return item;
const content = await googleHelper.downloadFile(token, syncData.id);
const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
[contentSyncData.id]: {
...contentSyncData,
hash: item.hash,
},
});
}
// Open the file requested by action if it wasn't synced yet
if (fileIdToOpen && fileIdToOpen === syncData.id) {
fileIdToOpen = null;
// Open the file once downloaded content has been stored
setTimeout(() => {
store.commit('file/setCurrentId', syncData.itemId);
}, 10);
}
return item;
},
downloadData(dataId) {
async downloadData(dataId) {
const syncData = store.getters['data/syncDataByItemId'][dataId];
if (!syncData) {
return Promise.resolve();
return null;
}
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.downloadFile(syncToken, syncData.id)
.then((content) => {
const item = JSON.parse(content);
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
},
});
}
return item;
const content = await googleHelper.downloadFile(syncToken, syncData.id);
const item = JSON.parse(content);
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
...syncData,
hash: item.hash,
},
});
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (contentSyncData && contentSyncData.hash === content.hash) {
return Promise.resolve(syncLocation);
}
return Promise.resolve()
.then(() => {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
if (syncData) {
// Only update file media
return googleHelper.uploadFile(
token,
undefined,
undefined,
undefined,
Provider.serializeContent(content),
undefined,
syncData.id,
undefined,
ifNotTooLate,
);
}
return item;
},
async uploadContent(token, content, syncLocation, ifNotTooLate) {
const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
if (!contentSyncData || contentSyncData.hash !== content.hash) {
const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
let file;
if (syncData) {
// Only update file media
file = await googleHelper.uploadFile({
token,
media: Provider.serializeContent(content),
fileId: syncData.id,
ifNotTooLate,
});
} else {
// Create file with media
const workspace = store.getters['workspace/currentWorkspace'];
// Use deepCopy to freeze objects
const item = utils.deepCopy(store.state.file.itemMap[syncLocation.fileId]);
const parentSyncData = store.getters['data/syncDataByItemId'][item.parentId];
return googleHelper.uploadFile(
file = await googleHelper.uploadFile({
token,
item.name,
[parentSyncData ? parentSyncData.id : workspace.folderId],
{
name: item.name,
parents: [parentSyncData ? parentSyncData.id : workspace.folderId],
appProperties: {
id: item.id,
folderId: workspace.folderId,
},
Provider.serializeContent(content),
undefined,
undefined,
undefined,
media: Provider.serializeContent(content),
ifNotTooLate,
)
.then((file) => {
store.dispatch('data/patchSyncData', {
[file.id]: {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
},
});
return file;
});
})
.then(file => store.dispatch('data/patchSyncData', {
});
store.dispatch('data/patchSyncData', {
[file.id]: {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
},
});
}
store.dispatch('data/patchSyncData', {
[`${file.id}/content`]: {
// Build sync data
id: `${file.id}/content`,
@ -521,34 +462,32 @@ export default new Provider({
type: content.type,
hash: content.hash,
},
}))
.then(() => syncLocation);
},
uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
});
}
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.uploadFile(
syncToken,
JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
}),
[workspace.dataFolderId],
{
folderId: workspace.folderId,
},
JSON.stringify(item),
undefined,
syncData && syncData.id,
syncData && syncData.parentIds,
ifNotTooLate,
)
.then(file => store.dispatch('data/patchSyncData', {
return syncLocation;
},
async uploadData(item, ifNotTooLate) {
const syncData = store.getters['data/syncDataByItemId'][item.id];
if (!syncData || syncData.hash !== item.hash) {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
const file = await googleHelper.uploadFile({
token: syncToken,
name: JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
}),
parents: [workspace.dataFolderId],
appProperties: {
folderId: workspace.folderId,
},
media: JSON.stringify(item),
fileId: syncData && syncData.id,
oldParents: syncData && syncData.parentIds,
ifNotTooLate,
});
store.dispatch('data/patchSyncData', {
[file.id]: {
// Build sync data
id: file.id,
@ -556,21 +495,22 @@ export default new Provider({
type: item.type,
hash: item.hash,
},
}));
});
}
},
listRevisions(token, fileId) {
return getSyncData(fileId)
.then(syncData => googleHelper.getFileRevisions(token, syncData.id))
.then(revisions => revisions.map(revision => ({
id: revision.id,
sub: revision.lastModifyingUser && revision.lastModifyingUser.permissionId,
created: new Date(revision.modifiedTime).getTime(),
}))
.sort((revision1, revision2) => revision2.created - revision1.created));
async listRevisions(token, fileId) {
const syncData = Provider.getContentSyncData(fileId);
const revisions = await googleHelper.getFileRevisions(token, syncData.id);
return revisions.map(revision => ({
id: revision.id,
sub: revision.lastModifyingUser && revision.lastModifyingUser.permissionId,
created: new Date(revision.modifiedTime).getTime(),
}))
.sort((revision1, revision2) => revision2.created - revision1.created);
},
getRevisionContent(token, fileId, revisionId) {
return getSyncData(fileId)
.then(syncData => googleHelper.downloadFileRevision(token, syncData.id, revisionId))
.then(content => Provider.parseContent(content, `${fileId}/content`));
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const content = await googleHelper.downloadFileRevision(token, syncData.id, revisionId);
return Provider.parseContent(content, `${fileId}/content`);
},
});

View File

@ -2,31 +2,37 @@ import networkSvc from '../../networkSvc';
import utils from '../../utils';
import store from '../../../store';
const request = (token, options = {}) => {
const request = async (token, options = {}) => {
const baseUrl = `${token.dbUrl}/`;
const getLastToken = () => store.getters['data/couchdbTokens'][token.sub];
const ifUnauthorized = cb => (err) => {
const assertUnauthorized = (err) => {
if (err.status !== 401) {
throw err;
}
return cb(err);
};
const onUnauthorized = () => networkSvc.request({
method: 'POST',
url: utils.resolveUrl(baseUrl, '../_session'),
withCredentials: true,
body: {
name: getLastToken().name,
password: getLastToken().password,
},
})
.catch(ifUnauthorized(() => store.dispatch('modal/open', {
type: 'couchdbCredentials',
token: getLastToken(),
})
.then(onUnauthorized)));
const onUnauthorized = async () => {
try {
const { name, password } = getLastToken();
await networkSvc.request({
method: 'POST',
url: utils.resolveUrl(baseUrl, '../_session'),
withCredentials: true,
body: {
name,
password,
},
});
} catch (err) {
assertUnauthorized(err);
await store.dispatch('modal/open', {
type: 'couchdbCredentials',
token: getLastToken(),
});
await onUnauthorized();
}
};
const config = {
...options,
@ -38,55 +44,75 @@ const request = (token, options = {}) => {
withCredentials: true,
};
return networkSvc.request(config)
.catch(ifUnauthorized(() => onUnauthorized()
.then(() => networkSvc.request(config))))
.then(res => res.body)
.catch((err) => {
if (err.status === 409) {
throw new Error('TOO_LATE');
}
throw err;
});
try {
let res;
try {
res = await networkSvc.request(config);
} catch (err) {
assertUnauthorized(err);
await onUnauthorized();
res = await networkSvc.request(config);
}
return res.body;
} catch (err) {
if (err.status === 409) {
throw new Error('TOO_LATE');
}
throw err;
}
};
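// With this wrapper, the endpoint methods below never handle authentication
// themselves. A typical call (illustrative, mirroring retrieveDocument) is:
//
// const doc = await request(token, { path: documentId, params: { rev } });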
export default {
/**
* http://docs.couchdb.org/en/2.1.1/api/database/common.html#db
*/
getDb(token) {
return request(token);
},
getChanges(token, lastSeq) {
/**
* http://docs.couchdb.org/en/2.1.1/api/database/changes.html#db-changes
*/
async getChanges(token, lastSeq) {
const result = {
changes: [],
lastSeq,
};
const getPage = (since = 0) => request(token, {
method: 'GET',
path: '_changes',
params: {
since,
include_docs: true,
limit: 1000,
},
})
.then((body) => {
result.changes = result.changes.concat(body.results);
if (body.pending) {
return getPage(body.last_seq);
}
result.lastSeq = body.last_seq;
return result;
const getPage = async () => {
const body = await request(token, {
method: 'GET',
path: '_changes',
params: {
since: result.lastSeq || 0,
include_docs: true,
limit: 1000,
},
});
result.changes = [...result.changes, ...body.results];
result.lastSeq = body.last_seq;
if (body.pending) {
return getPage();
}
return result;
};
return getPage(lastSeq);
return getPage();
},
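// Illustrative usage of the paginated change feed above (assuming this module
// is imported as couchdbHelper; applyChange and saveSeq are hypothetical):
//
// const { changes, lastSeq } = await couchdbHelper.getChanges(token, savedSeq);
// changes.forEach(applyChange);
// await saveSeq(lastSeq);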
uploadDocument(
/**
* http://docs.couchdb.org/en/2.1.1/api/database/common.html#post--db
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#put--db-docid
*/
async uploadDocument({
token,
item,
data = null,
dataType = null,
documentId = null,
rev = null,
) {
}) {
const options = {
method: 'POST',
body: { item, time: Date.now() },
@ -110,34 +136,48 @@ export default {
}
return request(token, options);
},
removeDocument(token, documentId, rev) {
/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#delete--db-docid
*/
async removeDocument(token, documentId, rev) {
return request(token, {
method: 'DELETE',
path: documentId,
params: { rev },
});
},
retrieveDocument(token, documentId, rev) {
/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#get--db-docid
*/
async retrieveDocument(token, documentId, rev) {
return request(token, {
path: documentId,
params: { rev },
});
},
retrieveDocumentWithAttachments(token, documentId, rev) {
return request(token, {
/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#get--db-docid
*/
async retrieveDocumentWithAttachments(token, documentId, rev) {
const body = await request(token, {
path: documentId,
params: { attachments: true, rev },
})
.then((body) => {
body.attachments = {};
// eslint-disable-next-line no-underscore-dangle
Object.entries(body._attachments).forEach(([name, attachment]) => {
body.attachments[name] = utils.decodeBase64(attachment.data);
});
return body;
});
});
body.attachments = {};
// eslint-disable-next-line no-underscore-dangle
Object.entries(body._attachments).forEach(([name, attachment]) => {
body.attachments[name] = utils.decodeBase64(attachment.data);
});
return body;
},
retrieveDocumentWithRevisions(token, documentId) {
/**
* http://docs.couchdb.org/en/2.1.1/api/document/common.html#get--db-docid
*/
async retrieveDocumentWithRevisions(token, documentId) {
return request(token, {
path: documentId,
params: {

View File

@ -1,8 +1,6 @@
import networkSvc from '../../networkSvc';
import store from '../../../store';
let Dropbox;
const getAppKey = (fullAccess) => {
if (fullAccess) {
return 'lq6mwopab8wskas';
@ -22,89 +20,105 @@ const request = (token, options, args) => networkSvc.request({
});
export default {
startOauth2(fullAccess, sub = null, silent = false) {
return networkSvc.startOauth2(
/**
* https://www.dropbox.com/developers/documentation/http/documentation#oauth2-authorize
*/
async startOauth2(fullAccess, sub = null, silent = false) {
const { accessToken } = await networkSvc.startOauth2(
'https://www.dropbox.com/oauth2/authorize',
{
client_id: getAppKey(fullAccess),
response_type: 'token',
},
silent,
)
// Call the user info endpoint
.then(({ accessToken }) => request({ accessToken }, {
method: 'POST',
url: 'https://api.dropboxapi.com/2/users/get_current_account',
})
.then((res) => {
// Check the returned sub consistency
if (sub && `${res.body.account_id}` !== sub) {
throw new Error('Dropbox account ID not expected.');
}
// Build token object including scopes and sub
const token = {
accessToken,
name: res.body.name.display_name,
sub: `${res.body.account_id}`,
fullAccess,
};
// Add token to dropboxTokens
store.dispatch('data/setDropboxToken', token);
return token;
}));
},
loadClientScript() {
if (Dropbox) {
return Promise.resolve();
);
// Call the user info endpoint
const { body } = await request({ accessToken }, {
method: 'POST',
url: 'https://api.dropboxapi.com/2/users/get_current_account',
});
// Check the returned sub consistency
if (sub && `${body.account_id}` !== sub) {
throw new Error('Dropbox account ID not expected.');
}
return networkSvc.loadScript('https://www.dropbox.com/static/api/2/dropins.js')
.then(() => {
({ Dropbox } = window);
});
// Build token object including scopes and sub
const token = {
accessToken,
name: body.name.display_name,
sub: `${body.account_id}`,
fullAccess,
};
// Add token to dropboxTokens
store.dispatch('data/setDropboxToken', token);
return token;
},
addAccount(fullAccess = false) {
return this.startOauth2(fullAccess);
},
uploadFile(token, path, content, fileId) {
return request(token, {
/**
* https://www.dropbox.com/developers/documentation/http/documentation#files-upload
*/
async uploadFile({
token,
path,
content,
fileId,
}) {
return (await request(token, {
method: 'POST',
url: 'https://content.dropboxapi.com/2/files/upload',
body: content,
}, {
path: fileId || path,
mode: 'overwrite',
})
.then(res => res.body);
})).body;
},
downloadFile(token, path, fileId) {
return request(token, {
/**
* https://www.dropbox.com/developers/documentation/http/documentation#files-download
*/
async downloadFile({
token,
path,
fileId,
}) {
const res = await request(token, {
method: 'POST',
url: 'https://content.dropboxapi.com/2/files/download',
raw: true,
}, {
path: fileId || path,
})
.then(res => ({
id: JSON.parse(res.headers['dropbox-api-result']).id,
content: res.body,
}));
});
return {
id: JSON.parse(res.headers['dropbox-api-result']).id,
content: res.body,
};
},
openChooser(token) {
return this.loadClientScript()
.then(() => new Promise((resolve) => {
Dropbox.appKey = getAppKey(token.fullAccess);
Dropbox.choose({
multiselect: true,
linkType: 'direct',
success: (files) => {
const paths = files.map((file) => {
const path = file.link.replace(/.*\/view\/[^/]*/, '');
return decodeURI(path);
});
resolve(paths);
},
cancel: () => resolve([]),
});
}));
/**
* https://www.dropbox.com/developers/chooser
*/
async openChooser(token) {
if (!window.Dropbox) {
await networkSvc.loadScript('https://www.dropbox.com/static/api/2/dropins.js');
}
return new Promise((resolve) => {
window.Dropbox.appKey = getAppKey(token.fullAccess);
window.Dropbox.choose({
multiselect: true,
linkType: 'direct',
success: files => resolve(files.map((file) => {
const path = file.link.replace(/.*\/view\/[^/]*/, '');
return decodeURI(path);
})),
cancel: () => resolve([]),
});
});
},
};

View File

@ -20,7 +20,8 @@ const request = (token, options) => networkSvc.request({
const repoRequest = (token, owner, repo, options) => request(token, {
...options,
url: `https://api.github.com/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/${options.url}`,
});
})
.then(res => res.body);
const getCommitMessage = (name, path) => {
const message = store.getters['data/computedSettings'].github[name];
@ -28,95 +29,131 @@ const getCommitMessage = (name, path) => {
};
export default {
startOauth2(scopes, sub = null, silent = false) {
return networkSvc.startOauth2(
/**
* https://developer.github.com/apps/building-oauth-apps/authorization-options-for-oauth-apps/
*/
async startOauth2(scopes, sub = null, silent = false) {
const { code } = await networkSvc.startOauth2(
'https://github.com/login/oauth/authorize',
{
client_id: clientId,
scope: scopes.join(' '),
},
silent,
)
// Exchange code with token
.then(data => networkSvc.request({
method: 'GET',
url: 'oauth2/githubToken',
params: {
clientId,
code: data.code,
},
})
.then(res => res.body))
// Call the user info endpoint
.then(accessToken => networkSvc.request({
method: 'GET',
url: 'https://api.github.com/user',
params: {
access_token: accessToken,
},
})
.then((res) => {
// Check the returned sub consistency
if (sub && `${res.body.id}` !== sub) {
throw new Error('GitHub account ID not expected.');
}
// Build token object including scopes and sub
const token = {
scopes,
accessToken,
name: res.body.login,
sub: `${res.body.id}`,
repoFullAccess: scopes.indexOf('repo') !== -1,
};
// Add token to githubTokens
store.dispatch('data/setGithubToken', token);
return token;
}));
);
// Exchange code with token
const accessToken = (await networkSvc.request({
method: 'GET',
url: 'oauth2/githubToken',
params: {
clientId,
code,
},
})).body;
// Call the user info endpoint
const user = (await networkSvc.request({
method: 'GET',
url: 'https://api.github.com/user',
params: {
access_token: accessToken,
},
})).body;
// Check the returned sub consistency
if (sub && `${user.id}` !== sub) {
throw new Error('GitHub account ID not expected.');
}
// Build token object including scopes and sub
const token = {
scopes,
accessToken,
name: user.login,
sub: `${user.id}`,
repoFullAccess: scopes.indexOf('repo') !== -1,
};
// Add token to githubTokens
store.dispatch('data/setGithubToken', token);
return token;
},
addAccount(repoFullAccess = false) {
async addAccount(repoFullAccess = false) {
return this.startOauth2(getScopes({ repoFullAccess }));
},
getUser(userId) {
return networkSvc.request({
/**
* Getting a user from their userId is not feasible with API v3.
* Using an undocumented endpoint...
*/
async getUser(userId) {
const user = (await networkSvc.request({
url: `https://api.github.com/user/${userId}`,
params: {
t: Date.now(), // Prevent from caching
},
})
.then((res) => {
store.commit('userInfo/addItem', {
id: `gh:${res.body.id}`,
name: res.body.login,
imageUrl: res.body.avatar_url || '',
});
return res.body;
});
})).body;
store.commit('userInfo/addItem', {
id: `gh:${user.id}`,
name: user.login,
imageUrl: user.avatar_url || '',
});
return user;
},
getTree(token, owner, repo, sha) {
return repoRequest(token, owner, repo, {
url: `git/trees/${encodeURIComponent(sha)}?recursive=1`,
})
.then((res) => {
if (res.body.truncated) {
throw new Error('Git tree too big. Please remove some files in the repository.');
}
return res.body.tree;
});
},
getHeadTree(token, owner, repo, branch) {
return repoRequest(token, owner, repo, {
/**
* https://developer.github.com/v3/repos/commits/#get-a-single-commit
* https://developer.github.com/v3/git/trees/#get-a-tree
*/
async getTree({
token,
owner,
repo,
branch,
}) {
const { commit } = await repoRequest(token, owner, repo, {
url: `commits/${encodeURIComponent(branch)}`,
})
.then(res => this.getTree(token, owner, repo, res.body.commit.tree.sha));
});
const { tree, truncated } = await repoRequest(token, owner, repo, {
url: `git/trees/${encodeURIComponent(commit.tree.sha)}?recursive=1`,
});
if (truncated) {
throw new Error('Git tree too big. Please remove some files in the repository.');
}
return tree;
},
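// Illustrative call with the new keyword-argument signature (assuming the
// module is imported as githubHelper; owner, repo and branch are placeholders):
//
// const tree = await githubHelper.getTree({
//   token,
//   owner: 'some-user',
//   repo: 'some-repo',
//   branch: 'master',
// });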
getCommits(token, owner, repo, sha, path) {
/**
* https://developer.github.com/v3/repos/commits/#list-commits-on-a-repository
*/
async getCommits({
token,
owner,
repo,
sha,
path,
}) {
return repoRequest(token, owner, repo, {
url: 'commits',
params: { sha, path },
})
.then(res => res.body);
});
},
uploadFile(token, owner, repo, branch, path, content, sha) {
/**
* https://developer.github.com/v3/repos/contents/#create-a-file
* https://developer.github.com/v3/repos/contents/#update-a-file
*/
async uploadFile({
token,
owner,
repo,
branch,
path,
content,
sha,
}) {
return repoRequest(token, owner, repo, {
method: 'PUT',
url: `contents/${encodeURIComponent(path)}`,
@ -126,10 +163,20 @@ export default {
sha,
branch,
},
})
.then(res => res.body);
});
},
removeFile(token, owner, repo, branch, path, sha) {
/**
* https://developer.github.com/v3/repos/contents/#delete-a-file
*/
async removeFile({
token,
owner,
repo,
branch,
path,
sha,
}) {
return repoRequest(token, owner, repo, {
method: 'DELETE',
url: `contents/${encodeURIComponent(path)}`,
@ -138,21 +185,42 @@ export default {
sha,
branch,
},
})
.then(res => res.body);
});
},
downloadFile(token, owner, repo, branch, path) {
return repoRequest(token, owner, repo, {
/**
* https://developer.github.com/v3/repos/contents/#get-contents
*/
async downloadFile({
token,
owner,
repo,
branch,
path,
}) {
const body = await repoRequest(token, owner, repo, {
url: `contents/${encodeURIComponent(path)}`,
params: { ref: branch },
})
.then(res => ({
sha: res.body.sha,
content: utils.decodeBase64(res.body.content),
}));
});
return {
sha: body.sha,
content: utils.decodeBase64(body.content),
};
},
uploadGist(token, description, filename, content, isPublic, gistId) {
return request(token, gistId ? {
/**
* https://developer.github.com/v3/gists/#create-a-gist
* https://developer.github.com/v3/gists/#edit-a-gist
*/
async uploadGist({
token,
description,
filename,
content,
isPublic,
gistId,
}) {
const { body } = await request(token, gistId ? {
method: 'PATCH',
url: `https://api.github.com/gists/${gistId}`,
body: {
@ -175,19 +243,24 @@ export default {
},
public: isPublic,
},
})
.then(res => res.body);
});
return body;
},
downloadGist(token, gistId, filename) {
return request(token, {
/**
* https://developer.github.com/v3/gists/#get-a-single-gist
*/
async downloadGist({
token,
gistId,
filename,
}) {
const result = (await request(token, {
url: `https://api.github.com/gists/${gistId}`,
})
.then((res) => {
const result = res.body.files[filename];
if (!result) {
throw new Error('Gist file not found.');
}
return result.content;
});
})).body.files[filename];
if (!result) {
throw new Error('Gist file not found.');
}
return result.content;
},
};

File diff suppressed because it is too large

View File

@ -10,11 +10,15 @@ const request = (token, options) => networkSvc.request({
...options.headers || {},
Authorization: `Bearer ${token.accessToken}`,
},
});
})
.then(res => res.body);
export default {
startOauth2(sub = null, silent = false) {
return networkSvc.startOauth2(
/**
* https://developer.wordpress.com/docs/oauth2/
*/
async startOauth2(sub = null, silent = false) {
const { accessToken, expiresIn } = await networkSvc.startOauth2(
'https://public-api.wordpress.com/oauth2/authorize',
{
client_id: clientId,
@ -22,49 +26,49 @@ export default {
scope: 'global',
},
silent,
)
// Call the user info endpoint
.then(data => request({ accessToken: data.accessToken }, {
url: 'https://public-api.wordpress.com/rest/v1.1/me',
})
.then((res) => {
// Check the returned sub consistency
if (sub && `${res.body.ID}` !== sub) {
throw new Error('WordPress account ID not expected.');
}
// Build token object including scopes and sub
const token = {
accessToken: data.accessToken,
expiresOn: Date.now() + (data.expiresIn * 1000),
name: res.body.display_name,
sub: `${res.body.ID}`,
};
// Add token to wordpressTokens
store.dispatch('data/setWordpressToken', token);
return token;
}));
);
// Call the user info endpoint
const body = await request({ accessToken }, {
url: 'https://public-api.wordpress.com/rest/v1.1/me',
});
// Check the returned sub consistency
if (sub && `${body.ID}` !== sub) {
throw new Error('WordPress account ID not expected.');
}
// Build token object including scopes and sub
const token = {
accessToken,
expiresOn: Date.now() + (expiresIn * 1000),
name: body.display_name,
sub: `${body.ID}`,
};
// Add token to wordpressTokens
store.dispatch('data/setWordpressToken', token);
return token;
},
refreshToken(token) {
async refreshToken(token) {
const { sub } = token;
const lastToken = store.getters['data/wordpressTokens'][sub];
return Promise.resolve()
.then(() => {
if (lastToken.expiresOn > Date.now() + tokenExpirationMargin) {
return lastToken;
}
// Existing token is going to expire.
// Try to get a new token in background
return store.dispatch('modal/providerRedirection', {
providerName: 'WordPress',
onResolve: () => this.startOauth2(sub),
});
});
if (lastToken.expiresOn > Date.now() + tokenExpirationMargin) {
return lastToken;
}
// Existing token is going to expire.
// Try to get a new token in the background
await store.dispatch('modal/providerRedirection', { providerName: 'WordPress' });
return this.startOauth2(sub);
},
addAccount(fullAccess = false) {
return this.startOauth2(fullAccess);
},
uploadPost(
/**
* https://developer.wordpress.com/docs/api/1.2/post/sites/%24site/posts/new/
* https://developer.wordpress.com/docs/api/1.2/post/sites/%24site/posts/%24post_ID/
*/
async uploadPost({
token,
domain,
siteId,
@ -78,23 +82,22 @@ export default {
featuredImage,
status,
date,
) {
return this.refreshToken(token)
.then(refreshedToken => request(refreshedToken, {
method: 'POST',
url: `https://public-api.wordpress.com/rest/v1.2/sites/${siteId || domain}/posts/${postId || 'new'}`,
body: {
content,
title,
tags,
categories,
excerpt,
author,
featured_image: featuredImage || '',
status,
date: date && date.toISOString(),
},
})
.then(res => res.body));
}) {
const refreshedToken = await this.refreshToken(token);
return request(refreshedToken, {
method: 'POST',
url: `https://public-api.wordpress.com/rest/v1.2/sites/${siteId || domain}/posts/${postId || 'new'}`,
body: {
content,
title,
tags,
categories,
excerpt,
author,
featured_image: featuredImage || '',
status,
date: date && date.toISOString(),
},
});
},
};

View File

@ -7,11 +7,16 @@ const request = (token, options) => networkSvc.request({
...options.headers || {},
Authorization: `Bearer ${token.accessToken}`,
},
});
})
.then(res => res.body);
export default {
startOauth2(subdomain, clientId, sub = null, silent = false) {
return networkSvc.startOauth2(
/**
* https://support.zendesk.com/hc/en-us/articles/203663836-Using-OAuth-authentication-with-your-application
*/
async startOauth2(subdomain, clientId, sub = null, silent = false) {
const { accessToken } = await networkSvc.startOauth2(
`https://${subdomain}.zendesk.com/oauth/authorizations/new`,
{
client_id: clientId,
@ -19,33 +24,39 @@ export default {
scope: 'read hc:write',
},
silent,
)
// Call the user info endpoint
.then(({ accessToken }) => request({ accessToken }, {
url: `https://${subdomain}.zendesk.com/api/v2/users/me.json`,
})
.then((res) => {
const uniqueSub = `${subdomain}/${res.body.user.id}`;
// Check the returned sub consistency
if (sub && uniqueSub !== sub) {
throw new Error('Zendesk account ID not expected.');
}
// Build token object including scopes and sub
const token = {
accessToken,
name: res.body.user.name,
subdomain,
sub: uniqueSub,
};
// Add token to zendeskTokens
store.dispatch('data/setZendeskToken', token);
return token;
}));
);
// Call the user info endpoint
const { user } = await request({ accessToken }, {
url: `https://${subdomain}.zendesk.com/api/v2/users/me.json`,
});
const uniqueSub = `${subdomain}/${user.id}`;
// Check the returned sub consistency
if (sub && uniqueSub !== sub) {
throw new Error('Zendesk account ID not expected.');
}
// Build token object including scopes and sub
const token = {
accessToken,
name: user.name,
subdomain,
sub: uniqueSub,
};
// Add token to zendeskTokens
store.dispatch('data/setZendeskToken', token);
return token;
},
addAccount(subdomain, clientId) {
return this.startOauth2(subdomain, clientId);
},
uploadArticle(
/**
* https://developer.zendesk.com/rest_api/docs/help_center/articles
*/
async uploadArticle({
token,
sectionId,
articleId,
@ -54,20 +65,25 @@ export default {
labels,
locale,
isDraft,
) {
}) {
const article = {
title,
body: content,
locale,
draft: isDraft,
};
if (articleId) {
return request(token, {
// Update article
await request(token, {
method: 'PUT',
url: `https://${token.subdomain}.zendesk.com/api/v2/help_center/articles/${articleId}/translations/${locale}.json`,
body: { translation: article },
})
.then(() => labels && request(token, {
});
// Add labels
if (labels) {
await request(token, {
method: 'PUT',
url: `https://${token.subdomain}.zendesk.com/api/v2/help_center/articles/${articleId}.json`,
body: {
@ -75,17 +91,20 @@ export default {
label_names: labels,
},
},
}))
.then(() => articleId);
});
}
return articleId;
}
// Create new article
if (labels) {
article.label_names = labels;
}
return request(token, {
const body = await request(token, {
method: 'POST',
url: `https://${token.subdomain}.zendesk.com/api/v2/help_center/sections/${sectionId}/articles.json`,
body: { article },
})
.then(res => `${res.body.article.id}`);
});
return `${body.article.id}`;
},
};

View File

@ -14,27 +14,18 @@ export default new Provider({
const token = this.getToken(location);
return `${location.postId}${location.domain}${token.name}`;
},
publish(token, html, metadata, publishLocation) {
return wordpressHelper.uploadPost(
async publish(token, html, metadata, publishLocation) {
const post = await wordpressHelper.uploadPost({
...publishLocation,
...metadata,
token,
publishLocation.domain,
publishLocation.siteId,
publishLocation.postId,
metadata.title,
html,
metadata.tags,
metadata.categories,
metadata.excerpt,
metadata.author,
metadata.featuredImage,
metadata.status,
metadata.date,
)
.then(post => ({
...publishLocation,
siteId: `${post.site_ID}`,
postId: `${post.ID}`,
}));
content: html,
});
return {
...publishLocation,
siteId: `${post.site_ID}`,
postId: `${post.ID}`,
};
},
makeLocation(token, domain, postId) {
const location = {

View File

@ -15,21 +15,19 @@ export default new Provider({
const token = this.getToken(location);
return `${location.articleId}${token.name}${token.subdomain}`;
},
publish(token, html, metadata, publishLocation) {
return zendeskHelper.uploadArticle(
async publish(token, html, metadata, publishLocation) {
const articleId = await zendeskHelper.uploadArticle({
...publishLocation,
token,
publishLocation.sectionId,
publishLocation.articleId,
metadata.title,
html,
metadata.tags,
publishLocation.locale,
metadata.status === 'draft',
)
.then(articleId => ({
...publishLocation,
articleId,
}));
title: metadata.title,
content: html,
labels: metadata.tags,
isDraft: metadata.status === 'draft',
});
return {
...publishLocation,
articleId,
};
},
makeLocation(token, sectionId, locale, articleId) {
const location = {

View File

@ -38,80 +38,66 @@ const ensureDate = (value, defaultValue) => {
return new Date(`${value}`);
};
function publish(publishLocation) {
const publish = async (publishLocation) => {
const { fileId } = publishLocation;
const template = store.getters['data/allTemplates'][publishLocation.templateId];
return exportSvc.applyTemplate(fileId, template)
.then(html => localDbSvc.loadItem(`${fileId}/content`)
.then((content) => {
const file = store.state.file.itemMap[fileId];
const properties = utils.computeProperties(content.properties);
const provider = providerRegistry.providers[publishLocation.providerId];
const token = provider.getToken(publishLocation);
const metadata = {
title: ensureString(properties.title, file.name),
author: ensureString(properties.author),
tags: ensureArray(properties.tags),
categories: ensureArray(properties.categories),
excerpt: ensureString(properties.excerpt),
featuredImage: ensureString(properties.featuredImage),
status: ensureString(properties.status),
date: ensureDate(properties.date, new Date()),
};
return provider.publish(token, html, metadata, publishLocation);
}));
}
const html = await exportSvc.applyTemplate(fileId, template);
const content = await localDbSvc.loadItem(`${fileId}/content`);
const file = store.state.file.itemMap[fileId];
const properties = utils.computeProperties(content.properties);
const provider = providerRegistry.providers[publishLocation.providerId];
const token = provider.getToken(publishLocation);
const metadata = {
title: ensureString(properties.title, file.name),
author: ensureString(properties.author),
tags: ensureArray(properties.tags),
categories: ensureArray(properties.categories),
excerpt: ensureString(properties.excerpt),
featuredImage: ensureString(properties.featuredImage),
status: ensureString(properties.status),
date: ensureDate(properties.date, new Date()),
};
return provider.publish(token, html, metadata, publishLocation);
};
function publishFile(fileId) {
const publishFile = async (fileId) => {
let counter = 0;
return loadContent(fileId)
.then(() => {
const publishLocations = [
...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
];
const publishOneContentLocation = () => {
const publishLocation = publishLocations.shift();
if (!publishLocation) {
return null;
}
return store.dispatch('queue/doWithLocation', {
location: publishLocation,
promise: publish(publishLocation)
.then((publishLocationToStore) => {
// Replace publish location if modified
if (utils.serializeObject(publishLocation) !==
utils.serializeObject(publishLocationToStore)
) {
store.commit('publishLocation/patchItem', publishLocationToStore);
}
counter += 1;
return publishOneContentLocation();
}, (err) => {
if (store.state.offline) {
throw err;
}
console.error(err); // eslint-disable-line no-console
store.dispatch('notification/error', err);
return publishOneContentLocation();
}),
});
};
return publishOneContentLocation();
})
.then(() => {
const file = store.state.file.itemMap[fileId];
store.dispatch('notification/info', `"${file.name}" was published to ${counter} location(s).`);
})
.then(
() => localDbSvc.unloadContents(),
err => localDbSvc.unloadContents()
.then(() => {
throw err;
}),
);
}
await loadContent(fileId);
const publishLocations = [
...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
];
try {
await utils.awaitSequence(publishLocations, async (publishLocation) => {
await store.dispatch('queue/doWithLocation', {
location: publishLocation,
action: async () => {
try {
const publishLocationToStore = await publish(publishLocation);
// Replace publish location if modified
if (utils.serializeObject(publishLocation) !==
utils.serializeObject(publishLocationToStore)
) {
store.commit('publishLocation/patchItem', publishLocationToStore);
}
counter += 1;
} catch (err) {
if (store.state.offline) {
throw err;
}
console.error(err); // eslint-disable-line no-console
store.dispatch('notification/error', err);
}
},
});
});
const file = store.state.file.itemMap[fileId];
store.dispatch('notification/info', `"${file.name}" was published to ${counter} location(s).`);
} finally {
await localDbSvc.unloadContents();
}
};
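utils.awaitSequence is called above but its implementation is not part of this diff. A minimal sketch of a sequential awaiter consistent with that call site (hypothetical, not the actual StackEdit helper):

// Hypothetical sketch: run an async callback over each value in order,
// awaiting each call before starting the next one (unlike Promise.all).
const awaitSequence = async (values, asyncFn) => {
  const results = [];
  for (const value of values) {
    results.push(await asyncFn(value)); // eslint-disable-line no-await-in-loop
  }
  return results;
};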
function requestPublish() {
const requestPublish = () => {
// No publish in light mode
if (store.state.light) {
return;
@ -135,21 +121,21 @@ function requestPublish() {
intervalId = utils.setInterval(() => attempt(), 1000);
attempt();
}));
}
};
function createPublishLocation(publishLocation) {
const createPublishLocation = (publishLocation) => {
publishLocation.id = utils.uid();
const currentFile = store.getters['file/current'];
publishLocation.fileId = currentFile.id;
store.dispatch(
'queue/enqueue',
() => publish(publishLocation)
.then((publishLocationToStore) => {
store.commit('publishLocation/setItem', publishLocationToStore);
store.dispatch('notification/info', `A new publication location was added to "${currentFile.name}".`);
}),
async () => {
const publishLocationToStore = await publish(publishLocation);
store.commit('publishLocation/setItem', publishLocationToStore);
store.dispatch('notification/info', `A new publication location was added to "${currentFile.name}".`);
},
);
}
};
export default {
requestPublish,

View File

@ -1,46 +1,46 @@
function SectionDimension(startOffset, endOffset) {
this.startOffset = startOffset;
this.endOffset = endOffset;
this.height = endOffset - startOffset;
class SectionDimension {
constructor(startOffset, endOffset) {
this.startOffset = startOffset;
this.endOffset = endOffset;
this.height = endOffset - startOffset;
}
}
function dimensionNormalizer(dimensionName) {
return (editorSvc) => {
const dimensionList = editorSvc.previewCtx.sectionDescList
.map(sectionDesc => sectionDesc[dimensionName]);
let dimension;
let i;
let j;
for (i = 0; i < dimensionList.length; i += 1) {
dimension = dimensionList[i];
if (dimension.height) {
for (j = i + 1; j < dimensionList.length && dimensionList[j].height === 0; j += 1) {
// Loop
}
const normalizeFactor = j - i;
if (normalizeFactor !== 1) {
const normalizedHeight = dimension.height / normalizeFactor;
const dimensionNormalizer = dimensionName => (editorSvc) => {
const dimensionList = editorSvc.previewCtx.sectionDescList
.map(sectionDesc => sectionDesc[dimensionName]);
let dimension;
let i;
let j;
for (i = 0; i < dimensionList.length; i += 1) {
dimension = dimensionList[i];
if (dimension.height) {
for (j = i + 1; j < dimensionList.length && dimensionList[j].height === 0; j += 1) {
// Loop
}
const normalizeFactor = j - i;
if (normalizeFactor !== 1) {
const normalizedHeight = dimension.height / normalizeFactor;
dimension.height = normalizedHeight;
dimension.endOffset = dimension.startOffset + dimension.height;
for (j = i + 1; j < i + normalizeFactor; j += 1) {
const startOffset = dimension.endOffset;
dimension = dimensionList[j];
dimension.startOffset = startOffset;
dimension.height = normalizedHeight;
dimension.endOffset = dimension.startOffset + dimension.height;
for (j = i + 1; j < i + normalizeFactor; j += 1) {
const startOffset = dimension.endOffset;
dimension = dimensionList[j];
dimension.startOffset = startOffset;
dimension.height = normalizedHeight;
dimension.endOffset = dimension.startOffset + dimension.height;
}
i = j - 1;
}
i = j - 1;
}
}
};
}
}
};
const normalizeEditorDimensions = dimensionNormalizer('editorDimension');
const normalizePreviewDimensions = dimensionNormalizer('previewDimension');
const normalizeTocDimensions = dimensionNormalizer('tocDimension');
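For illustration (values invented, not taken from the codebase), this is what dimensionNormalizer does to a run of zero-height sections following a measured one:

// Illustrative only: a 30px section followed by two zero-height sections.
// before: [{ startOffset: 0,  endOffset: 30, height: 30 },
//          { startOffset: 30, endOffset: 30, height: 0  },
//          { startOffset: 30, endOffset: 30, height: 0  }]
// after:  [{ startOffset: 0,  endOffset: 10, height: 10 },
//          { startOffset: 10, endOffset: 20, height: 10 },
//          { startOffset: 20, endOffset: 30, height: 10 }]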
function measureSectionDimensions(editorSvc) {
const measureSectionDimensions = (editorSvc) => {
let editorSectionOffset = 0;
let previewSectionOffset = 0;
let tocSectionOffset = 0;
@ -106,7 +106,7 @@ function measureSectionDimensions(editorSvc) {
normalizeEditorDimensions(editorSvc);
normalizePreviewDimensions(editorSvc);
normalizeTocDimensions(editorSvc);
}
};
export default {
measureSectionDimensions,

View File

@ -8,20 +8,19 @@ let lastCheck = 0;
const appId = 'ESTHdCYOi18iLhhO';
let monetize;
const getMonetize = () => Promise.resolve()
.then(() => networkSvc.loadScript('https://cdn.monetizejs.com/api/js/latest/monetize.min.js'))
.then(() => {
monetize = monetize || new window.MonetizeJS({
applicationID: appId,
});
const getMonetize = async () => {
await networkSvc.loadScript('https://cdn.monetizejs.com/api/js/latest/monetize.min.js');
monetize = monetize || new window.MonetizeJS({
applicationID: appId,
});
};
const isGoogleSponsor = () => {
const sponsorToken = store.getters['workspace/sponsorToken'];
return sponsorToken && sponsorToken.isSponsor;
};
const checkPayment = () => {
const checkPayment = async () => {
const currentDate = Date.now();
if (!isGoogleSponsor()
&& networkSvc.isUserActive()
@ -30,15 +29,15 @@ const checkPayment = () => {
&& lastCheck + checkPaymentEvery < currentDate
) {
lastCheck = currentDate;
getMonetize()
.then(() => monetize.getPaymentsImmediate((err, payments) => {
const isSponsor = payments && payments.app === appId && (
(payments.chargeOption && payments.chargeOption.alias === 'once') ||
(payments.subscriptionOption && payments.subscriptionOption.alias === 'yearly'));
if (isSponsor !== store.state.monetizeSponsor) {
store.commit('setMonetizeSponsor', isSponsor);
}
}));
await getMonetize();
monetize.getPaymentsImmediate((err, payments) => {
const isSponsor = payments && payments.app === appId && (
(payments.chargeOption && payments.chargeOption.alias === 'once') ||
(payments.subscriptionOption && payments.subscriptionOption.alias === 'yearly'));
if (isSponsor !== store.state.monetizeSponsor) {
store.commit('setMonetizeSponsor', isSponsor);
}
});
}
};
@ -46,12 +45,11 @@ export default {
init: () => {
utils.setInterval(checkPayment, 2000);
},
getToken() {
async getToken() {
if (isGoogleSponsor() || store.state.offline) {
return Promise.resolve();
return null;
}
return getMonetize()
.then(() => new Promise(resolve =>
monetize.getTokenImmediate((err, result) => resolve(result))));
await getMonetize();
return new Promise(resolve => monetize.getTokenImmediate((err, result) => resolve(result)));
},
};
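For reference, a hedged example of a consumer after this change (callSponsorOnlyApi is a made-up name, not a StackEdit function):

// Hypothetical consumer: getToken() now resolves to null for Google sponsors
// and offline users, and to the MonetizeJS token otherwise.
const callSponsorOnlyApi = async () => {
  const token = await sponsorSvc.getToken();
  if (token) {
    // pass `token` to a sponsor-only endpoint here
  }
};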

File diff suppressed because it is too large

View File

@ -25,75 +25,73 @@ export default {
}
this.closed = true;
},
init() {
async init() {
if (!origin || !window.parent) {
return Promise.resolve();
return;
}
store.commit('setLight', true);
return fileSvc.createFile({
const file = await fileSvc.createFile({
name: fileName || utils.getHostname(origin),
text: contentText || '\n',
properties: contentProperties,
parentId: 'temp',
}, true)
.then((file) => {
const fileItemMap = store.state.file.itemMap;
}, true);
// Sanitize file creations
const lastCreated = {};
Object.entries(store.getters['data/lastCreated']).forEach(([id, createdOn]) => {
if (fileItemMap[id] && fileItemMap[id].parentId === 'temp') {
lastCreated[id] = createdOn;
}
});
const fileItemMap = store.state.file.itemMap;
// Track file creation from another site
lastCreated[file.id] = {
created: Date.now(),
};
// Sanitize file creations
const lastCreated = {};
Object.entries(store.getters['data/lastCreated']).forEach(([id, createdOn]) => {
if (fileItemMap[id] && fileItemMap[id].parentId === 'temp') {
lastCreated[id] = createdOn;
}
});
// Keep only the last 10 temp files created by other sites
Object.entries(lastCreated)
.sort(([, createdOn1], [, createdOn2]) => createdOn2 - createdOn1)
.splice(10)
.forEach(([id]) => {
delete lastCreated[id];
fileSvc.deleteFile(id);
});
// Track file creation from another site
lastCreated[file.id] = {
created: Date.now(),
};
// Store file creations and open the file
store.dispatch('data/setLastCreated', lastCreated);
store.commit('file/setCurrentId', file.id);
const onChange = cledit.Utils.debounce(() => {
const currentFile = store.getters['file/current'];
if (currentFile.id !== file.id) {
// Close editor if file has changed for some reason
this.close();
} else if (!this.closed && editorSvc.previewCtx.html != null) {
const content = store.getters['content/current'];
const properties = utils.computeProperties(content.properties);
window.parent.postMessage({
type: 'fileChange',
payload: {
id: file.id,
name: currentFile.name,
content: {
text: content.text.slice(0, -1), // Remove trailing LF
properties,
yamlProperties: content.properties,
html: editorSvc.previewCtx.html,
},
},
}, origin);
}
}, 25);
// Watch preview refresh and file name changes
editorSvc.$on('previewCtx', onChange);
store.watch(() => store.getters['file/current'].name, onChange);
// Keep only the last 10 temp files created by other sites
Object.entries(lastCreated)
.sort(([, createdOn1], [, createdOn2]) => createdOn2 - createdOn1)
.splice(10)
.forEach(([id]) => {
delete lastCreated[id];
fileSvc.deleteFile(id);
});
// Store file creations and open the file
store.dispatch('data/setLastCreated', lastCreated);
store.commit('file/setCurrentId', file.id);
const onChange = cledit.Utils.debounce(() => {
const currentFile = store.getters['file/current'];
if (currentFile.id !== file.id) {
// Close editor if file has changed for some reason
this.close();
} else if (!this.closed && editorSvc.previewCtx.html != null) {
const content = store.getters['content/current'];
const properties = utils.computeProperties(content.properties);
window.parent.postMessage({
type: 'fileChange',
payload: {
id: file.id,
name: currentFile.name,
content: {
text: content.text.slice(0, -1), // Remove trailing LF
properties,
yamlProperties: content.properties,
html: editorSvc.previewCtx.html,
},
},
}, origin);
}
}, 25);
// Watch preview refresh and file name changes
editorSvc.$on('previewCtx', onChange);
store.watch(() => store.getters['file/current'].name, onChange);
},
};
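The postMessage call above defines the message shape sent to the embedding page. A minimal hypothetical listener on that page (stackeditOrigin is an assumption; use the origin the editor is actually served from):

// Hypothetical listener for the 'fileChange' messages posted above.
const stackeditOrigin = 'https://stackedit.io'; // assumed origin of the embedded editor
window.addEventListener('message', (event) => {
  if (event.origin !== stackeditOrigin || !event.data || event.data.type !== 'fileChange') {
    return;
  }
  const { name, content } = event.data.payload;
  // content.text is the Markdown (trailing LF removed), content.html the rendered preview.
  console.log(name, content.text, content.properties, content.html);
});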

View File

@ -16,7 +16,7 @@ export default {
promised[id] = true;
store.commit('userInfo/addItem', { id, name, imageUrl });
},
getInfo(userId) {
async getInfo(userId) {
if (!promised[userId]) {
const [type, sub] = parseUserId(userId);
@ -33,27 +33,26 @@ export default {
if (!store.state.offline) {
promised[userId] = true;
switch (type) {
case 'github': {
return githubHelper.getUser(sub)
.catch((err) => {
if (err.status !== 404) {
promised[userId] = false;
}
});
}
case 'github':
try {
await githubHelper.getUser(sub);
} catch (err) {
if (err.status !== 404) {
promised[userId] = false;
}
}
break;
case 'google':
default: {
return googleHelper.getUser(sub)
.catch((err) => {
if (err.status !== 404) {
promised[userId] = false;
}
});
}
default:
try {
await googleHelper.getUser(sub);
} catch (err) {
if (err.status !== 404) {
promised[userId] = false;
}
}
}
}
}
return null;
},
};

View File

@ -224,6 +224,14 @@ export default {
};
return runWithNextValue();
},
someResult(values, func) {
let result;
values.some((value) => {
result = func(value);
return result;
});
return result;
},
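A hedged usage example for the new someResult helper (values are invented):

// someResult returns the result of the first call that yields a truthy value
// and stops evaluating func for the remaining values.
const firstTruthy = utils.someResult([null, undefined, 42, 7], value => value);
// firstTruthy === 42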
parseQueryParams,
addQueryParams(url = '', params = {}, hash = false) {
const keys = Object.keys(params).filter(key => params[key] != null);

View File

@ -31,11 +31,11 @@ module.mutations = {
module.getters = {
...module.getters,
current: (state, getters, rootState, rootGetters) => {
if (state.revisionContent) {
return state.revisionContent;
current: ({ itemMap, revisionContent }, getters, rootState, rootGetters) => {
if (revisionContent) {
return revisionContent;
}
return state.itemMap[`${rootGetters['file/current'].id}/content`] || empty();
return itemMap[`${rootGetters['file/current'].id}/content`] || empty();
},
currentChangeTrigger: (state, getters) => {
const { current } = getters;
@ -45,11 +45,9 @@ module.getters = {
current.hash,
]);
},
currentProperties: (state, getters) => utils.computeProperties(getters.current.properties),
isCurrentEditable: (state, getters, rootState, rootGetters) =>
!state.revisionContent &&
getters.current.id &&
rootGetters['layout/styles'].showEditor,
currentProperties: (state, { current }) => utils.computeProperties(current.properties),
isCurrentEditable: ({ revisionContent }, { current }, rootState, rootGetters) =>
!revisionContent && current.id && rootGetters['layout/styles'].showEditor,
};
module.actions = {
@ -76,7 +74,7 @@ module.actions = {
});
}
},
restoreRevision({
async restoreRevision({
state,
getters,
commit,
@ -84,31 +82,29 @@ module.actions = {
}) {
const { revisionContent } = state;
if (revisionContent) {
dispatch('modal/fileRestoration', null, { root: true })
.then(() => {
// Close revision
commit('setRevisionContent');
const currentContent = utils.deepCopy(getters.current);
if (currentContent) {
// Restore text and move discussions
const diffs = diffMatchPatch
.diff_main(currentContent.text, revisionContent.originalText);
diffMatchPatch.diff_cleanupSemantic(diffs);
Object.entries(currentContent.discussions).forEach(([, discussion]) => {
const adjustOffset = (offsetName) => {
const marker = new cledit.Marker(discussion[offsetName], offsetName === 'end');
marker.adjustOffset(diffs);
discussion[offsetName] = marker.offset;
};
adjustOffset('start');
adjustOffset('end');
});
dispatch('patchCurrent', {
...currentContent,
text: revisionContent.originalText,
});
}
await dispatch('modal/fileRestoration', null, { root: true });
// Close revision
commit('setRevisionContent');
const currentContent = utils.deepCopy(getters.current);
if (currentContent) {
// Restore text and move discussions
const diffs = diffMatchPatch
.diff_main(currentContent.text, revisionContent.originalText);
diffMatchPatch.diff_cleanupSemantic(diffs);
Object.entries(currentContent.discussions).forEach(([, discussion]) => {
const adjustOffset = (offsetName) => {
const marker = new cledit.Marker(discussion[offsetName], offsetName === 'end');
marker.adjustOffset(diffs);
discussion[offsetName] = marker.offset;
};
adjustOffset('start');
adjustOffset('end');
});
dispatch('patchCurrent', {
...currentContent,
text: revisionContent.originalText,
});
}
}
},
};

View File

@ -5,8 +5,8 @@ const module = moduleTemplate(empty, true);
module.getters = {
...module.getters,
current: (state, getters, rootState, rootGetters) =>
state.itemMap[`${rootGetters['file/current'].id}/contentState`] || empty(),
current: ({ itemMap }, getters, rootState, rootGetters) =>
itemMap[`${rootGetters['file/current'].id}/contentState`] || empty(),
};
module.actions = {

View File

@ -104,7 +104,7 @@ export default {
lsItemMap: {},
},
mutations: {
setItem: (state, value) => {
setItem: ({ itemMap, lsItemMap }, value) => {
// Create an empty item and override its data field
const emptyItem = empty(value.id);
const data = typeof value.data === 'object'
@ -118,19 +118,19 @@ export default {
});
// Store item in itemMap or lsItemMap if it's stored in the localStorage
Vue.set(lsItemIdSet.has(item.id) ? state.lsItemMap : state.itemMap, item.id, item);
Vue.set(lsItemIdSet.has(item.id) ? lsItemMap : itemMap, item.id, item);
},
deleteItem(state, id) {
deleteItem({ itemMap }, id) {
// Only used by localDbSvc to clean itemMap from object moved to localStorage
Vue.delete(state.itemMap, id);
Vue.delete(itemMap, id);
},
},
getters: {
workspaces: getter('workspaces'),
sanitizedWorkspaces: (state, getters, rootState, rootGetters) => {
sanitizedWorkspaces: (state, { workspaces }, rootState, rootGetters) => {
const sanitizedWorkspaces = {};
const mainWorkspaceToken = rootGetters['workspace/mainWorkspaceToken'];
Object.entries(getters.workspaces).forEach(([id, workspace]) => {
Object.entries(workspaces).forEach(([id, workspace]) => {
const sanitizedWorkspace = {
id,
providerId: mainWorkspaceToken && 'googleDriveAppData',
@ -146,9 +146,9 @@ export default {
return sanitizedWorkspaces;
},
settings: getter('settings'),
computedSettings: (state, getters) => {
const customSettings = yaml.safeLoad(getters.settings);
const settings = yaml.safeLoad(defaultSettings);
computedSettings: (state, { settings }) => {
const customSettings = yaml.safeLoad(settings);
const parsedSettings = yaml.safeLoad(defaultSettings);
const override = (obj, opt) => {
const objType = Object.prototype.toString.call(obj);
const optType = Object.prototype.toString.call(opt);
@ -166,44 +166,44 @@ export default {
});
return obj;
};
return override(settings, customSettings);
return override(parsedSettings, customSettings);
},
localSettings: getter('localSettings'),
layoutSettings: getter('layoutSettings'),
templates: getter('templates'),
allTemplates: (state, getters) => ({
...getters.templates,
allTemplates: (state, { templates }) => ({
...templates,
...additionalTemplates,
}),
lastCreated: getter('lastCreated'),
lastOpened: getter('lastOpened'),
lastOpenedIds: (state, getters, rootState) => {
const lastOpened = {
...getters.lastOpened,
lastOpenedIds: (state, { lastOpened }, rootState) => {
const result = {
...lastOpened,
};
const currentFileId = rootState.file.currentId;
if (currentFileId && !lastOpened[currentFileId]) {
lastOpened[currentFileId] = Date.now();
if (currentFileId && !result[currentFileId]) {
result[currentFileId] = Date.now();
}
return Object.keys(lastOpened)
return Object.keys(result)
.filter(id => rootState.file.itemMap[id])
.sort((id1, id2) => lastOpened[id2] - lastOpened[id1])
.sort((id1, id2) => result[id2] - result[id1])
.slice(0, 20);
},
syncData: getter('syncData'),
syncDataByItemId: (state, getters) => {
syncDataByItemId: (state, { syncData }) => {
const result = {};
Object.entries(getters.syncData).forEach(([, value]) => {
Object.entries(syncData).forEach(([, value]) => {
result[value.itemId] = value;
});
return result;
},
syncDataByType: (state, getters) => {
syncDataByType: (state, { syncData }) => {
const result = {};
utils.types.forEach((type) => {
result[type] = {};
});
Object.entries(getters.syncData).forEach(([, item]) => {
Object.entries(syncData).forEach(([, item]) => {
if (result[item.type]) {
result[item.type][item.itemId] = item;
}
@ -212,12 +212,12 @@ export default {
},
dataSyncData: getter('dataSyncData'),
tokens: getter('tokens'),
googleTokens: (state, getters) => getters.tokens.google || {},
couchdbTokens: (state, getters) => getters.tokens.couchdb || {},
dropboxTokens: (state, getters) => getters.tokens.dropbox || {},
githubTokens: (state, getters) => getters.tokens.github || {},
wordpressTokens: (state, getters) => getters.tokens.wordpress || {},
zendeskTokens: (state, getters) => getters.tokens.zendesk || {},
googleTokens: (state, { tokens }) => tokens.google || {},
couchdbTokens: (state, { tokens }) => tokens.couchdb || {},
dropboxTokens: (state, { tokens }) => tokens.dropbox || {},
githubTokens: (state, { tokens }) => tokens.github || {},
wordpressTokens: (state, { tokens }) => tokens.wordpress || {},
zendeskTokens: (state, { tokens }) => tokens.zendesk || {},
},
actions: {
setWorkspaces: setter('workspaces'),

View File

@ -59,8 +59,8 @@ export default {
},
},
getters: {
newDiscussion: state =>
state.currentDiscussionId === state.newDiscussionId && state.newDiscussion,
newDiscussion: ({ currentDiscussionId, newDiscussionId, newDiscussion }) =>
currentDiscussionId === newDiscussionId && newDiscussion,
currentFileDiscussionLastComments: (state, getters, rootState, rootGetters) => {
const { discussions, comments } = rootGetters['content/current'];
const discussionLastComments = {};
@ -74,14 +74,18 @@ export default {
});
return discussionLastComments;
},
currentFileDiscussions: (state, getters, rootState, rootGetters) => {
currentFileDiscussions: (
{ newDiscussionId },
{ newDiscussion, currentFileDiscussionLastComments },
rootState,
rootGetters,
) => {
const currentFileDiscussions = {};
const { newDiscussion } = getters;
if (newDiscussion) {
currentFileDiscussions[state.newDiscussionId] = newDiscussion;
currentFileDiscussions[newDiscussionId] = newDiscussion;
}
const { discussions } = rootGetters['content/current'];
Object.entries(getters.currentFileDiscussionLastComments)
Object.entries(currentFileDiscussionLastComments)
.sort(([, lastComment1], [, lastComment2]) =>
lastComment1.created - lastComment2.created)
.forEach(([discussionId]) => {
@ -89,17 +93,22 @@ export default {
});
return currentFileDiscussions;
},
currentDiscussion: (state, getters) =>
getters.currentFileDiscussions[state.currentDiscussionId],
currentDiscussion: ({ currentDiscussionId }, { currentFileDiscussions }) =>
currentFileDiscussions[currentDiscussionId],
previousDiscussionId: idShifter(-1),
nextDiscussionId: idShifter(1),
currentDiscussionComments: (state, getters, rootState, rootGetters) => {
currentDiscussionComments: (
{ currentDiscussionId },
{ currentDiscussion },
rootState,
rootGetters,
) => {
const comments = {};
if (getters.currentDiscussion) {
if (currentDiscussion) {
const contentComments = rootGetters['content/current'].comments;
Object.entries(contentComments)
.filter(([, comment]) =>
comment.discussionId === state.currentDiscussionId)
comment.discussionId === currentDiscussionId)
.sort(([, comment1], [, comment2]) =>
comment1.created - comment2.created)
.forEach(([commentId, comment]) => {
@ -108,10 +117,12 @@ export default {
}
return comments;
},
currentDiscussionLastCommentId: (state, getters) =>
Object.keys(getters.currentDiscussionComments).pop(),
currentDiscussionLastComment: (state, getters) =>
getters.currentDiscussionComments[getters.currentDiscussionLastCommentId],
currentDiscussionLastCommentId: (state, { currentDiscussionComments }) =>
Object.keys(currentDiscussionComments).pop(),
currentDiscussionLastComment: (
state,
{ currentDiscussionComments, currentDiscussionLastCommentId },
) => currentDiscussionComments[currentDiscussionLastCommentId],
},
actions: {
cancelNewComment({ commit, getters }) {
@ -120,15 +131,15 @@ export default {
commit('setCurrentDiscussionId', getters.nextDiscussionId);
}
},
createNewDiscussion({ commit, dispatch, rootGetters }, selection) {
async createNewDiscussion({ commit, dispatch, rootGetters }, selection) {
const loginToken = rootGetters['workspace/loginToken'];
if (!loginToken) {
dispatch('modal/signInForComment', {
onResolve: () => googleHelper.signin()
.then(() => syncSvc.requestSync())
.then(() => dispatch('createNewDiscussion', selection)),
}, { root: true })
.catch(() => { /* Cancel */ });
try {
await dispatch('modal/signInForComment', null, { root: true });
await googleHelper.signin();
syncSvc.requestSync();
await dispatch('createNewDiscussion', selection);
} catch (e) { /* cancel */ }
} else if (selection) {
let text = rootGetters['content/current'].text.slice(selection.start, selection.end).trim();
const maxLength = 80;

View File

@ -44,11 +44,11 @@ const fakeFileNode = new Node(emptyFile());
fakeFileNode.item.id = 'fake';
fakeFileNode.noDrag = true;
function getParent(node, getters) {
if (node.isNil) {
function getParent({ item, isNil }, { nodeMap, rootNode }) {
if (isNil) {
return nilFileNode;
}
return getters.nodeMap[node.item.parentId] || getters.rootNode;
return nodeMap[item.parentId] || rootNode;
}
function getFolder(node, getters) {
@ -67,6 +67,21 @@ export default {
newChildNode: nilFileNode,
openNodes: {},
},
mutations: {
setSelectedId: setter('selectedId'),
setEditingId: setter('editingId'),
setDragSourceId: setter('dragSourceId'),
setDragTargetId: setter('dragTargetId'),
setNewItem(state, item) {
state.newChildNode = item ? new Node(item, [], item.type === 'folder') : nilFileNode;
},
setNewItemName(state, name) {
state.newChildNode.item.name = name;
},
toggleOpenNode(state, id) {
Vue.set(state.openNodes, id, !state.openNodes[id]);
},
},
getters: {
nodeStructure: (state, getters, rootState, rootGetters) => {
const rootNode = new Node(emptyFolder(), [], true, true);
@ -138,41 +153,26 @@ export default {
rootNode,
};
},
nodeMap: (state, getters) => getters.nodeStructure.nodeMap,
rootNode: (state, getters) => getters.nodeStructure.rootNode,
nodeMap: (state, { nodeStructure }) => nodeStructure.nodeMap,
rootNode: (state, { nodeStructure }) => nodeStructure.rootNode,
newChildNodeParent: (state, getters) => getParent(state.newChildNode, getters),
selectedNode: (state, getters) => getters.nodeMap[state.selectedId] || nilFileNode,
selectedNode: ({ selectedId }, { nodeMap }) => nodeMap[selectedId] || nilFileNode,
selectedNodeFolder: (state, getters) => getFolder(getters.selectedNode, getters),
editingNode: (state, getters) => getters.nodeMap[state.editingId] || nilFileNode,
dragSourceNode: (state, getters) => getters.nodeMap[state.dragSourceId] || nilFileNode,
dragTargetNode: (state, getters) => {
if (state.dragTargetId === 'fake') {
editingNode: ({ editingId }, { nodeMap }) => nodeMap[editingId] || nilFileNode,
dragSourceNode: ({ dragSourceId }, { nodeMap }) => nodeMap[dragSourceId] || nilFileNode,
dragTargetNode: ({ dragTargetId }, { nodeMap }) => {
if (dragTargetId === 'fake') {
return fakeFileNode;
}
return getters.nodeMap[state.dragTargetId] || nilFileNode;
return nodeMap[dragTargetId] || nilFileNode;
},
dragTargetNodeFolder: (state, getters) => {
if (state.dragTargetId === 'fake') {
dragTargetNodeFolder: ({ dragTargetId }, getters) => {
if (dragTargetId === 'fake') {
return getters.rootNode;
}
return getFolder(getters.dragTargetNode, getters);
},
},
mutations: {
setSelectedId: setter('selectedId'),
setEditingId: setter('editingId'),
setDragSourceId: setter('dragSourceId'),
setDragTargetId: setter('dragTargetId'),
setNewItem(state, item) {
state.newChildNode = item ? new Node(item, [], item.type === 'folder') : nilFileNode;
},
setNewItemName(state, name) {
state.newChildNode.item.name = name;
},
toggleOpenNode(state, id) {
Vue.set(state.openNodes, id, !state.openNodes[id]);
},
},
actions: {
openNode({
state,

View File

@ -10,10 +10,10 @@ module.state = {
module.getters = {
...module.getters,
current: state => state.itemMap[state.currentId] || empty(),
isCurrentTemp: (state, getters) => getters.current.parentId === 'temp',
lastOpened: (state, getters, rootState, rootGetters) =>
state.itemMap[rootGetters['data/lastOpenedIds'][0]] || getters.items[0] || empty(),
current: ({ itemMap, currentId }) => itemMap[currentId] || empty(),
isCurrentTemp: (state, { current }) => current.parentId === 'temp',
lastOpened: ({ itemMap }, { items }, rootState, rootGetters) =>
itemMap[rootGetters['data/lastOpenedIds'][0]] || items[0] || empty(),
};
module.mutations = {

View File

@ -54,13 +54,33 @@ const store = new Vuex.Store({
minuteCounter: 0,
monetizeSponsor: false,
},
mutations: {
setLight: (state, value) => {
state.light = value;
},
setOffline: (state, value) => {
state.offline = value;
},
updateLastOfflineCheck: (state) => {
state.lastOfflineCheck = Date.now();
},
updateMinuteCounter: (state) => {
state.minuteCounter += 1;
},
setMonetizeSponsor: (state, value) => {
state.monetizeSponsor = value;
},
setGoogleSponsor: (state, value) => {
state.googleSponsor = value;
},
},
getters: {
allItemMap: (state) => {
const result = {};
utils.types.forEach(type => Object.assign(result, state[type].itemMap));
return result;
},
itemPaths: (state) => {
itemPaths: (state, getters) => {
const result = {};
const folderMap = state.folder.itemMap;
const getPath = (item) => {
@ -84,8 +104,10 @@ const store = new Vuex.Store({
result[item.id] = itemPath;
return itemPath;
};
[...state.folder.items, ...state.file.items].forEach(item => getPath(item));
[
...getters['folder/items'],
...getters['file/items'],
].forEach(item => getPath(item));
return result;
},
pathItems: (state, { allItemMap, itemPaths }) => {
@ -97,29 +119,9 @@ const store = new Vuex.Store({
});
return result;
},
isSponsor: (state, getters) => {
isSponsor: ({ light, monetizeSponsor }, getters) => {
const sponsorToken = getters['workspace/sponsorToken'];
return state.light || state.monetizeSponsor || (sponsorToken && sponsorToken.isSponsor);
},
},
mutations: {
setLight: (state, value) => {
state.light = value;
},
setOffline: (state, value) => {
state.offline = value;
},
updateLastOfflineCheck: (state) => {
state.lastOfflineCheck = Date.now();
},
updateMinuteCounter: (state) => {
state.minuteCounter += 1;
},
setMonetizeSponsor: (state, value) => {
state.monetizeSponsor = value;
},
setGoogleSponsor: (state, value) => {
state.googleSponsor = value;
return light || monetizeSponsor || (sponsorToken && sponsorToken.isSponsor);
},
},
actions: {

View File

@ -12,22 +12,22 @@ export default (empty) => {
module.getters = {
...module.getters,
groupedByFileId: (state, getters) => {
groupedByFileId: (state, { items }) => {
const groups = {};
getters.items.forEach(item => addToGroup(groups, item));
items.forEach(item => addToGroup(groups, item));
return groups;
},
filteredGroupedByFileId: (state, getters) => {
filteredGroupedByFileId: (state, { items }) => {
const groups = {};
getters.items.filter((item) => {
items.filter((item) => {
// Filter items that we can't use
const provider = providerRegistry.providers[item.providerId];
return provider && provider.getToken(item);
}).forEach(item => addToGroup(groups, item));
return groups;
},
current: (state, getters, rootState, rootGetters) => {
const locations = getters.filteredGroupedByFileId[rootGetters['file/current'].id] || [];
current: (state, { filteredGroupedByFileId }, rootState, rootGetters) => {
const locations = filteredGroupedByFileId[rootGetters['file/current'].id] || [];
return locations.map((location) => {
const provider = providerRegistry.providers[location.providerId];
return {

View File

@ -13,39 +13,28 @@ export default {
},
},
getters: {
config: state => !state.hidden && state.stack[0],
config: ({ hidden, stack }) => !hidden && stack[0],
},
actions: {
open({ commit, state }, param) {
return new Promise((resolve, reject) => {
const config = typeof param === 'object' ? { ...param } : { type: param };
const clean = () => commit('setStack', state.stack.filter((otherConfig => otherConfig !== config)));
config.resolve = (result) => {
clean();
if (config.onResolve) {
// Call onResolve immediately (mostly to prevent browsers from blocking popup windows)
config.onResolve(result)
.then(res => resolve(res));
} else {
resolve(result);
}
};
config.reject = (error) => {
clean();
reject(error);
};
commit('setStack', [config, ...state.stack]);
});
async open({ commit, state }, param) {
const config = typeof param === 'object' ? { ...param } : { type: param };
try {
return await new Promise((resolve, reject) => {
config.resolve = resolve;
config.reject = reject;
commit('setStack', [config, ...state.stack]);
});
} finally {
commit('setStack', state.stack.filter((otherConfig => otherConfig !== config)));
}
},
hideUntil({ commit }, promise) {
commit('setHidden', true);
return promise.then((res) => {
async hideUntil({ commit }, promise) {
try {
commit('setHidden', true);
return await promise;
} finally {
commit('setHidden', false);
return res;
}, (err) => {
commit('setHidden', false);
throw err;
});
}
},
folderDeletion: ({ dispatch }, item) => dispatch('open', {
content: `<p>You are about to delete the folder <b>${item.name}</b>. Its files will be moved to Trash. Are you sure?</p>`,
@ -105,39 +94,34 @@ export default {
resolveText: 'Yes, clean',
rejectText: 'No',
}),
providerRedirection: ({ dispatch }, { providerName, onResolve }) => dispatch('open', {
providerRedirection: ({ dispatch }, { providerName }) => dispatch('open', {
content: `<p>You are about to navigate to the <b>${providerName}</b> authorization page.</p>`,
resolveText: 'Ok, go on',
rejectText: 'Cancel',
onResolve,
}),
workspaceGoogleRedirection: ({ dispatch }, { onResolve }) => dispatch('open', {
workspaceGoogleRedirection: ({ dispatch }) => dispatch('open', {
content: '<p>StackEdit needs full Google Drive access to open this workspace.</p>',
resolveText: 'Ok, grant',
rejectText: 'Cancel',
onResolve,
}),
signInForSponsorship: ({ dispatch }, { onResolve }) => dispatch('open', {
signInForSponsorship: ({ dispatch }) => dispatch('open', {
type: 'signInForSponsorship',
content: `<p>You have to sign in with Google to sponsor.</p>
<div class="modal__info"><b>Note:</b> This will sync your main workspace.</div>`,
resolveText: 'Ok, sign in',
rejectText: 'Cancel',
onResolve,
}),
signInForComment: ({ dispatch }, { onResolve }) => dispatch('open', {
signInForComment: ({ dispatch }) => dispatch('open', {
content: `<p>You have to sign in with Google to start commenting.</p>
<div class="modal__info"><b>Note:</b> This will sync your main workspace.</div>`,
resolveText: 'Ok, sign in',
rejectText: 'Cancel',
onResolve,
}),
signInForHistory: ({ dispatch }, { onResolve }) => dispatch('open', {
signInForHistory: ({ dispatch }) => dispatch('open', {
content: `<p>You have to sign in with Google to enable revision history.</p>
<div class="modal__info"><b>Note:</b> This will sync your main workspace.</div>`,
resolveText: 'Ok, sign in',
rejectText: 'Cancel',
onResolve,
}),
sponsorOnly: ({ dispatch }) => dispatch('open', {
content: '<p>This feature is restricted to sponsors as it relies on server resources.</p>',

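Since onResolve has been removed from the confirmation actions above, callers now await the dispatched modal and run the follow-up work themselves. A hedged example only (`store` is the Vuex store used throughout the codebase):

const redirectToProvider = async () => {
  try {
    await store.dispatch('modal/providerRedirection', { providerName: 'GitHub' });
    // Work that previously lived in onResolve (e.g. starting the provider's
    // authorization flow) now simply follows the await.
  } catch (e) {
    // Modal was cancelled; its stack entry has already been removed by the
    // finally block in the open action.
  }
};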
View File

@ -11,7 +11,7 @@ export default (empty, simpleHash = false) => {
itemMap: {},
},
getters: {
items: state => Object.values(state.itemMap),
items: ({ itemMap }) => Object.values(itemMap),
},
mutations: {
setItem(state, value) {

View File

@ -71,16 +71,13 @@ export default {
}));
}
},
doWithLocation({ commit }, { location, promise }) {
commit('setCurrentLocation', location);
return promise
.then((res) => {
commit('setCurrentLocation', {});
return res;
}, (err) => {
commit('setCurrentLocation', {});
throw err;
});
async doWithLocation({ commit }, { location, action }) {
try {
commit('setCurrentLocation', location);
return await action();
} finally {
commit('setCurrentLocation', {});
}
},
},
};

View File

@ -5,8 +5,8 @@ const module = moduleTemplate(empty, true);
module.getters = {
...module.getters,
current: (state, getters, rootState, rootGetters) =>
state.itemMap[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
current: ({ itemMap }, getters, rootState, rootGetters) =>
itemMap[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
};
export default module;

View File

@ -6,8 +6,8 @@ export default {
itemMap: {},
},
mutations: {
addItem: (state, item) => {
Vue.set(state.itemMap, item.id, item);
addItem: ({ itemMap }, item) => {
Vue.set(itemMap, item.id, item);
},
},
};

View File

@ -19,54 +19,50 @@ export default {
const workspaces = rootGetters['data/sanitizedWorkspaces'];
return workspaces.main;
},
currentWorkspace: (state, getters, rootState, rootGetters) => {
currentWorkspace: ({ currentWorkspaceId }, { mainWorkspace }, rootState, rootGetters) => {
const workspaces = rootGetters['data/sanitizedWorkspaces'];
return workspaces[state.currentWorkspaceId] || getters.mainWorkspace;
return workspaces[currentWorkspaceId] || mainWorkspace;
},
hasUniquePaths: (state, getters) => {
const workspace = getters.currentWorkspace;
return workspace.providerId === 'githubWorkspace';
},
lastSyncActivityKey: (state, getters) => `${getters.currentWorkspace.id}/lastSyncActivity`,
lastFocusKey: (state, getters) => `${getters.currentWorkspace.id}/lastWindowFocus`,
hasUniquePaths: (state, { currentWorkspace }) =>
currentWorkspace.providerId === 'githubWorkspace',
lastSyncActivityKey: (state, { currentWorkspace }) => `${currentWorkspace.id}/lastSyncActivity`,
lastFocusKey: (state, { currentWorkspace }) => `${currentWorkspace.id}/lastWindowFocus`,
mainWorkspaceToken: (state, getters, rootState, rootGetters) => {
const googleTokens = rootGetters['data/googleTokens'];
const loginSubs = Object.keys(googleTokens)
.filter(sub => googleTokens[sub].isLogin);
return googleTokens[loginSubs[0]];
},
syncToken: (state, getters, rootState, rootGetters) => {
const workspace = getters.currentWorkspace;
switch (workspace.providerId) {
syncToken: (state, { currentWorkspace, mainWorkspaceToken }, rootState, rootGetters) => {
switch (currentWorkspace.providerId) {
case 'googleDriveWorkspace': {
const googleTokens = rootGetters['data/googleTokens'];
return googleTokens[workspace.sub];
return googleTokens[currentWorkspace.sub];
}
case 'githubWorkspace': {
const githubTokens = rootGetters['data/githubTokens'];
return githubTokens[workspace.sub];
return githubTokens[currentWorkspace.sub];
}
case 'couchdbWorkspace': {
const couchdbTokens = rootGetters['data/couchdbTokens'];
return couchdbTokens[workspace.id];
return couchdbTokens[currentWorkspace.id];
}
default:
return getters.mainWorkspaceToken;
return mainWorkspaceToken;
}
},
loginToken: (state, getters, rootState, rootGetters) => {
const workspace = getters.currentWorkspace;
switch (workspace.providerId) {
loginToken: (state, { currentWorkspace, mainWorkspaceToken }, rootState, rootGetters) => {
switch (currentWorkspace.providerId) {
case 'googleDriveWorkspace': {
const googleTokens = rootGetters['data/googleTokens'];
return googleTokens[workspace.sub];
return googleTokens[currentWorkspace.sub];
}
case 'githubWorkspace': {
const githubTokens = rootGetters['data/githubTokens'];
return githubTokens[workspace.sub];
return githubTokens[currentWorkspace.sub];
}
default:
return getters.mainWorkspaceToken;
return mainWorkspaceToken;
}
},
userId: (state, { loginToken }, rootState, rootGetters) => {
@ -82,7 +78,7 @@ export default {
});
return prefix ? `${prefix}:${loginToken.sub}` : loginToken.sub;
},
sponsorToken: (state, getters) => getters.mainWorkspaceToken,
sponsorToken: (state, { mainWorkspaceToken }) => mainWorkspaceToken,
},
actions: {
setCurrentWorkspaceId: ({ commit, getters }, value) => {