store renamings

Benoit Schweblin 2018-06-21 20:16:33 +01:00
parent e05e7717eb
commit 7a87015af1
63 changed files with 672 additions and 594 deletions

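Most of the diff below renames keyed collections in the Vuex store so the key is explicit in the name: itemMap becomes itemsById, workspaces becomes workspacesById, token maps gain a BySub suffix, and single-token setters such as setGoogleToken become addGoogleToken. A minimal sketch of that convention, using a hypothetical module rather than the actual store code:

// Hypothetical Vuex-style module (illustrative names, not the StackEdit store):
// collections keyed by an identifier carry an explicit "ById"/"BySub" suffix,
// and the single-item setter is called "add..." since it inserts or replaces one entry.
const tokensModuleSketch = {
  namespaced: true,
  state: {
    googleTokensBySub: {}, // previously "googleTokens"; keyed by the OAuth sub claim
  },
  getters: {
    googleTokensBySub: state => state.googleTokensBySub,
  },
  mutations: {
    addGoogleToken(state, token) { // previously "setGoogleToken"
      state.googleTokensBySub = {
        ...state.googleTokensBySub,
        [token.sub]: token,
      };
    },
  },
};

export default tokensModuleSketch;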
View File

@ -1,15 +1,15 @@
<template>
<div class="explorer-node" :class="{'explorer-node--selected': isSelected, 'explorer-node--open': isOpen, 'explorer-node--drag-target': isDragTargetFolder}" @dragover.prevent @dragenter.stop="node.noDrop || setDragTarget(node.item.id)" @dragleave.stop="isDragTarget && setDragTargetId()" @drop.prevent.stop="onDrop" @contextmenu="onContextMenu">
<div class="explorer-node__item-editor" v-if="isEditing" :class="['explorer-node__item-editor--' + node.item.type]" :style="{paddingLeft: leftPadding}" draggable="true" @dragstart.stop.prevent>
<div class="explorer-node" :class="{'explorer-node--selected': isSelected, 'explorer-node--folder': node.isFolder, 'explorer-node--open': isOpen, 'explorer-node--trash': node.isTrash, 'explorer-node--temp': node.isTemp, 'explorer-node--drag-target': isDragTargetFolder}" @dragover.prevent @dragenter.stop="node.noDrop || setDragTarget(node.item.id)" @dragleave.stop="isDragTarget && setDragTargetId()" @drop.prevent.stop="onDrop" @contextmenu="onContextMenu">
<div class="explorer-node__item-editor" v-if="isEditing" :style="{paddingLeft: leftPadding}" draggable="true" @dragstart.stop.prevent>
<input type="text" class="text-input" v-focus @blur="submitEdit()" @keydown.stop @keydown.enter="submitEdit()" @keydown.esc="submitEdit(true)" v-model="editingNodeName">
</div>
<div class="explorer-node__item" v-else :class="['explorer-node__item--' + node.item.type]" :style="{paddingLeft: leftPadding}" @click="select()" draggable="true" @dragstart.stop="setDragSourceId" @dragend.stop="setDragTargetId()">
<div class="explorer-node__item" v-else :style="{paddingLeft: leftPadding}" @click="select()" draggable="true" @dragstart.stop="setDragSourceId" @dragend.stop="setDragTargetId()">
{{node.item.name}}
<icon-provider class="explorer-node__location" v-for="location in node.locations" :key="location.id" :provider-id="location.providerId"></icon-provider>
</div>
<div class="explorer-node__children" v-if="node.isFolder && isOpen">
<explorer-node v-for="node in node.folders" :key="node.item.id" :node="node" :depth="depth + 1"></explorer-node>
<div v-if="newChild" class="explorer-node__new-child" :class="['explorer-node__new-child--' + newChild.item.type]" :style="{paddingLeft: childLeftPadding}">
<div v-if="newChild" class="explorer-node__new-child" :class="{'explorer-node__new-child--folder': newChild.isFolder}" :style="{paddingLeft: childLeftPadding}">
<input type="text" class="text-input" v-focus @blur="submitNewChild()" @keydown.stop @keydown.enter="submitNewChild()" @keydown.esc="submitNewChild(true)" v-model.trim="newChildName">
</div>
<explorer-node v-for="node in node.files" :key="node.item.id" :node="node" :depth="depth + 1"></explorer-node>
@ -227,17 +227,25 @@ $item-font-size: 14px;
}
}
.explorer-node__item--folder,
.explorer-node__item-editor--folder,
.explorer-node--trash,
.explorer-node--temp {
color: rgba(0, 0, 0, 0.5);
}
.explorer-node--folder > .explorer-node__item,
.explorer-node--folder > .explorer-node__item-editor,
.explorer-node__new-child--folder {
&::before {
content: '▹';
position: absolute;
margin-left: -13px;
}
}
.explorer-node--open > & {
content: '▾';
}
.explorer-node--folder.explorer-node--open > .explorer-node__item,
.explorer-node--folder.explorer-node--open > .explorer-node__item-editor {
&::before {
content: '▾';
}
}

View File

@ -1,5 +1,5 @@
<template>
<div class="modal" v-if="config" @keydown.esc="onEscape" @keydown.tab="onTab">
<div class="modal" v-if="config" @keydown.esc="onEscape" @keydown.tab="onTab" @focusin="onFocusInOut" @focusout="onFocusInOut">
<component v-if="currentModalComponent" :is="currentModalComponent"></component>
<modal-inner v-else aria-label="Dialog">
<div class="modal__content" v-html="simpleModal.contentHtml(config)"></div>
@ -138,8 +138,8 @@ export default {
const isFocusIn = evt.type === 'focusin';
if (evt.target.parentNode && evt.target.parentNode.parentNode) {
// Focus effect
if (evt.target.parentNode.classList.contains('form-entry__field') &&
evt.target.parentNode.parentNode.classList.contains('form-entry')) {
if (evt.target.parentNode.classList.contains('form-entry__field')
&& evt.target.parentNode.parentNode.classList.contains('form-entry')) {
evt.target.parentNode.parentNode.classList.toggle('form-entry--focused', isFocusIn);
}
}
@ -159,19 +159,15 @@ export default {
mounted() {
this.$watch(
() => this.config,
() => {
if (this.$el) {
window.addEventListener('focusin', this.onFocusInOut);
window.addEventListener('focusout', this.onFocusInOut);
(isOpen) => {
if (isOpen) {
const tabbables = getTabbables(this.$el);
if (tabbables[0]) {
tabbables[0].focus();
}
} else {
window.removeEventListener('focusin', this.onFocusInOut);
window.removeEventListener('focusout', this.onFocusInOut);
}
},
{ immediate: true },
);
},
};

View File

@ -10,7 +10,7 @@ export default {
props: ['userId'],
computed: {
url() {
const userInfo = this.$store.state.userInfo.itemMap[this.userId];
const userInfo = this.$store.state.userInfo.itemsById[this.userId];
return userInfo && userInfo.imageUrl && `url('${userInfo.imageUrl}')`;
},
},

View File

@ -9,7 +9,7 @@ export default {
props: ['userId'],
computed: {
name() {
const userInfo = this.$store.state.userInfo.itemMap[this.userId];
const userInfo = this.$store.state.userInfo.itemsById[this.userId];
return userInfo ? userInfo.name : 'Someone';
},
},

View File

@ -89,7 +89,7 @@ export default {
async templates() {
try {
const { templates } = await this.$store.dispatch('modal/open', 'templates');
this.$store.dispatch('data/setTemplates', templates);
this.$store.dispatch('data/setTemplatesById', templates);
} catch (e) {
// Cancel
}

View File

@ -146,22 +146,22 @@ export default {
return this.$store.getters['file/current'].name;
},
googleDriveTokens() {
return tokensToArray(this.$store.getters['data/googleTokens'], token => token.isDrive);
return tokensToArray(this.$store.getters['data/googleTokensBySub'], token => token.isDrive);
},
dropboxTokens() {
return tokensToArray(this.$store.getters['data/dropboxTokens']);
return tokensToArray(this.$store.getters['data/dropboxTokensBySub']);
},
githubTokens() {
return tokensToArray(this.$store.getters['data/githubTokens']);
return tokensToArray(this.$store.getters['data/githubTokensBySub']);
},
wordpressTokens() {
return tokensToArray(this.$store.getters['data/wordpressTokens']);
return tokensToArray(this.$store.getters['data/wordpressTokensBySub']);
},
bloggerTokens() {
return tokensToArray(this.$store.getters['data/googleTokens'], token => token.isBlogger);
return tokensToArray(this.$store.getters['data/googleTokensBySub'], token => token.isBlogger);
},
zendeskTokens() {
return tokensToArray(this.$store.getters['data/zendeskTokens']);
return tokensToArray(this.$store.getters['data/zendeskTokensBySub']);
},
noToken() {
return !this.googleDriveTokens.length

View File

@ -122,13 +122,13 @@ export default {
return this.$store.getters['file/current'].name;
},
googleDriveTokens() {
return tokensToArray(this.$store.getters['data/googleTokens'], token => token.isDrive);
return tokensToArray(this.$store.getters['data/googleTokensBySub'], token => token.isDrive);
},
dropboxTokens() {
return tokensToArray(this.$store.getters['data/dropboxTokens']);
return tokensToArray(this.$store.getters['data/dropboxTokensBySub']);
},
githubTokens() {
return tokensToArray(this.$store.getters['data/githubTokens']);
return tokensToArray(this.$store.getters['data/githubTokensBySub']);
},
noToken() {
return !this.googleDriveTokens.length

View File

@ -1,6 +1,6 @@
<template>
<div class="side-bar__panel side-bar__panel--menu">
<div class="workspace" v-for="(workspace, id) in sanitizedWorkspaces" :key="id">
<div class="workspace" v-for="(workspace, id) in sanitizedWorkspacesById" :key="id">
<menu-entry :href="workspace.url" target="_blank">
<icon-provider slot="icon" :provider-id="workspace.providerId"></icon-provider>
<div class="workspace__name"><div class="menu-entry__label" v-if="currentWorkspace === workspace">current</div>{{workspace.name}}</div>
@ -37,7 +37,7 @@ export default {
},
computed: {
...mapGetters('data', [
'sanitizedWorkspaces',
'sanitizedWorkspacesById',
]),
...mapGetters('workspace', [
'currentWorkspace',

View File

@ -4,7 +4,7 @@
<p>Please choose a template for your <b>HTML export</b>.</p>
<form-entry label="Template">
<select class="textfield" slot="field" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>
@ -41,7 +41,7 @@ export default modalTemplate({
const currentFile = this.$store.getters['file/current'];
const html = await exportSvc.applyTemplate(
currentFile.id,
this.allTemplates[selectedTemplate],
this.allTemplatesById[selectedTemplate],
);
this.result = html;
}, 10);
@ -60,7 +60,7 @@ export default modalTemplate({
const { config } = this;
const currentFile = this.$store.getters['file/current'];
config.resolve();
exportSvc.exportToDisk(currentFile.id, 'html', this.allTemplates[this.selectedTemplate]);
exportSvc.exportToDisk(currentFile.id, 'html', this.allTemplatesById[this.selectedTemplate]);
},
},
});

View File

@ -36,7 +36,7 @@ export default modalTemplate({
}),
computed: {
googlePhotosTokens() {
const googleTokens = this.$store.getters['data/googleTokens'];
const googleTokens = this.$store.getters['data/googleTokensBySub'];
return Object.entries(googleTokens)
.map(([, token]) => token)
.filter(token => token.isPhotos)

View File

@ -4,7 +4,7 @@
<p>Please choose a template for your <b>PDF export</b>.</p>
<form-entry label="Template">
<select class="textfield" slot="field" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>
@ -45,7 +45,7 @@ export default modalTemplate({
sponsorSvc.getToken(),
exportSvc.applyTemplate(
currentFile.id,
this.allTemplates[this.selectedTemplate],
this.allTemplatesById[this.selectedTemplate],
true,
),
]));

View File

@ -91,11 +91,11 @@ export default {
},
created() {
this.$watch(
() => this.$store.getters['data/allTemplates'],
(allTemplates) => {
() => this.$store.getters['data/allTemplatesById'],
(allTemplatesById) => {
const templates = {};
// Sort templates by name
Object.entries(allTemplates)
Object.entries(allTemplatesById)
.sort(([, template1], [, template2]) => collator.compare(template1.name, template2.name))
.forEach(([id, template]) => {
const templateClone = utils.deepCopy(template);

View File

@ -1,7 +1,7 @@
<template>
<modal-inner class="modal__inner-1--workspace-management" aria-label="Manage workspaces">
<div class="modal__content">
<div class="workspace-entry flex flex--row flex--align-center" v-for="(workspace, id) in sanitizedWorkspaces" :key="id">
<div class="workspace-entry flex flex--row flex--align-center" v-for="(workspace, id) in sanitizedWorkspacesById" :key="id">
<div class="workspace-entry__icon flex flex--column flex--center">
<icon-provider :provider-id="workspace.providerId"></icon-provider>
</div>
@ -48,8 +48,8 @@ export default {
'config',
]),
...mapGetters('data', [
'workspaces',
'sanitizedWorkspaces',
'workspacesById',
'sanitizedWorkspacesById',
]),
...mapGetters('workspace', [
'mainWorkspace',
@ -59,12 +59,12 @@ export default {
methods: {
edit(id) {
this.editedId = id;
this.editingName = this.workspaces[id].name;
this.editingName = this.workspacesById[id].name;
},
submitEdit(cancel) {
const workspace = this.workspaces[this.editedId];
const workspace = this.workspacesById[this.editedId];
if (workspace && !cancel && this.editingName) {
this.$store.dispatch('data/patchWorkspaces', {
this.$store.dispatch('data/patchWorkspacesById', {
[this.editedId]: {
...workspace,
name: this.editingName,

View File

@ -5,7 +5,7 @@
<icon-close></icon-close>
</button>
<div class="modal__sponsor-button" v-if="showSponsorButton">
StackEdit is <a class="not-tabbable" target="_blank" href="https://github.com/benweet/stackedit/">open source</a>. Please consider
StackEdit is <a class="not-tabbable" target="_blank" href="https://github.com/benweet/stackedit/">open source</a>, please consider
<a class="not-tabbable" href="javascript:void(0)" @click="sponsor">sponsoring</a> for just $5.
</div>
<slot></slot>
@ -51,11 +51,11 @@ export default {
.modal__close-button {
position: absolute;
top: 8px;
right: 8px;
top: 7px;
right: 7px;
color: rgba(0, 0, 0, 0.5);
width: 30px;
height: 30px;
width: 28px;
height: 28px;
padding: 2px;
&:active,

View File

@ -52,15 +52,15 @@ export default (desc) => {
},
};
if (key === 'selectedTemplate') {
component.computed.allTemplates = () => {
const allTemplates = store.getters['data/allTemplates'];
const sortedTemplates = {};
Object.entries(allTemplates)
component.computed.allTemplatesById = () => {
const allTemplatesById = store.getters['data/allTemplatesById'];
const sortedTemplatesById = {};
Object.entries(allTemplatesById)
.sort(([, template1], [, template2]) => collator.compare(template1.name, template2.name))
.forEach(([templateId, template]) => {
sortedTemplates[templateId] = template;
sortedTemplatesById[templateId] = template;
});
return sortedTemplates;
return sortedTemplatesById;
};
// Make use of `function` to have `this` bound to the component
component.methods.configureTemplates = async function () { // eslint-disable-line func-names
@ -68,7 +68,7 @@ export default (desc) => {
type: 'templates',
selectedId: this.selectedTemplate,
});
store.dispatch('data/setTemplates', templates);
store.dispatch('data/setTemplatesById', templates);
store.dispatch('data/patchLocalSettings', {
[id]: selectedId,
});

View File

@ -16,7 +16,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -16,7 +16,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -45,7 +45,7 @@ export default modalTemplate({
name: this.name,
password: this.password,
};
this.$store.dispatch('data/setCouchdbToken', token);
this.$store.dispatch('data/addCouchdbToken', token);
this.config.resolve();
}
},

View File

@ -14,7 +14,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -23,7 +23,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -8,7 +8,7 @@
<div class="form-entry">
<div class="form-entry__checkbox">
<label>
<input type="checkbox" v-model="repoFullAccess"> Grant access to my <b>private repositories</b>
<input type="checkbox" v-model="repoFullAccess"> Grant access to your private repositories
</label>
</div>
</div>

View File

@ -26,7 +26,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -34,7 +34,7 @@
</div>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -17,7 +17,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -22,7 +22,7 @@
</form-entry>
<form-entry label="Template">
<select slot="field" class="textfield" v-model="selectedTemplate" @keydown.enter="resolve()">
<option v-for="(template, id) in allTemplates" :key="id" :value="id">
<option v-for="(template, id) in allTemplatesById" :key="id" :value="id">
{{ template.name }}
</option>
</select>

View File

@ -1,6 +1,6 @@
export default () => ({
main: {
name: 'Main workspace',
// The rest will be filled by the data/sanitizedWorkspaces getter
// The rest will be filled by the data/sanitizedWorkspacesById getter
},
});

View File

@ -82,7 +82,7 @@ export default {
if (doClose) {
// Close the current file by opening the last opened, not deleted one
store.getters['data/lastOpenedIds'].some((id) => {
const file = store.state.file.itemMap[id];
const file = store.state.file.itemsById[id];
if (file.parentId === 'trash') {
return false;
}

View File

@ -46,7 +46,7 @@ export default {
value: '{{{files.0.content.text}}}',
helpers: '',
}, pdf = false) {
const file = store.state.file.itemMap[fileId];
const file = store.state.file.itemsById[fileId];
const content = await localDbSvc.loadItem(`${fileId}/content`);
const properties = utils.computeProperties(content.properties);
const options = extensionSvc.getOptions(properties);
@ -114,7 +114,7 @@ export default {
* Export a file to disk.
*/
async exportToDisk(fileId, type, template) {
const file = store.state.file.itemMap[fileId];
const file = store.state.file.itemsById[fileId];
const html = await this.applyTemplate(fileId, template);
const blob = new Blob([html], {
type: 'text/plain;charset=utf-8',

View File

@ -43,9 +43,9 @@ export default {
// Check if there is already a file with that path
if (workspaceUniquePaths) {
const parentPath = store.getters.itemPaths[item.parentId] || '';
const parentPath = store.getters.pathsByItemId[item.parentId] || '';
const path = parentPath + item.name;
if (store.getters.pathItems[path]) {
if (store.getters.itemsByPath[path]) {
await store.dispatch('modal/open', {
type: 'pathConflict',
item,
@ -62,7 +62,7 @@ export default {
}
// Return the new file item
return store.state.file.itemMap[id];
return store.state.file.itemsById[id];
},
/**
@ -88,12 +88,13 @@ export default {
item,
});
}
// Check if there is a path conflict
if (store.getters['workspace/hasUniquePaths']) {
const parentPath = store.getters.itemPaths[item.parentId] || '';
const parentPath = store.getters.pathsByItemId[item.parentId] || '';
const path = parentPath + sanitizedName;
const pathItems = store.getters.pathItems[path] || [];
if (pathItems.some(itemWithSamePath => itemWithSamePath.id !== id)) {
const items = store.getters.itemsByPath[path] || [];
if (items.some(itemWithSamePath => itemWithSamePath.id !== id)) {
await store.dispatch('modal/open', {
type: 'pathConflict',
item,
@ -112,7 +113,7 @@ export default {
*/
setOrPatchItem(patch) {
const item = {
...store.getters.allItemMap[patch.id] || patch,
...store.getters.allItemsById[patch.id] || patch,
};
if (!item.id) {
return null;
@ -136,7 +137,7 @@ export default {
this.makePathUnique(item.id);
}
return store.getters.allItemMap[item.id];
return store.getters.allItemsById[item.id];
},
/**
@ -160,12 +161,15 @@ export default {
},
/**
* Ensure two files/folders don't have the same path if the workspace doesn't support it.
* Ensure two files/folders don't have the same path if the workspace doesn't allow it.
*/
ensureUniquePaths() {
ensureUniquePaths(idsToKeep = {}) {
if (store.getters['workspace/hasUniquePaths']) {
if (Object.keys(store.getters.itemPaths).some(id => this.makePathUnique(id))) {
this.ensureUniquePaths();
if (Object.keys(store.getters.pathsByItemId)
.some(id => !idsToKeep[id] && this.makePathUnique(id))
) {
// Just changed one item path, restart
this.ensureUniquePaths(idsToKeep);
}
}
},
@ -175,13 +179,13 @@ export default {
* Add a prefix to its name and return true otherwise.
*/
makePathUnique(id) {
const { pathItems, allItemMap, itemPaths } = store.getters;
const item = allItemMap[id];
const { itemsByPath, allItemsById, pathsByItemId } = store.getters;
const item = allItemsById[id];
if (!item) {
return false;
}
let path = itemPaths[id];
if (pathItems[path].length === 1) {
let path = pathsByItemId[id];
if (itemsByPath[path].length === 1) {
return false;
}
const isFolder = item.type === 'folder';
@ -190,11 +194,11 @@ export default {
path = path.slice(0, -1);
}
for (let suffix = 1; ; suffix += 1) {
let pathWithPrefix = `${path}.${suffix}`;
let pathWithSuffix = `${path}.${suffix}`;
if (isFolder) {
pathWithPrefix += '/';
pathWithSuffix += '/';
}
if (!pathItems[pathWithPrefix]) {
if (!itemsByPath[pathWithSuffix]) {
store.commit(`${item.type}/patchItem`, {
id: item.id,
name: `${item.name}.${suffix}`,

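The ensureUniquePaths/makePathUnique change above keeps retrying until no two items share a path, skipping the IDs passed in idsToKeep and appending a numeric suffix to the conflicting item's name. A standalone sketch of the suffixing idea (hypothetical helper, not the service code):

// Resolve a path collision by appending ".1", ".2", ... until the path is free.
const makeUniquePath = (path, itemsByPath) => {
  // No conflict: keep the path as is.
  if (!itemsByPath[path] || itemsByPath[path].length <= 1) {
    return path;
  }
  for (let suffix = 1; ; suffix += 1) {
    const candidate = `${path}.${suffix}`;
    if (!itemsByPath[candidate]) {
      return candidate;
    }
  }
};

// Two items resolve to "folder/notes": the conflicting one becomes "folder/notes.1".
console.log(makeUniquePath('folder/notes', { 'folder/notes': [{ id: 'a' }, { id: 'b' }] }));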
View File

@ -123,7 +123,7 @@ const localDbSvc = {
}
// Write item if different from stored one
const item = store.state.data.lsItemMap[id];
const item = store.state.data.lsItemsById[id];
if (item && item.hash !== lsHashMap[id]) {
localStorage.setItem(key, JSON.stringify(item));
lsHashMap[id] = item.hash;
@ -178,7 +178,7 @@ const localDbSvc = {
changes.push(item);
cursor.continue();
} else {
const storeItemMap = { ...store.getters.allItemMap };
const storeItemMap = { ...store.getters.allItemsById };
changes.forEach((item) => {
this.readDbItem(item, storeItemMap);
// If item is an old delete marker, remove it from the DB
@ -213,7 +213,7 @@ const localDbSvc = {
checker = cb => (id) => {
if (!storeItemMap[id]) {
const [fileId] = id.split('/');
if (!store.state.file.itemMap[fileId]) {
if (!store.state.file.itemsById[fileId]) {
cb(id);
}
}
@ -277,7 +277,7 @@ const localDbSvc = {
*/
async loadItem(id) {
// Check if item is in the store
const itemInStore = store.getters.allItemMap[id];
const itemInStore = store.getters.allItemsById[id];
if (itemInStore) {
// Use deepCopy to freeze item
return Promise.resolve(itemInStore);
@ -326,11 +326,11 @@ const localDbSvc = {
* Drop the database and clean the localStorage for the specified workspaceId.
*/
async removeWorkspace(id) {
const workspaces = {
...store.getters['data/workspaces'],
const workspacesById = {
...store.getters['data/workspacesById'],
};
delete workspaces[id];
store.dispatch('data/setWorkspaces', workspaces);
delete workspacesById[id];
store.dispatch('data/setWorkspacesById', workspacesById);
this.syncLocalStorage();
await new Promise((resolve, reject) => {
const dbName = getDbName(id);
@ -348,7 +348,7 @@ const localDbSvc = {
async init() {
// Reset the app if reset flag was passed
if (resetApp) {
await Promise.all(Object.keys(store.getters['data/workspaces'])
await Promise.all(Object.keys(store.getters['data/workspacesById'])
.map(workspaceId => localDbSvc.removeWorkspace(workspaceId)));
utils.localStorageDataIds.forEach((id) => {
// Clean data stored in localStorage
@ -366,7 +366,7 @@ const localDbSvc = {
// If exportWorkspace parameter was provided
if (exportWorkspace) {
const backup = JSON.stringify(store.getters.allItemMap);
const backup = JSON.stringify(store.getters.allItemsById);
const blob = new Blob([backup], {
type: 'text/plain;charset=utf-8',
});
@ -405,7 +405,7 @@ const localDbSvc = {
// Force check sponsorship after a few seconds
const currentDate = Date.now();
if (sponsorToken && sponsorToken.expiresOn > currentDate - checkSponsorshipAfter) {
store.dispatch('data/setGoogleToken', {
store.dispatch('data/addGoogleToken', {
...sponsorToken,
expiresOn: currentDate - checkSponsorshipAfter,
});

View File

@ -5,7 +5,7 @@ import Provider from './common/Provider';
export default new Provider({
id: 'bloggerPage',
getToken(location) {
const token = store.getters['data/googleTokens'][location.sub];
const token = store.getters['data/googleTokensBySub'][location.sub];
return token && token.isBlogger ? token : null;
},
getUrl(location) {

View File

@ -5,7 +5,7 @@ import Provider from './common/Provider';
export default new Provider({
id: 'blogger',
getToken(location) {
const token = store.getters['data/googleTokens'][location.sub];
const token = store.getters['data/googleTokensBySub'][location.sub];
return token && token.isBlogger ? token : null;
},
getUrl(location) {

View File

@ -7,6 +7,9 @@ import fileSvc from '../../fileSvc';
const dataExtractor = /<!--stackedit_data:([A-Za-z0-9+/=\s]+)-->$/;
export default class Provider {
prepareChanges = changes => changes
onChangesApplied = () => {}
constructor(props) {
Object.assign(this, props);
providerRegistry.register(this);
@ -41,7 +44,7 @@ export default class Provider {
* Parse content serialized with serializeContent()
*/
static parseContent(serializedContent, id) {
const result = utils.deepCopy(store.state.content.itemMap[id]) || emptyContent(id);
const result = utils.deepCopy(store.state.content.itemsById[id]) || emptyContent(id);
result.text = utils.sanitizeText(serializedContent);
result.history = [];
const extractedData = dataExtractor.exec(serializedContent);
@ -82,7 +85,7 @@ export default class Provider {
const location = utils.search(allLocations, criteria);
if (location) {
// Found one, open it if it exists
const item = store.state.file.itemMap[location.fileId];
const item = store.state.file.itemsById[location.fileId];
if (item) {
store.commit('file/setCurrentId', item.id);
// If file is in the trash, restore it

View File

@ -17,12 +17,12 @@ export default new Provider({
dbUrl,
};
const workspaceId = utils.makeWorkspaceId(workspaceParams);
const getToken = () => store.getters['data/couchdbTokens'][workspaceId];
const getWorkspace = () => store.getters['data/sanitizedWorkspaces'][workspaceId];
const getToken = () => store.getters['data/couchdbTokensBySub'][workspaceId];
const getWorkspace = () => store.getters['data/sanitizedWorkspacesById'][workspaceId];
if (!getToken()) {
// Create token
store.dispatch('data/setCouchdbToken', {
store.dispatch('data/addCouchdbToken', {
sub: workspaceId,
dbUrl,
});
@ -38,7 +38,7 @@ export default new Provider({
} catch (e) {
throw new Error(`${dbUrl} is not accessible. Make sure you have the proper permissions.`);
}
store.dispatch('data/patchWorkspaces', {
store.dispatch('data/patchWorkspacesById', {
[workspaceId]: {
id: workspaceId,
name: db.db_name,
@ -52,7 +52,7 @@ export default new Provider({
// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
store.dispatch('data/patchWorkspacesById', {
[workspace.id]: {
...workspace,
url: window.location.href,
@ -91,7 +91,7 @@ export default new Provider({
syncLastSeq,
});
},
async saveWorkspaceItem(item, syncData) {
async saveWorkspaceItem({ item, syncData }) {
const syncToken = store.getters['workspace/syncToken'];
const { id, rev } = couchdbHelper.uploadDocument({
token: syncToken,
@ -108,24 +108,24 @@ export default new Provider({
rev,
};
},
removeWorkspaceItem(syncData) {
removeWorkspaceItem({ syncData }) {
const syncToken = store.getters['workspace/syncToken'];
return couchdbHelper.removeDocument(syncToken, syncData.id, syncData.rev);
},
async downloadWorkspaceContent(token, syncData) {
const body = await couchdbHelper.retrieveDocumentWithAttachments(token, syncData.id);
async downloadWorkspaceContent({ token, contentSyncData }) {
const body = await couchdbHelper.retrieveDocumentWithAttachments(token, contentSyncData.id);
const rev = body._rev; // eslint-disable-line no-underscore-dangle
const item = Provider.parseContent(body.attachments.data, body.item.id);
const content = Provider.parseContent(body.attachments.data, body.item.id);
return {
item,
syncData: {
...syncData,
hash: item.hash,
content,
contentSyncData: {
...contentSyncData,
hash: content.hash,
rev,
},
};
},
async downloadWorkspaceData(token, dataId, syncData) {
async downloadWorkspaceData({ token, syncData }) {
if (!syncData) {
return {};
}
@ -142,30 +142,32 @@ export default new Provider({
},
};
},
async uploadWorkspaceContent(token, item, syncData) {
async uploadWorkspaceContent({ token, content, contentSyncData }) {
const res = await couchdbHelper.uploadDocument({
token,
item: {
id: item.id,
type: item.type,
hash: item.hash,
id: content.id,
type: content.type,
hash: content.hash,
},
data: Provider.serializeContent(item),
data: Provider.serializeContent(content),
dataType: 'text/plain',
documentId: syncData && syncData.id,
rev: syncData && syncData.rev,
documentId: contentSyncData && contentSyncData.id,
rev: contentSyncData && contentSyncData.rev,
});
// Return new sync data
return {
id: res.id,
itemId: item.id,
type: item.type,
hash: item.hash,
rev: res.rev,
contentSyncData: {
id: res.id,
itemId: content.id,
type: content.type,
hash: content.hash,
rev: res.rev,
},
};
},
async uploadWorkspaceData(token, item, syncData) {
async uploadWorkspaceData({ token, item, syncData }) {
const res = await couchdbHelper.uploadDocument({
token,
item: {
@ -181,11 +183,13 @@ export default new Provider({
// Return new sync data
return {
id: res.id,
itemId: item.id,
type: item.type,
hash: item.hash,
rev: res.rev,
syncData: {
id: res.id,
itemId: item.id,
type: item.type,
hash: item.hash,
rev: res.rev,
},
};
},
async listRevisions(token, fileId) {

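In the CouchDB provider above (and in the other workspace providers below), methods such as saveWorkspaceItem, removeWorkspaceItem and uploadWorkspaceContent now take a single destructured options object instead of positional arguments, and return their sync data under named keys (contentSyncData, fileSyncData, syncData). A hedged sketch of that calling convention with a fake backend, illustrative only:

// Stand-in for a real API; returns a generated id and revision.
const fakeBackend = {
  async upload({ documentId, data }) {
    return { id: documentId || `doc-${data.length}`, rev: '1-abc' };
  },
};

const providerSketch = {
  // One options object in, named sync-data objects out.
  async uploadWorkspaceContent({ token, content, contentSyncData }) {
    const res = await fakeBackend.upload({
      documentId: contentSyncData && contentSyncData.id,
      data: JSON.stringify(content),
    });
    // Sync data comes back under an explicit key so a provider can also
    // return a separate fileSyncData entry when it needs one.
    return {
      contentSyncData: {
        id: res.id,
        itemId: content.id,
        type: content.type,
        hash: content.hash,
        rev: res.rev,
      },
    };
  },
};

providerSketch
  .uploadWorkspaceContent({ token: {}, content: { id: 'c1/content', type: 'content', hash: 123 } })
  .then(({ contentSyncData }) => console.log(contentSyncData));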
View File

@ -20,7 +20,7 @@ const makePathRelative = (token, path) => {
export default new Provider({
id: 'dropbox',
getToken(location) {
return store.getters['data/dropboxTokens'][location.sub];
return store.getters['data/dropboxTokensBySub'][location.sub];
},
getUrl(location) {
const pathComponents = location.path.split('/').map(encodeURIComponent);

View File

@ -6,7 +6,7 @@ import utils from '../utils';
export default new Provider({
id: 'gist',
getToken(location) {
return store.getters['data/githubTokens'][location.sub];
return store.getters['data/githubTokensBySub'][location.sub];
},
getUrl(location) {
return `https://gist.github.com/${location.gistId}`;
@ -23,7 +23,7 @@ export default new Provider({
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
async uploadContent(token, content, syncLocation) {
const file = store.state.file.itemMap[syncLocation.fileId];
const file = store.state.file.itemsById[syncLocation.fileId];
const description = utils.sanitizeName(file && file.name);
const gist = await githubHelper.uploadGist({
...syncLocation,

View File

@ -9,7 +9,7 @@ const savedSha = {};
export default new Provider({
id: 'github',
getToken(location) {
return store.getters['data/githubTokens'][location.sub];
return store.getters['data/githubTokensBySub'][location.sub];
},
getUrl(location) {
return `https://github.com/${encodeURIComponent(location.owner)}/${encodeURIComponent(location.repo)}/blob/${encodeURIComponent(location.branch)}/${encodeURIComponent(location.path)}`;
@ -20,12 +20,12 @@ export default new Provider({
},
async downloadContent(token, syncLocation) {
try {
const { sha, content } = await githubHelper.downloadFile({
const { sha, data } = await githubHelper.downloadFile({
...syncLocation,
token,
});
savedSha[syncLocation.id] = sha;
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
return Provider.parseContent(data, `${syncLocation.fileId}/content`);
} catch (e) {
// Ignore error, upload is going to fail anyway
return null;

View File

@ -28,7 +28,6 @@ const endsWith = (str, suffix) => str.slice(-suffix.length) === suffix;
export default new Provider({
id: 'githubWorkspace',
isGit: true,
getToken() {
return store.getters['workspace/syncToken'];
},
@ -47,13 +46,13 @@ export default new Provider({
workspaceParams.path = path;
}
const workspaceId = utils.makeWorkspaceId(workspaceParams);
let workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];
let workspace = store.getters['data/sanitizedWorkspacesById'][workspaceId];
// See if we already have a token
let token;
if (workspace) {
// Token sub is in the workspace
token = store.getters['data/githubTokens'][workspace.sub];
token = store.getters['data/githubTokensBySub'][workspace.sub];
}
if (!token) {
await store.dispatch('modal/open', { type: 'githubAccount' });
@ -74,27 +73,27 @@ export default new Provider({
// Fix the URL hash
utils.setQueryParams(workspaceParams);
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
store.dispatch('data/patchWorkspacesById', {
[workspaceId]: {
...workspace,
url: window.location.href,
},
});
}
return store.getters['data/sanitizedWorkspaces'][workspaceId];
return store.getters['data/sanitizedWorkspacesById'][workspaceId];
},
async getChanges() {
getChanges() {
const syncToken = store.getters['workspace/syncToken'];
const { owner, repo, branch } = getWorkspaceWithOwner();
const tree = await githubHelper.getTree({
return githubHelper.getTree({
token: syncToken,
owner,
repo,
branch,
});
},
prepareChanges(tree) {
const workspacePath = store.getters['workspace/currentWorkspace'].path || '';
const syncDataByPath = store.getters['data/syncData'];
const syncDataByItemId = store.getters['data/syncDataByItemId'];
// Store all blobs sha
treeShaMap = Object.create(null);
@ -136,9 +135,20 @@ export default new Provider({
const changes = [];
const pathIds = {};
const syncDataToKeep = Object.create(null);
const syncDataByPath = store.getters['data/syncDataById'];
const { itemsByGitPath } = store.getters;
const getId = (path) => {
const syncData = syncDataByPath[path];
const id = syncData ? syncData.itemId : utils.uid();
const existingItem = itemsByGitPath[path];
// Use the item ID only if the item was already synced
if (existingItem && syncDataByPath[path]) {
pathIds[path] = existingItem.id;
return existingItem.id;
}
// Generate a new ID
let id = utils.uid();
if (path[0] === '/') {
id += '/content';
}
pathIds[path] = id;
return id;
};
@ -146,9 +156,8 @@ export default new Provider({
// Folder creations/updates
// Assume map entries are sorted from top to bottom
Object.entries(treeFolderMap).forEach(([path, parentPath]) => {
const id = getId(path);
const item = utils.addItemHash({
id,
id: getId(path),
type: 'folder',
name: path.slice(parentPath.length, -1),
parentId: pathIds[parentPath] || null,
@ -158,18 +167,22 @@ export default new Provider({
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
});
// File creations/updates
// File/content creations/updates
Object.entries(treeFileMap).forEach(([path, parentPath]) => {
const id = getId(path);
// Look for content sync data as it's created before file sync data
const contentPath = `/${path}`;
const contentId = getId(contentPath);
// File creations/updates
const [fileId] = contentId.split('/');
const item = utils.addItemHash({
id,
id: fileId,
type: 'file',
name: path.slice(parentPath.length, -'.md'.length),
parentId: pathIds[parentPath] || null,
@ -179,31 +192,31 @@ export default new Provider({
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
// Content creations/updates
const contentSyncData = syncDataByItemId[`${id}/content`];
const contentSyncData = syncDataByPath[contentPath];
if (contentSyncData) {
syncDataToKeep[contentSyncData.id] = true;
syncDataToKeep[path] = true;
syncDataToKeep[contentPath] = true;
}
if (!contentSyncData || contentSyncData.sha !== treeShaMap[path]) {
const type = 'content';
// Use `/` as a prefix to get a unique syncData id
changes.push({
syncDataId: `/${path}`,
syncDataId: contentPath,
item: {
id: `${id}/content`,
type: 'content',
// Need a truthy value to force saving sync data
id: contentId,
type,
// Need a truthy value to force downloading the content
hash: 1,
},
syncData: {
id: `/${path}`,
itemId: `${id}/content`,
type: 'content',
id: contentPath,
type,
// Need a truthy value to force downloading the content
hash: 1,
},
@ -212,35 +225,34 @@ export default new Provider({
});
// Data creations/updates
const syncDataByItemId = store.getters['data/syncDataByItemId'];
Object.keys(treeDataMap).forEach((path) => {
try {
// Only template data are stored
const [, id] = path.match(/^\.stackedit-data\/(templates)\.json$/) || [];
// Only template data are stored
const [, id] = path.match(/^\.stackedit-data\/(templates)\.json$/) || [];
if (id) {
pathIds[path] = id;
const syncData = syncDataByItemId[id];
if (syncData) {
syncDataToKeep[syncData.id] = true;
}
if (!syncData || syncData.sha !== treeShaMap[path]) {
const type = 'data';
changes.push({
syncDataId: path,
item: {
id,
type: 'data',
type,
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: path,
itemId: id,
type: 'data',
type,
// Need a truthy value to force downloading the content
hash: 1,
},
});
}
} catch (e) {
// Ignore parsing errors
}
});
@ -255,12 +267,18 @@ export default new Provider({
pathMatcher: /^([\s\S]+)\.([\w-]+)\.publish$/,
}]
.forEach(({ type, map, pathMatcher }) => Object.keys(map).forEach((path) => {
try {
const [, filePath, data] = path.match(pathMatcher);
const [, filePath, data] = path.match(pathMatcher) || [];
if (filePath) {
// If there is a corresponding md file in the tree
const fileId = pathIds[`${filePath}.md`];
if (fileId) {
const id = getId(path);
// Reuse existing ID or create a new one
const existingItem = itemsByGitPath[path];
const id = existingItem
? existingItem.id
: utils.uid();
pathIds[path] = id;
const item = utils.addItemHash({
...JSON.parse(utils.decodeBase64(data)),
id,
@ -272,14 +290,11 @@ export default new Provider({
item,
syncData: {
id: path,
itemId: id,
type: item.type,
hash: item.hash,
},
});
}
} catch (e) {
// Ignore parsing errors
}
}));
@ -292,10 +307,9 @@ export default new Provider({
return changes;
},
async saveWorkspaceItem(item) {
async saveWorkspaceItem({ item }) {
const syncData = {
id: store.getters.itemGitPaths[item.id],
itemId: item.id,
id: store.getters.gitPathsByItemId[item.id],
type: item.type,
hash: item.hash,
};
@ -316,7 +330,7 @@ export default new Provider({
});
return syncData;
},
async removeWorkspaceItem(syncData) {
async removeWorkspaceItem({ syncData }) {
if (treeShaMap[syncData.id]) {
const syncToken = store.getters['workspace/syncToken'];
await githubHelper.removeFile({
@ -327,41 +341,40 @@ export default new Provider({
});
}
},
async downloadWorkspaceContent(token, contentSyncData) {
const [fileId] = contentSyncData.itemId.split('/');
const path = store.getters.itemGitPaths[fileId];
const syncData = store.getters['data/syncData'][path];
if (!syncData) {
return {};
}
const { sha, content } = await githubHelper.downloadFile({
async downloadWorkspaceContent({
token,
contentId,
contentSyncData,
fileSyncData,
}) {
const { sha, data } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
path: getAbsolutePath(syncData),
path: getAbsolutePath(fileSyncData),
});
treeShaMap[path] = sha;
const item = Provider.parseContent(content, `${fileId}/content`);
treeShaMap[fileSyncData.id] = sha;
const content = Provider.parseContent(data, contentId);
return {
item,
syncData: {
content,
contentSyncData: {
...contentSyncData,
hash: item.hash,
hash: content.hash,
sha,
},
};
},
async downloadWorkspaceData(token, dataId, syncData) {
async downloadWorkspaceData({ token, syncData }) {
if (!syncData) {
return {};
}
const { sha, content } = await githubHelper.downloadFile({
const { sha, data } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
path: getAbsolutePath(syncData),
});
treeShaMap[syncData.id] = sha;
const item = JSON.parse(content);
const item = JSON.parse(data);
return {
item,
syncData: {
@ -371,32 +384,36 @@ export default new Provider({
},
};
},
async uploadWorkspaceContent(token, item) {
const [fileId] = item.id.split('/');
const path = store.getters.itemGitPaths[fileId];
async uploadWorkspaceContent({ token, content, file }) {
const path = store.getters.gitPathsByItemId[file.id];
const absolutePath = `${store.getters['workspace/currentWorkspace'].path || ''}${path}`;
const res = await githubHelper.uploadFile({
...getWorkspaceWithOwner(),
token,
path: absolutePath,
content: Provider.serializeContent(item),
content: Provider.serializeContent(content),
sha: treeShaMap[path],
});
// Return new sync data
return {
id: store.getters.itemGitPaths[item.id],
itemId: item.id,
type: item.type,
hash: item.hash,
sha: res.content.sha,
contentSyncData: {
id: store.getters.gitPathsByItemId[content.id],
type: content.type,
hash: content.hash,
sha: res.content.sha,
},
fileSyncData: {
id: path,
type: 'file',
hash: file.hash,
},
};
},
async uploadWorkspaceData(token, item) {
const path = store.getters.itemGitPaths[item.id];
async uploadWorkspaceData({ token, item }) {
const path = store.getters.gitPathsByItemId[item.id];
const syncData = {
id: path,
itemId: item.id,
type: item.type,
hash: item.hash,
};
@ -409,8 +426,10 @@ export default new Provider({
});
return {
...syncData,
sha: res.content.sha,
syncData: {
...syncData,
sha: res.content.sha,
},
};
},
onSyncEnd() {
@ -458,12 +477,12 @@ export default new Provider({
},
async getRevisionContent(token, fileId, revisionId) {
const syncData = Provider.getContentSyncData(fileId);
const { content } = await githubHelper.downloadFile({
const { data } = await githubHelper.downloadFile({
...getWorkspaceWithOwner(),
token,
branch: revisionId,
path: getAbsolutePath(syncData),
});
return Provider.parseContent(content, `${fileId}/content`);
return Provider.parseContent(data, `${fileId}/content`);
},
});

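In the GitHub workspace provider above, sync data is now keyed by git path (the syncDataById getter indexed by path, with gitPathsByItemId for the reverse lookup), and a file's content entry reuses the file path with a leading '/' so the two keys never collide, as the "Use `/` as a prefix" comment notes. A small sketch of that keying scheme, with a hypothetical helper:

// Build the two sync-data entries for one markdown file (illustrative only).
const makeSyncDataEntries = (path, hash) => ({
  // File entry keyed by the repository path.
  [path]: { id: path, type: 'file', hash },
  // Content entry keyed by the same path with a '/' prefix to avoid collisions.
  [`/${path}`]: { id: `/${path}`, type: 'content', hash },
});

console.log(Object.keys(makeSyncDataEntries('docs/readme.md', 1)));
// -> [ 'docs/readme.md', '/docs/readme.md' ]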
View File

@ -15,7 +15,7 @@ export default new Provider({
// Remove the URL hash
utils.setQueryParams();
// Return the main workspace
return store.getters['data/workspaces'].main;
return store.getters['data/workspacesById'].main;
},
async getChanges() {
const syncToken = store.getters['workspace/syncToken'];
@ -48,7 +48,7 @@ export default new Provider({
syncStartPageToken,
});
},
async saveWorkspaceItem(item, syncData, ifNotTooLate) {
async saveWorkspaceItem({ item, syncData, ifNotTooLate }) {
const syncToken = store.getters['workspace/syncToken'];
const file = await googleHelper.uploadAppDataFile({
token: syncToken,
@ -64,22 +64,22 @@ export default new Provider({
hash: item.hash,
};
},
removeWorkspaceItem(syncData, ifNotTooLate) {
removeWorkspaceItem({ syncData, ifNotTooLate }) {
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.removeAppDataFile(syncToken, syncData.id, ifNotTooLate);
},
async downloadWorkspaceContent(token, syncData) {
const data = await googleHelper.downloadAppDataFile(token, syncData.id);
const item = utils.addItemHash(JSON.parse(data));
async downloadWorkspaceContent({ token, contentSyncData }) {
const data = await googleHelper.downloadAppDataFile(token, contentSyncData.id);
const content = utils.addItemHash(JSON.parse(data));
return {
item,
syncData: {
...syncData,
hash: item.hash,
content,
contentSyncData: {
...contentSyncData,
hash: content.hash,
},
};
},
async downloadWorkspaceData(token, dataId, syncData) {
async downloadWorkspaceData({ token, syncData }) {
if (!syncData) {
return {};
}
@ -94,28 +94,40 @@ export default new Provider({
},
};
},
async uploadWorkspaceContent(token, item, syncData, ifNotTooLate) {
const file = await googleHelper.uploadAppDataFile({
async uploadWorkspaceContent({
token,
content,
contentSyncData,
ifNotTooLate,
}) {
const gdriveFile = await googleHelper.uploadAppDataFile({
token,
name: JSON.stringify({
id: item.id,
type: item.type,
hash: item.hash,
id: content.id,
type: content.type,
hash: content.hash,
}),
media: JSON.stringify(item),
fileId: syncData && syncData.id,
media: JSON.stringify(content),
fileId: contentSyncData && contentSyncData.id,
ifNotTooLate,
});
// Return new sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
contentSyncData: {
id: gdriveFile.id,
itemId: content.id,
type: content.type,
hash: content.hash,
},
};
},
async uploadWorkspaceData(token, item, syncData, ifNotTooLate) {
async uploadWorkspaceData({
token,
item,
syncData,
ifNotTooLate,
}) {
const file = await googleHelper.uploadAppDataFile({
token,
name: JSON.stringify({
@ -130,10 +142,12 @@ export default new Provider({
// Return new sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
syncData: {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
},
};
},
async listRevisions(token, fileId) {

View File

@ -7,7 +7,7 @@ import fileSvc from '../fileSvc';
export default new Provider({
id: 'googleDrive',
getToken(location) {
const token = store.getters['data/googleTokens'][location.sub];
const token = store.getters['data/googleTokensBySub'][location.sub];
return token && token.isDrive ? token : null;
},
getUrl(location) {
@ -21,7 +21,7 @@ export default new Provider({
const state = googleHelper.driveState || {};
if (state.userId) {
// Try to find the token corresponding to the user ID
let token = store.getters['data/googleTokens'][state.userId];
let token = store.getters['data/googleTokensBySub'][state.userId];
// If not found or not enough permission, popup an OAuth2 window
if (!token || !token.isDrive) {
await store.dispatch('modal/open', { type: 'googleDriveAccount' });
@ -42,7 +42,7 @@ export default new Provider({
folderId,
};
const workspaceId = utils.makeWorkspaceId(workspaceParams);
const workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];
const workspace = store.getters['data/sanitizedWorkspacesById'][workspaceId];
// If we have the workspace, open it by changing the current URL
if (workspace) {
utils.setQueryParams(workspaceParams);
@ -83,7 +83,7 @@ export default new Provider({
},
async performAction() {
const state = googleHelper.driveState || {};
const token = store.getters['data/googleTokens'][state.userId];
const token = store.getters['data/googleTokensBySub'][state.userId];
switch (token && state.action) {
case 'create': {
const file = await fileSvc.createFile({}, true);
@ -106,7 +106,7 @@ export default new Provider({
return Provider.parseContent(content, `${syncLocation.fileId}/content`);
},
async uploadContent(token, content, syncLocation, ifNotTooLate) {
const file = store.state.file.itemMap[syncLocation.fileId];
const file = store.state.file.itemsById[syncLocation.fileId];
const name = utils.sanitizeName(file && file.name);
const parents = [];
if (syncLocation.driveParentId) {

View File

@ -22,7 +22,7 @@ export default new Provider({
&& utils.makeWorkspaceId(makeWorkspaceParams(folderId));
const getWorkspace = folderId =>
store.getters['data/sanitizedWorkspaces'][makeWorkspaceId(folderId)];
store.getters['data/sanitizedWorkspacesById'][makeWorkspaceId(folderId)];
const initFolder = async (token, folder) => {
const appProperties = {
@ -68,7 +68,7 @@ export default new Provider({
// Update workspace in the store
const workspaceId = makeWorkspaceId(folder.id);
store.dispatch('data/patchWorkspaces', {
store.dispatch('data/patchWorkspacesById', {
[workspaceId]: {
id: workspaceId,
sub: token.sub,
@ -86,7 +86,7 @@ export default new Provider({
// Token sub is in the workspace or in the url if workspace is about to be created
const { sub } = getWorkspace(utils.queryParams.folderId) || utils.queryParams;
// See if we already have a token
let token = store.getters['data/googleTokens'][sub];
let token = store.getters['data/googleTokensBySub'][sub];
// If no token has been found, popup an authorize window and get one
if (!token || !token.isDrive || !token.driveFullAccess) {
await store.dispatch('modal/open', 'workspaceGoogleRedirection');
@ -130,14 +130,14 @@ export default new Provider({
// Fix the URL hash
utils.setQueryParams(makeWorkspaceParams(workspace.folderId));
if (workspace.url !== window.location.href) {
store.dispatch('data/patchWorkspaces', {
store.dispatch('data/patchWorkspacesById', {
[workspace.id]: {
...workspace,
url: window.location.href,
},
});
}
return store.getters['data/sanitizedWorkspaces'][workspace.id];
return store.getters['data/sanitizedWorkspacesById'][workspace.id];
},
async performAction() {
const state = googleHelper.driveState || {};
@ -145,21 +145,21 @@ export default new Provider({
switch (token && state.action) {
case 'create': {
const driveFolder = googleHelper.driveActionFolder;
let syncData = store.getters['data/syncData'][driveFolder.id];
let syncData = store.getters['data/syncDataById'][driveFolder.id];
if (!syncData && driveFolder.appProperties.id) {
// Create folder if not already synced
store.commit('folder/setItem', {
id: driveFolder.appProperties.id,
name: driveFolder.name,
});
const item = store.state.folder.itemMap[driveFolder.appProperties.id];
const item = store.state.folder.itemsById[driveFolder.appProperties.id];
syncData = {
id: driveFolder.id,
itemId: item.id,
type: item.type,
hash: item.hash,
};
store.dispatch('data/patchSyncData', {
store.dispatch('data/patchSyncDataById', {
[syncData.id]: syncData,
});
}
@ -173,7 +173,7 @@ export default new Provider({
case 'open': {
// open first file only
const firstFile = googleHelper.driveActionFiles[0];
const syncData = store.getters['data/syncData'][firstFile.id];
const syncData = store.getters['data/syncDataById'][firstFile.id];
if (!syncData) {
fileIdToOpen = firstFile.id;
} else {
@ -191,6 +191,10 @@ export default new Provider({
const { changes, startPageToken } = await googleHelper
.getChanges(syncToken, lastStartPageToken, false, workspace.teamDriveId);
syncStartPageToken = startPageToken;
return changes;
},
prepareChanges(changes) {
// Collect possible parent IDs
const parentIds = {};
Object.entries(store.getters['data/syncDataByItemId']).forEach(([id, syncData]) => {
@ -204,6 +208,7 @@ export default new Provider({
});
// Collect changes
const workspace = store.getters['workspace/currentWorkspace'];
const result = [];
changes.forEach((change) => {
// Ignore changes on StackEdit own folders
@ -259,21 +264,23 @@ export default new Provider({
if (type === 'file') {
// create a fake change as a file content change
const id = `${appProperties.id}/content`;
const syncDataId = `${change.fileId}/content`;
contentChange = {
item: {
id: `${appProperties.id}/content`,
id,
type: 'content',
// Need a truthy value to force saving sync data
hash: 1,
},
syncData: {
id: `${change.fileId}/content`,
itemId: `${appProperties.id}/content`,
id: syncDataId,
itemId: id,
type: 'content',
// Need a truthy value to force downloading the content
hash: 1,
},
syncDataId: `${change.fileId}/content`,
syncDataId,
};
}
}
@ -288,7 +295,7 @@ export default new Provider({
};
} else {
// Item was removed
const syncData = store.getters['data/syncData'][change.fileId];
const syncData = store.getters['data/syncDataById'][change.fileId];
if (syncData && syncData.type === 'file') {
// create a fake change as a file content change
contentChange = {
@ -304,7 +311,7 @@ export default new Provider({
result.push(contentChange);
}
});
syncStartPageToken = startPageToken;
return result;
},
onChangesApplied() {
@ -312,7 +319,7 @@ export default new Provider({
syncStartPageToken,
});
},
async saveWorkspaceItem(item, syncData, ifNotTooLate) {
async saveWorkspaceItem({ item, syncData, ifNotTooLate }) {
const workspace = store.getters['workspace/currentWorkspace'];
const syncToken = store.getters['workspace/syncToken'];
let file;
@ -363,40 +370,35 @@ export default new Provider({
hash: item.hash,
};
},
async removeWorkspaceItem(syncData, ifNotTooLate) {
async removeWorkspaceItem({ syncData, ifNotTooLate }) {
// Ignore content deletion
if (syncData.type !== 'content') {
const syncToken = store.getters['workspace/syncToken'];
await googleHelper.removeFile(syncToken, syncData.id, ifNotTooLate);
}
},
async downloadWorkspaceContent(token, contentSyncData) {
const [fileId] = contentSyncData.itemId.split('/');
const syncData = store.getters['data/syncDataByItemId'][fileId];
if (!syncData) {
return {};
}
const content = await googleHelper.downloadFile(token, syncData.id);
const item = Provider.parseContent(content, contentSyncData.itemId);
async downloadWorkspaceContent({ token, contentSyncData, fileSyncData }) {
const data = await googleHelper.downloadFile(token, fileSyncData.id);
const content = Provider.parseContent(data, contentSyncData.itemId);
// Open the file requested by action if it wasn't synced yet
if (fileIdToOpen && fileIdToOpen === syncData.id) {
if (fileIdToOpen && fileIdToOpen === fileSyncData.id) {
fileIdToOpen = null;
// Open the file once downloaded content has been stored
setTimeout(() => {
store.commit('file/setCurrentId', fileId);
store.commit('file/setCurrentId', fileSyncData.itemId);
}, 10);
}
return {
item,
syncData: {
content,
contentSyncData: {
...contentSyncData,
hash: item.hash,
hash: content.hash,
},
};
},
async downloadWorkspaceData(token, dataId, syncData) {
async downloadWorkspaceData({ token, syncData }) {
if (!syncData) {
return {};
}
@ -411,57 +413,66 @@ export default new Provider({
},
};
},
async uploadWorkspaceContent(token, content, contentSyncData, ifNotTooLate) {
const [fileId] = content.id.split('/');
const syncData = store.getters['data/syncDataByItemId'][fileId];
let file;
async uploadWorkspaceContent({
token,
content,
file,
fileSyncData,
ifNotTooLate,
}) {
let gdriveFile;
let newFileSyncData;
if (syncData) {
if (fileSyncData) {
// Only update file media
file = await googleHelper.uploadFile({
gdriveFile = await googleHelper.uploadFile({
token,
media: Provider.serializeContent(content),
fileId: syncData.id,
fileId: fileSyncData.id,
ifNotTooLate,
});
} else {
// Create file with media
const workspace = store.getters['workspace/currentWorkspace'];
// Use deepCopy to freeze objects
const item = utils.deepCopy(store.state.file.itemMap[fileId]);
const parentSyncData = store.getters['data/syncDataByItemId'][item.parentId];
file = await googleHelper.uploadFile({
const parentSyncData = store.getters['data/syncDataByItemId'][file.parentId];
gdriveFile = await googleHelper.uploadFile({
token,
name: item.name,
name: file.name,
parents: [parentSyncData ? parentSyncData.id : workspace.folderId],
appProperties: {
id: item.id,
id: file.id,
folderId: workspace.folderId,
},
media: Provider.serializeContent(content),
ifNotTooLate,
});
// Create file syncData
store.dispatch('data/patchSyncData', {
[file.id]: {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
},
});
// Create file sync data
newFileSyncData = {
id: gdriveFile.id,
itemId: file.id,
type: file.type,
hash: file.hash,
};
}
// Return new sync data
return {
id: `${file.id}/content`,
itemId: content.id,
type: content.type,
hash: content.hash,
contentSyncData: {
id: `${gdriveFile.id}/content`,
itemId: content.id,
type: content.type,
hash: content.hash,
},
fileSyncData: newFileSyncData,
};
},
async uploadWorkspaceData(token, item, syncData, ifNotTooLate) {
async uploadWorkspaceData({
token,
item,
syncData,
ifNotTooLate,
}) {
const workspace = store.getters['workspace/currentWorkspace'];
const file = await googleHelper.uploadFile({
token,
@ -482,10 +493,12 @@ export default new Provider({
// Return new sync data
return {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
syncData: {
id: file.id,
itemId: item.id,
type: item.type,
hash: item.hash,
},
};
},
async listRevisions(token, fileId) {

View File

@ -4,7 +4,7 @@ import store from '../../../store';
const request = async (token, options = {}) => {
const baseUrl = `${token.dbUrl}/`;
const getLastToken = () => store.getters['data/couchdbTokens'][token.sub];
const getLastToken = () => store.getters['data/couchdbTokensBySub'][token.sub];
const assertUnauthorized = (err) => {
if (err.status !== 401) {

View File

@ -53,8 +53,8 @@ export default {
fullAccess,
};
// Add token to dropboxTokens
store.dispatch('data/setDropboxToken', token);
// Add token to dropbox tokens
store.dispatch('data/addDropboxToken', token);
return token;
},
addAccount(fullAccess = false) {

View File

@ -76,8 +76,8 @@ export default {
repoFullAccess: scopes.indexOf('repo') !== -1,
};
// Add token to githubTokens
store.dispatch('data/setGithubToken', token);
// Add token to github tokens
store.dispatch('data/addGithubToken', token);
return token;
},
async addAccount(repoFullAccess = false) {
@ -204,7 +204,7 @@ export default {
});
return {
sha,
content: utils.decodeBase64(content),
data: utils.decodeBase64(content),
};
},

View File

@ -52,7 +52,7 @@ export default {
const { reason } = (((err.body || {}).error || {}).errors || [])[0] || {};
if (reason === 'authError') {
// Mark the token as revoked and get a new one
store.dispatch('data/setGoogleToken', {
store.dispatch('data/addGoogleToken', {
...token,
expiresOn: 0,
});
@ -101,7 +101,7 @@ export default {
}
// Build token object including scopes and sub
const existingToken = store.getters['data/googleTokens'][body.sub];
const existingToken = store.getters['data/googleTokensBySub'][body.sub];
const token = {
scopes,
accessToken,
@ -158,13 +158,13 @@ export default {
}
}
// Add token to googleTokens
store.dispatch('data/setGoogleToken', token);
// Add token to google tokens
store.dispatch('data/addGoogleToken', token);
return token;
},
async refreshToken(token, scopes = []) {
const { sub } = token;
const lastToken = store.getters['data/googleTokens'][sub];
const lastToken = store.getters['data/googleTokensBySub'][sub];
const mergedScopes = [...new Set([
...scopes,
...lastToken.scopes,

View File

@ -44,13 +44,13 @@ export default {
name: body.display_name,
sub: `${body.ID}`,
};
// Add token to wordpressTokens
store.dispatch('data/setWordpressToken', token);
// Add token to wordpress tokens
store.dispatch('data/addWordpressToken', token);
return token;
},
async refreshToken(token) {
const { sub } = token;
const lastToken = store.getters['data/wordpressTokens'][sub];
const lastToken = store.getters['data/wordpressTokensBySub'][sub];
if (lastToken.expiresOn > Date.now() + tokenExpirationMargin) {
return lastToken;

View File

@ -45,8 +45,8 @@ export default {
sub: uniqueSub,
};
// Add token to zendeskTokens
store.dispatch('data/setZendeskToken', token);
// Add token to zendesk tokens
store.dispatch('data/addZendeskToken', token);
return token;
},
addAccount(subdomain, clientId) {

View File

@ -5,7 +5,7 @@ import Provider from './common/Provider';
export default new Provider({
id: 'wordpress',
getToken(location) {
return store.getters['data/wordpressTokens'][location.sub];
return store.getters['data/wordpressTokensBySub'][location.sub];
},
getUrl(location) {
return `https://wordpress.com/post/${location.siteId}/${location.postId}`;

View File

@ -5,7 +5,7 @@ import Provider from './common/Provider';
export default new Provider({
id: 'zendesk',
getToken(location) {
return store.getters['data/zendeskTokens'][location.sub];
return store.getters['data/zendeskTokensBySub'][location.sub];
},
getUrl(location) {
const token = this.getToken(location);

View File

@ -40,10 +40,10 @@ const ensureDate = (value, defaultValue) => {
const publish = async (publishLocation) => {
const { fileId } = publishLocation;
const template = store.getters['data/allTemplates'][publishLocation.templateId];
const template = store.getters['data/allTemplatesById'][publishLocation.templateId];
const html = await exportSvc.applyTemplate(fileId, template);
const content = await localDbSvc.loadItem(`${fileId}/content`);
const file = store.state.file.itemMap[fileId];
const file = store.state.file.itemsById[fileId];
const properties = utils.computeProperties(content.properties);
const provider = providerRegistry.providers[publishLocation.providerId];
const token = provider.getToken(publishLocation);
@ -90,7 +90,7 @@ const publishFile = async (fileId) => {
},
});
});
const file = store.state.file.itemMap[fileId];
const file = store.state.file.itemsById[fileId];
store.dispatch('notification/info', `"${file.name}" was published to ${counter} location(s).`);
} finally {
await localDbSvc.unloadContents();

View File

@ -108,7 +108,7 @@ const upgradeSyncedContent = (syncedContent) => {
const cleanSyncedContent = (syncedContent) => {
// Clean syncHistory from removed syncLocations
Object.keys(syncedContent.syncHistory).forEach((syncLocationId) => {
if (syncLocationId !== 'main' && !store.state.syncLocation.itemMap[syncLocationId]) {
if (syncLocationId !== 'main' && !store.state.syncLocation.itemsById[syncLocationId]) {
delete syncedContent.syncHistory[syncLocationId];
}
});
@ -129,28 +129,43 @@ const cleanSyncedContent = (syncedContent) => {
* Apply changes retrieved from the main provider. Update sync data accordingly.
*/
const applyChanges = (changes) => {
const storeItemMap = { ...store.getters.allItemMap };
const syncData = { ...store.getters['data/syncData'] };
const allItemsById = { ...store.getters.allItemsById };
const syncDataById = { ...store.getters['data/syncDataById'] };
let getExistingItem;
if (workspaceProvider.isGit) {
const { itemsByGitPath } = store.getters;
getExistingItem = (existingSyncData) => {
const items = existingSyncData && itemsByGitPath[existingSyncData.id];
return items ? items[0] : null;
};
} else {
getExistingItem = existingSyncData => existingSyncData && allItemsById[existingSyncData.itemId];
}
const idsToKeep = {};
let saveSyncData = false;
// Process each change
changes.forEach((change) => {
const existingSyncData = syncData[change.syncDataId];
const existingItem = existingSyncData && storeItemMap[existingSyncData.itemId];
const existingSyncData = syncDataById[change.syncDataId];
const existingItem = getExistingItem(existingSyncData);
// If item was removed
if (!change.item && existingSyncData) {
// Item was removed
if (syncData[change.syncDataId]) {
delete syncData[change.syncDataId];
if (syncDataById[change.syncDataId]) {
delete syncDataById[change.syncDataId];
saveSyncData = true;
}
if (existingItem) {
// Remove object from the store
store.commit(`${existingItem.type}/deleteItem`, existingItem.id);
delete storeItemMap[existingItem.id];
delete allItemsById[existingItem.id];
}
// If item was modified
} else if (change.item && change.item.hash) {
      // Item was modified
idsToKeep[change.item.id] = true;
if ((existingSyncData || {}).hash !== change.syncData.hash) {
syncData[change.syncDataId] = change.syncData;
syncDataById[change.syncDataId] = change.syncData;
saveSyncData = true;
}
if (
@ -162,13 +177,14 @@ const applyChanges = (changes) => {
&& change.item.type !== 'content' && change.item.type !== 'data'
) {
store.commit(`${change.item.type}/setItem`, change.item);
storeItemMap[change.item.id] = change.item;
allItemsById[change.item.id] = change.item;
}
}
});
if (saveSyncData) {
store.dispatch('data/setSyncData', syncData);
store.dispatch('data/setSyncDataById', syncDataById);
fileSvc.ensureUniquePaths(idsToKeep);
}
};
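
For reference, a rough sketch of the change entries applyChanges consumes, as implied by the code above; the ids and hashes are invented for illustration.

const changes = [
  // Item removed on the server: no item field, only the sync data id
  { syncDataId: 'sync-data-1' },
  // Item added or modified on the server: carries the item and its new sync data
  {
    syncDataId: 'sync-data-2',
    item: { id: 'file-2', type: 'file', name: 'Notes', parentId: null, hash: 123 },
    syncData: { id: 'sync-data-2', itemId: 'file-2', type: 'file', hash: 123 },
  },
];
applyChanges(changes);
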
@ -192,7 +208,7 @@ const createSyncLocation = (syncLocation) => {
history: [content.hash],
}, syncLocation);
await localDbSvc.loadSyncedContent(fileId);
const newSyncedContent = utils.deepCopy(upgradeSyncedContent(store.state.syncedContent.itemMap[`${fileId}/syncedContent`]));
const newSyncedContent = utils.deepCopy(upgradeSyncedContent(store.state.syncedContent.itemsById[`${fileId}/syncedContent`]));
const newSyncHistoryItem = [];
newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
newSyncHistoryItem[LAST_SEEN] = content.hash;
@ -220,7 +236,7 @@ const tooLateChecker = (timeout) => {
};
/**
* Return true if file is in the temp folder or it's a welcome file.
* Return true if file is in the temp folder or is a welcome file.
*/
const isTempFile = (fileId) => {
const contentId = `${fileId}/content`;
@ -228,8 +244,8 @@ const isTempFile = (fileId) => {
// If file has already been synced, it's not a temp file
return false;
}
const file = store.state.file.itemMap[fileId];
const content = store.state.content.itemMap[contentId];
const file = store.state.file.itemsById[fileId];
const content = store.state.content.itemsById[contentId];
if (!file || !content) {
return false;
}
@ -251,6 +267,24 @@ const isTempFile = (fileId) => {
return file.name === 'Welcome file' && welcomeFileHashes[hash] && !hasDiscussions;
};
/**
* Patch sync data if some have changed in the result.
*/
const updateSyncData = (result) => {
['syncData', 'contentSyncData', 'fileSyncData'].forEach((field) => {
const syncData = result[field];
if (syncData) {
const oldSyncData = store.getters['data/syncDataById'][syncData.id];
if (utils.serializeObject(oldSyncData) !== utils.serializeObject(syncData)) {
store.dispatch('data/patchSyncDataById', {
[syncData.id]: syncData,
});
}
}
});
return result;
};
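
As an illustration, updateSyncData only dispatches data/patchSyncDataById for entries whose serialized form differs from what is already stored; a provider result shaped like the upload results above would be handled as follows (ids and hashes are made up).

const result = updateSyncData({
  contentSyncData: { id: 'remote-id/content', itemId: 'file-1/content', type: 'content', hash: 456 },
  fileSyncData: { id: 'remote-id', itemId: 'file-1', type: 'file', hash: 123 },
});
// The result is returned unchanged so callers can keep destructuring it
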
class SyncContext {
restart = false;
attempted = {};
@ -270,8 +304,7 @@ const syncFile = async (fileId, syncContext = new SyncContext()) => {
// Item may not exist if content has not been downloaded yet
}
const getContent = () => store.state.content.itemMap[contentId];
const getSyncedContent = () => upgradeSyncedContent(store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
const getSyncedContent = () => upgradeSyncedContent(store.state.syncedContent.itemsById[`${fileId}/syncedContent`]);
const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];
try {
@ -303,57 +336,45 @@ const syncFile = async (fileId, syncContext = new SyncContext()) => {
}
// On workspace provider, call downloadWorkspaceContent
const oldSyncData = provider.isGit
? store.getters['data/syncData'][store.getters.itemGitPaths[contentId]]
: store.getters['data/syncDataByItemId'][contentId];
if (!oldSyncData) {
return null;
}
const { item, syncData } = await provider.downloadWorkspaceContent(token, oldSyncData);
if (!item) {
const oldContentSyncData = store.getters['data/syncDataByItemId'][contentId];
const oldFileSyncData = store.getters['data/syncDataByItemId'][fileId];
if (!oldContentSyncData || !oldFileSyncData) {
return null;
}
// Update sync data if changed
if (syncData
&& utils.serializeObject(oldSyncData) !== utils.serializeObject(syncData)
) {
store.dispatch('data/patchSyncData', {
[syncData.id]: syncData,
});
}
return item;
const { content } = updateSyncData(await provider.downloadWorkspaceContent({
token,
contentId,
contentSyncData: oldContentSyncData,
fileSyncData: oldFileSyncData,
}));
// Return the downloaded content
return content;
};
const uploadContent = async (item, ifNotTooLate) => {
const uploadContent = async (content, ifNotTooLate) => {
    // On simple provider, simply call uploadContent
if (syncLocation.id !== 'main') {
return provider.uploadContent(token, item, syncLocation, ifNotTooLate);
return provider.uploadContent(token, content, syncLocation, ifNotTooLate);
}
// On workspace provider, call uploadWorkspaceContent
const oldSyncData = provider.isGit
? store.getters['data/syncData'][store.getters.itemGitPaths[contentId]]
: store.getters['data/syncDataByItemId'][contentId];
if (oldSyncData && oldSyncData.hash === item.hash) {
const oldContentSyncData = store.getters['data/syncDataByItemId'][contentId];
if (oldContentSyncData && oldContentSyncData.hash === content.hash) {
return syncLocation;
}
const oldFileSyncData = store.getters['data/syncDataByItemId'][fileId];
const syncData = await provider.uploadWorkspaceContent(
updateSyncData(await provider.uploadWorkspaceContent({
token,
item,
oldSyncData,
content,
// Use deepCopy to freeze item
file: utils.deepCopy(store.state.file.itemsById[fileId]),
contentSyncData: oldContentSyncData,
fileSyncData: oldFileSyncData,
ifNotTooLate,
);
// Update sync data if changed
if (syncData
&& utils.serializeObject(oldSyncData) !== utils.serializeObject(syncData)
) {
store.dispatch('data/patchSyncData', {
[syncData.id]: syncData,
});
}
}));
// Return syncLocation
return syncLocation;
@ -366,7 +387,7 @@ const syncFile = async (fileId, syncContext = new SyncContext()) => {
// Merge content
let mergedContent;
const clientContent = utils.deepCopy(getContent());
const clientContent = utils.deepCopy(store.state.content.itemsById[contentId]);
if (!clientContent) {
mergedContent = utils.deepCopy(serverContent || null);
} else if (!serverContent // If sync location has not been created yet
@ -401,7 +422,7 @@ const syncFile = async (fileId, syncContext = new SyncContext()) => {
});
// Retrieve content with its new hash value and freeze it
mergedContent = utils.deepCopy(getContent());
mergedContent = utils.deepCopy(store.state.content.itemsById[contentId]);
// Make merged content history
const mergedContentHistory = serverContent ? serverContent.history.slice() : [];
@ -504,8 +525,8 @@ const syncFile = async (fileId, syncContext = new SyncContext()) => {
* Sync a data item, typically settings, workspaces and templates.
*/
const syncDataItem = async (dataId) => {
const getItem = () => store.state.data.itemMap[dataId]
|| store.state.data.lsItemMap[dataId];
const getItem = () => store.state.data.itemsById[dataId]
|| store.state.data.lsItemsById[dataId];
const oldItem = getItem();
const oldSyncData = store.getters['data/syncDataByItemId'][dataId];
@ -515,23 +536,13 @@ const syncDataItem = async (dataId) => {
}
const token = workspaceProvider.getToken();
const { item, syncData } = await workspaceProvider.downloadWorkspaceData(
const { item } = updateSyncData(await workspaceProvider.downloadWorkspaceData({
token,
dataId,
oldSyncData,
);
// Update sync data if changed
if (syncData
&& utils.serializeObject(oldSyncData) !== utils.serializeObject(syncData)
) {
store.dispatch('data/patchSyncData', {
[syncData.id]: syncData,
});
}
syncData: oldSyncData,
}));
const serverItem = item;
const dataSyncData = store.getters['data/dataSyncData'][dataId];
const dataSyncData = store.getters['data/dataSyncDataById'][dataId];
let mergedItem = (() => {
const clientItem = utils.deepCopy(getItem());
if (!clientItem) {
@ -575,24 +586,15 @@ const syncDataItem = async (dataId) => {
}
// Upload merged data item
const newSyncData = await workspaceProvider.uploadWorkspaceData(
updateSyncData(await workspaceProvider.uploadWorkspaceData({
token,
mergedItem,
syncData,
tooLateChecker(restartContentSyncAfter),
);
// Update sync data if changed
if (newSyncData
&& utils.serializeObject(syncData) !== utils.serializeObject(newSyncData)
) {
store.dispatch('data/patchSyncData', {
[newSyncData.id]: newSyncData,
});
}
item: mergedItem,
syncData: store.getters['data/syncDataByItemId'][dataId],
ifNotTooLate: tooLateChecker(restartContentSyncAfter),
}));
// Update data sync data
store.dispatch('data/patchDataSyncData', {
store.dispatch('data/patchDataSyncDataById', {
[dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
});
};
@ -617,11 +619,10 @@ const syncWorkspace = async () => {
}
const changes = await workspaceProvider.getChanges();
// Apply changes
applyChanges(changes);
if (workspaceProvider.onChangesApplied) {
workspaceProvider.onChangesApplied();
}
applyChanges(workspaceProvider.prepareChanges(changes));
workspaceProvider.onChangesApplied();
  // Prevent sending items too long after changes have been retrieved
const ifNotTooLate = tooLateChecker(restartSyncAfter);
@ -629,33 +630,25 @@ const syncWorkspace = async () => {
// Called until no item to save
const saveNextItem = () => ifNotTooLate(async () => {
const storeItemMap = {
...store.state.file.itemMap,
...store.state.folder.itemMap,
...store.state.syncLocation.itemMap,
...store.state.publishLocation.itemMap,
...store.state.file.itemsById,
...store.state.folder.itemsById,
...store.state.syncLocation.itemsById,
...store.state.publishLocation.itemsById,
// Deal with contents and data later
};
let getSyncData;
if (workspaceProvider.isGit) {
const syncData = store.getters['data/syncData'];
getSyncData = id => syncData[store.getters.itemGitPaths[id]];
} else {
const syncDataByItemId = store.getters['data/syncDataByItemId'];
getSyncData = id => syncDataByItemId[id];
}
const syncDataByItemId = store.getters['data/syncDataByItemId'];
const [changedItem, syncDataToUpdate] = utils.someResult(
Object.entries(storeItemMap),
([id, item]) => {
const existingSyncData = getSyncData(id);
if ((!existingSyncData || existingSyncData.hash !== item.hash)
const syncData = syncDataByItemId[id];
if ((!syncData || syncData.hash !== item.hash)
// Add file/folder if parent has been added
&& (!storeItemMap[item.parentId] || getSyncData(item.parentId))
&& (!storeItemMap[item.parentId] || syncDataByItemId[item.parentId])
// Add file if content has been added
&& (item.type !== 'file' || getSyncData(`${id}/content`))
&& (item.type !== 'file' || syncDataByItemId[`${id}/content`])
) {
return [item, existingSyncData];
return [item, syncData];
}
return null;
},
@ -663,13 +656,13 @@ const syncWorkspace = async () => {
if (changedItem) {
const resultSyncData = await workspaceProvider
.saveWorkspaceItem(
.saveWorkspaceItem({
// Use deepCopy to freeze objects
utils.deepCopy(changedItem),
utils.deepCopy(syncDataToUpdate),
item: utils.deepCopy(changedItem),
syncData: utils.deepCopy(syncDataToUpdate),
ifNotTooLate,
);
store.dispatch('data/patchSyncData', {
});
store.dispatch('data/patchSyncDataById', {
[resultSyncData.id]: resultSyncData,
});
await saveNextItem();
@ -682,38 +675,40 @@ const syncWorkspace = async () => {
let getItem;
let getFileItem;
if (workspaceProvider.isGit) {
const { gitPathItems } = store.getters;
getItem = syncData => gitPathItems[syncData.id];
getFileItem = syncData => gitPathItems[syncData.id.slice(1)]; // Remove leading /
const { itemsByGitPath } = store.getters;
getItem = syncData => itemsByGitPath[syncData.id];
getFileItem = syncData => itemsByGitPath[syncData.id.slice(1)]; // Remove leading /
} else {
const { allItemMap } = store.getters;
getItem = syncData => allItemMap[syncData.itemId];
getFileItem = syncData => allItemMap[syncData.itemId.split('/')[0]];
const { allItemsById } = store.getters;
getItem = syncData => allItemsById[syncData.itemId];
getFileItem = syncData => allItemsById[syncData.itemId.split('/')[0]];
}
const syncData = store.getters['data/syncData'];
const syncDataById = store.getters['data/syncDataById'];
const syncDataToRemove = utils.deepCopy(utils.someResult(
Object.values(syncData),
(existingSyncData) => {
if (!getItem(existingSyncData)
Object.values(syncDataById),
(syncData) => {
if (!getItem(syncData)
// We don't want to delete data items, especially on first sync
&& existingSyncData.type !== 'data'
&& syncData.type !== 'data'
// Remove content only if file has been removed
&& (existingSyncData.type !== 'content'
|| !getFileItem(existingSyncData))
&& (syncData.type !== 'content'
|| !getFileItem(syncData))
) {
return existingSyncData;
return syncData;
}
return null;
},
));
if (syncDataToRemove) {
// Use deepCopy to freeze objects
await workspaceProvider.removeWorkspaceItem(syncDataToRemove, ifNotTooLate);
const syncDataCopy = { ...store.getters['data/syncData'] };
await workspaceProvider.removeWorkspaceItem({
syncData: syncDataToRemove,
ifNotTooLate,
});
const syncDataCopy = { ...store.getters['data/syncDataById'] };
delete syncDataCopy[syncDataToRemove.id];
store.dispatch('data/setSyncData', syncDataCopy);
store.dispatch('data/setSyncDataById', syncDataCopy);
await removeNextItem();
}
});
@ -727,33 +722,38 @@ const syncWorkspace = async () => {
await syncDataItem('templates');
const getOneFileIdToSync = () => {
const contentIds = [...new Set([
...Object.keys(localDbSvc.hashMap.content),
...store.getters['file/items'].map(file => `${file.id}/content`),
])];
const contentMap = store.state.content.itemMap;
const syncDataById = store.getters['data/syncData'];
let getSyncData;
if (workspaceProvider.isGit) {
const { itemGitPaths } = store.getters;
getSyncData = contentId => syncDataById[itemGitPaths[contentId]];
const { gitPathsByItemId } = store.getters;
const syncDataById = store.getters['data/syncDataById'];
// Use file git path as content may not exist or not be loaded
getSyncData = fileId => syncDataById[`/${gitPathsByItemId[fileId]}`];
} else {
const syncDataByItemId = store.getters['data/syncDataByItemId'];
getSyncData = contentId => syncDataByItemId[contentId];
getSyncData = (fileId, contentId) => syncDataByItemId[contentId];
}
return utils.someResult(contentIds, (contentId) => {
// Get content hash from itemMap or from localDbSvc if not loaded
// Collect all [fileId, contentId]
const ids = [
...Object.keys(localDbSvc.hashMap.content)
.map(contentId => [contentId.split('/')[0], contentId]),
...store.getters['file/items']
.map(file => [file.id, `${file.id}/content`]),
];
// Find the first content out of sync
const contentMap = store.state.content.itemsById;
return utils.someResult(ids, ([fileId, contentId]) => {
// Get content hash from itemsById or from localDbSvc if not loaded
const loadedContent = contentMap[contentId];
const hash = loadedContent ? loadedContent.hash : localDbSvc.hashMap.content[contentId];
const syncData = getSyncData(contentId);
const syncData = getSyncData(fileId, contentId);
if (
// Sync if content syncing was not attempted yet
!syncContext.attempted[contentId] &&
// And if syncData does not exist or if content hash and syncData hash are inconsistent
(!syncData || syncData.hash !== hash)
) {
const [fileId] = contentId.split('/');
return fileId;
}
return null;
@ -843,7 +843,7 @@ const requestSync = () => {
// Clean files
Object.entries(fileHashesToClean).forEach(([fileId, fileHash]) => {
const file = store.state.file.itemMap[fileId];
const file = store.state.file.itemsById[fileId];
if (file && file.hash === fileHash) {
fileSvc.deleteFile(fileId);
}

View File

@ -38,12 +38,11 @@ export default {
parentId: 'temp',
}, true);
const fileItemMap = store.state.file.itemMap;
// Sanitize file creations
const lastCreated = {};
const fileItemsById = store.state.file.itemsById;
Object.entries(store.getters['data/lastCreated']).forEach(([id, createdOn]) => {
if (fileItemMap[id] && fileItemMap[id].parentId === 'temp') {
if (fileItemsById[id] && fileItemsById[id].parentId === 'temp') {
lastCreated[id] = createdOn;
}
});

View File

@ -21,7 +21,7 @@ export default {
const [type, sub] = parseUserId(userId);
// Try to find a token with this sub
const token = store.getters[`data/${type}Tokens`][sub];
const token = store.getters[`data/${type}TokensBySub`][sub];
if (token) {
store.commit('userInfo/addItem', {
id: userId,

View File

@ -31,11 +31,11 @@ module.mutations = {
module.getters = {
...module.getters,
current: ({ itemMap, revisionContent }, getters, rootState, rootGetters) => {
current: ({ itemsById, revisionContent }, getters, rootState, rootGetters) => {
if (revisionContent) {
return revisionContent;
}
return itemMap[`${rootGetters['file/current'].id}/content`] || empty();
return itemsById[`${rootGetters['file/current'].id}/content`] || empty();
},
currentChangeTrigger: (state, getters) => {
const { current } = getters;
@ -63,7 +63,7 @@ module.actions = {
},
setRevisionContent({ state, rootGetters, commit }, value) {
const currentFile = rootGetters['file/current'];
const currentContent = state.itemMap[`${currentFile.id}/content`];
const currentContent = state.itemsById[`${currentFile.id}/content`];
if (currentContent) {
const diffs = diffMatchPatch.diff_main(currentContent.text, value.text);
diffMatchPatch.diff_cleanupSemantic(diffs);

View File

@ -5,8 +5,8 @@ const module = moduleTemplate(empty, true);
module.getters = {
...module.getters,
current: ({ itemMap }, getters, rootState, rootGetters) =>
itemMap[`${rootGetters['file/current'].id}/contentState`] || empty(),
current: ({ itemsById }, getters, rootState, rootGetters) =>
itemsById[`${rootGetters['file/current'].id}/contentState`] || empty(),
};
module.actions = {

View File

@ -37,13 +37,13 @@ const lsItemIdSet = new Set(utils.localStorageDataIds);
// Getter/setter/patcher factories
const getter = id => state => ((lsItemIdSet.has(id)
? state.lsItemMap
: state.itemMap)[id] || {}).data || empty(id).data;
? state.lsItemsById
: state.itemsById)[id] || {}).data || empty(id).data;
const setter = id => ({ commit }, data) => commit('setItem', itemTemplate(id, data));
const patcher = id => ({ state, commit }, data) => {
const item = Object.assign(empty(id), (lsItemIdSet.has(id)
? state.lsItemMap
: state.itemMap)[id]);
? state.lsItemsById
: state.itemsById)[id]);
commit('setItem', {
...empty(id),
data: typeof data === 'object' ? {
@ -83,10 +83,10 @@ const additionalTemplates = {
};
// For tokens
const tokenSetter = providerId => ({ getters, dispatch }, token) => {
dispatch('patchTokens', {
const tokenAdder = providerId => ({ getters, dispatch }, token) => {
dispatch('patchTokensByProviderId', {
[providerId]: {
...getters[`${providerId}Tokens`],
...getters[`${providerId}TokensBySub`],
[token.sub]: token,
},
});
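
To illustrate the renamed token helpers, a sketch of an addGoogleToken dispatch and the getter it feeds; the token fields are hypothetical.

// tokenAdder('google') merges the token into tokensByProviderId.google, keyed by sub
store.dispatch('data/addGoogleToken', { sub: '12345', accessToken: 'hypothetical-token', scopes: [] });
const googleToken = store.getters['data/googleTokensBySub']['12345'];
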
@ -99,12 +99,12 @@ export default {
namespaced: true,
state: {
// Data items stored in the DB
itemMap: {},
itemsById: {},
// Data items stored in the localStorage
lsItemMap: {},
lsItemsById: {},
},
mutations: {
setItem: ({ itemMap, lsItemMap }, value) => {
setItem: ({ itemsById, lsItemsById }, value) => {
// Create an empty item and override its data field
const emptyItem = empty(value.id);
const data = typeof value.data === 'object'
@ -117,20 +117,20 @@ export default {
data,
});
// Store item in itemMap or lsItemMap if its stored in the localStorage
Vue.set(lsItemIdSet.has(item.id) ? lsItemMap : itemMap, item.id, item);
    // Store item in itemsById or lsItemsById if it's stored in the localStorage
Vue.set(lsItemIdSet.has(item.id) ? lsItemsById : itemsById, item.id, item);
},
deleteItem({ itemMap }, id) {
// Only used by localDbSvc to clean itemMap from object moved to localStorage
Vue.delete(itemMap, id);
deleteItem({ itemsById }, id) {
      // Only used by localDbSvc to clean itemsById from objects moved to localStorage
Vue.delete(itemsById, id);
},
},
getters: {
workspaces: getter('workspaces'),
sanitizedWorkspaces: (state, { workspaces }, rootState, rootGetters) => {
const sanitizedWorkspaces = {};
workspacesById: getter('workspaces'),
sanitizedWorkspacesById: (state, { workspacesById }, rootState, rootGetters) => {
const sanitizedWorkspacesById = {};
const mainWorkspaceToken = rootGetters['workspace/mainWorkspaceToken'];
Object.entries(workspaces).forEach(([id, workspace]) => {
Object.entries(workspacesById).forEach(([id, workspace]) => {
const sanitizedWorkspace = {
id,
providerId: mainWorkspaceToken && 'googleDriveAppData',
@ -141,9 +141,9 @@ export default {
urlParser.href = workspace.url || 'app';
const params = utils.parseQueryParams(urlParser.hash.slice(1));
sanitizedWorkspace.url = utils.addQueryParams('app', params, true);
sanitizedWorkspaces[id] = sanitizedWorkspace;
sanitizedWorkspacesById[id] = sanitizedWorkspace;
});
return sanitizedWorkspaces;
return sanitizedWorkspacesById;
},
settings: getter('settings'),
computedSettings: (state, { settings }) => {
@ -170,9 +170,9 @@ export default {
},
localSettings: getter('localSettings'),
layoutSettings: getter('layoutSettings'),
templates: getter('templates'),
allTemplates: (state, { templates }) => ({
...templates,
templatesById: getter('templates'),
allTemplatesById: (state, { templatesById }) => ({
...templatesById,
...additionalTemplates,
}),
lastCreated: getter('lastCreated'),
@ -186,42 +186,51 @@ export default {
result[currentFileId] = Date.now();
}
return Object.keys(result)
.filter(id => rootState.file.itemMap[id])
.filter(id => rootState.file.itemsById[id])
.sort((id1, id2) => result[id2] - result[id1])
.slice(0, 20);
},
syncData: getter('syncData'),
syncDataByItemId: (state, { syncData }) => {
syncDataById: getter('syncData'),
syncDataByItemId: (state, { syncDataById }, rootState, rootGetters) => {
const result = {};
Object.entries(syncData).forEach(([, value]) => {
result[value.itemId] = value;
});
if (rootGetters['workspace/currentWorkspaceIsGit']) {
Object.entries(rootGetters.gitPathsByItemId).forEach(([id, path]) => {
const syncDataEntry = syncDataById[path];
if (syncDataEntry) {
result[id] = syncDataEntry;
}
});
} else {
Object.entries(syncDataById).forEach(([, syncDataEntry]) => {
result[syncDataEntry.itemId] = syncDataEntry;
});
}
return result;
},
syncDataByType: (state, { syncData }) => {
syncDataByType: (state, { syncDataById }) => {
const result = {};
utils.types.forEach((type) => {
result[type] = {};
});
Object.entries(syncData).forEach(([, item]) => {
Object.entries(syncDataById).forEach(([, item]) => {
if (result[item.type]) {
result[item.type][item.itemId] = item;
}
});
return result;
},
dataSyncData: getter('dataSyncData'),
tokens: getter('tokens'),
googleTokens: (state, { tokens }) => tokens.google || {},
couchdbTokens: (state, { tokens }) => tokens.couchdb || {},
dropboxTokens: (state, { tokens }) => tokens.dropbox || {},
githubTokens: (state, { tokens }) => tokens.github || {},
wordpressTokens: (state, { tokens }) => tokens.wordpress || {},
zendeskTokens: (state, { tokens }) => tokens.zendesk || {},
dataSyncDataById: getter('dataSyncData'),
tokensByProviderId: getter('tokens'),
googleTokensBySub: (state, { tokensByProviderId }) => tokensByProviderId.google || {},
couchdbTokensBySub: (state, { tokensByProviderId }) => tokensByProviderId.couchdb || {},
dropboxTokensBySub: (state, { tokensByProviderId }) => tokensByProviderId.dropbox || {},
githubTokensBySub: (state, { tokensByProviderId }) => tokensByProviderId.github || {},
wordpressTokensBySub: (state, { tokensByProviderId }) => tokensByProviderId.wordpress || {},
zendeskTokensBySub: (state, { tokensByProviderId }) => tokensByProviderId.zendesk || {},
},
actions: {
setWorkspaces: setter('workspaces'),
patchWorkspaces: patcher('workspaces'),
setWorkspacesById: setter('workspaces'),
patchWorkspacesById: patcher('workspaces'),
setSettings: setter('settings'),
patchLocalSettings: patcher('localSettings'),
patchLayoutSettings: patcher('layoutSettings'),
@ -257,15 +266,15 @@ export default {
setSideBarPanel: ({ dispatch }, value) => dispatch('patchLayoutSettings', {
sideBarPanel: value === undefined ? 'menu' : value,
}),
setTemplates: ({ commit }, data) => {
const dataToCommit = {
...data,
setTemplatesById: ({ commit }, templatesById) => {
const templatesToCommit = {
...templatesById,
};
// We don't store additional templates
Object.keys(additionalTemplates).forEach((id) => {
delete dataToCommit[id];
delete templatesToCommit[id];
});
commit('setItem', itemTemplate('templates', dataToCommit));
commit('setItem', itemTemplate('templates', templatesToCommit));
},
setLastCreated: setter('lastCreated'),
setLastOpenedId: ({ getters, commit, rootState }, fileId) => {
@ -274,21 +283,21 @@ export default {
// Remove entries that don't exist anymore
const cleanedLastOpened = {};
Object.entries(lastOpened).forEach(([id, value]) => {
if (rootState.file.itemMap[id]) {
if (rootState.file.itemsById[id]) {
cleanedLastOpened[id] = value;
}
});
commit('setItem', itemTemplate('lastOpened', cleanedLastOpened));
},
setSyncData: setter('syncData'),
patchSyncData: patcher('syncData'),
patchDataSyncData: patcher('dataSyncData'),
patchTokens: patcher('tokens'),
setGoogleToken: tokenSetter('google'),
setCouchdbToken: tokenSetter('couchdb'),
setDropboxToken: tokenSetter('dropbox'),
setGithubToken: tokenSetter('github'),
setWordpressToken: tokenSetter('wordpress'),
setZendeskToken: tokenSetter('zendesk'),
setSyncDataById: setter('syncData'),
patchSyncDataById: patcher('syncData'),
patchDataSyncDataById: patcher('dataSyncData'),
patchTokensByProviderId: patcher('tokens'),
addGoogleToken: tokenAdder('google'),
addCouchdbToken: tokenAdder('couchdb'),
addDropboxToken: tokenAdder('dropbox'),
addGithubToken: tokenAdder('github'),
addWordpressToken: tokenAdder('wordpress'),
addZendeskToken: tokenAdder('zendesk'),
},
};
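
A brief sketch of how callers interact with the renamed getters and actions of this module after the change; the ids and values are invented for the example.

// Sync data is read and patched through the *ById names
const syncDataById = store.getters['data/syncDataById'];
store.dispatch('data/patchSyncDataById', {
  'sync-data-1': { id: 'sync-data-1', itemId: 'file-1', type: 'file', hash: 123 },
});
// Workspaces and templates follow the same convention
store.dispatch('data/patchWorkspacesById', { main: { url: 'app' } });
const templatesById = store.getters['data/allTemplatesById'];
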

View File

@ -10,10 +10,10 @@ module.state = {
module.getters = {
...module.getters,
current: ({ itemMap, currentId }) => itemMap[currentId] || empty(),
current: ({ itemsById, currentId }) => itemsById[currentId] || empty(),
isCurrentTemp: (state, { current }) => current.parentId === 'temp',
lastOpened: ({ itemMap }, { items }, rootState, rootGetters) =>
itemMap[rootGetters['data/lastOpenedIds'][0]] || items[0] || empty(),
lastOpened: ({ itemsById }, { items }, rootState, rootGetters) =>
itemsById[rootGetters['data/lastOpenedIds'][0]] || items[0] || empty(),
};
module.mutations = {

View File

@ -75,14 +75,14 @@ const store = new Vuex.Store({
},
},
getters: {
allItemMap: (state) => {
allItemsById: (state) => {
const result = {};
utils.types.forEach(type => Object.assign(result, state[type].itemMap));
utils.types.forEach(type => Object.assign(result, state[type].itemsById));
return result;
},
itemPaths: (state, getters) => {
pathsByItemId: (state, getters) => {
const result = {};
const folderMap = state.folder.itemMap;
const foldersById = state.folder.itemsById;
const getPath = (item) => {
let itemPath = result[item.id];
if (!itemPath) {
@ -90,10 +90,10 @@ const store = new Vuex.Store({
itemPath = `.stackedit-trash/${item.name}`;
} else {
let { name } = item;
if (folderMap[item.id]) {
if (foldersById[item.id]) {
name += '/';
}
const parentFolder = folderMap[item.parentId];
const parentFolder = foldersById[item.parentId];
if (parentFolder) {
itemPath = getPath(parentFolder) + name;
} else {
@ -104,33 +104,38 @@ const store = new Vuex.Store({
result[item.id] = itemPath;
return itemPath;
};
[
...getters['folder/items'],
...getters['file/items'],
].forEach(item => getPath(item));
return result;
},
pathItems: (state, { allItemMap, itemPaths }) => {
itemsByPath: (state, { allItemsById, pathsByItemId }) => {
const result = {};
Object.entries(itemPaths).forEach(([id, path]) => {
Object.entries(pathsByItemId).forEach(([id, path]) => {
const items = result[path] || [];
items.push(allItemMap[id]);
items.push(allItemsById[id]);
result[path] = items;
});
return result;
},
itemGitPaths: (state, { allItemMap, itemPaths }) => {
gitPathsByItemId: (state, { allItemsById, pathsByItemId }) => {
const result = {};
Object.entries(allItemMap).forEach(([id, item]) => {
Object.entries(allItemsById).forEach(([id, item]) => {
if (item.type === 'data') {
result[id] = `.stackedit-data/${id}.json`;
} else if (item.type === 'file') {
result[id] = `${itemPaths[id]}.md`;
const filePath = pathsByItemId[id];
result[id] = `${filePath}.md`;
result[`${id}/content`] = `/${filePath}.md`;
} else if (item.type === 'content') {
const [fileId] = id.split('/');
result[id] = `/${itemPaths[fileId]}.md`;
const filePath = pathsByItemId[fileId];
result[fileId] = `${filePath}.md`;
result[id] = `/${filePath}.md`;
} else if (item.type === 'folder') {
result[id] = itemPaths[id];
result[id] = pathsByItemId[id];
} else if (item.type === 'syncLocation' || item.type === 'publishLocation') {
// locations are stored as paths
const encodedItem = utils.encodeBase64(utils.serializeObject({
@ -140,17 +145,20 @@ const store = new Vuex.Store({
fileId: undefined,
}), true);
const extension = item.type === 'syncLocation' ? 'sync' : 'publish';
result[id] = `${itemPaths[item.fileId]}.${encodedItem}.${extension}`;
result[id] = `${pathsByItemId[item.fileId]}.${encodedItem}.${extension}`;
}
});
return result;
},
gitPathItems: (state, { allItemMap, itemGitPaths }) => {
itemsByGitPath: (state, { allItemsById, gitPathsByItemId }) => {
const result = {};
Object.entries(itemGitPaths).forEach(([id, path]) => {
const items = result[path] || [];
items.push(allItemMap[id]);
result[path] = items;
Object.entries(gitPathsByItemId).forEach(([id, path]) => {
const item = allItemsById[id];
if (item) {
const items = result[path] || [];
items.push(item);
result[path] = items;
}
});
return result;
},
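
For clarity, an example of the mappings the two getters above would produce for a file named 'Notes' at the root of a git workspace; the ids are hypothetical.

// gitPathsByItemId (item id -> git path)
const gitPathsByItemId = {
  'file-1': 'Notes.md',                      // file: path plus .md extension
  'file-1/content': '/Notes.md',             // content: same path with a leading slash
  settings: '.stackedit-data/settings.json', // data items live under .stackedit-data
};
// itemsByGitPath inverts it, keeping only entries whose item is actually loaded
const itemsByGitPath = {
  'Notes.md': [/* the file item */],
  '/Notes.md': [/* the content item, if loaded */],
};
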

View File

@ -8,10 +8,10 @@ export default (empty, simpleHash = false) => {
return {
namespaced: true,
state: {
itemMap: {},
itemsById: {},
},
getters: {
items: ({ itemMap }) => Object.values(itemMap),
items: ({ itemsById }) => Object.values(itemsById),
},
mutations: {
setItem(state, value) {
@ -19,20 +19,20 @@ export default (empty, simpleHash = false) => {
if (!item.hash || !simpleHash) {
item.hash = hashFunc(item);
}
Vue.set(state.itemMap, item.id, item);
Vue.set(state.itemsById, item.id, item);
},
patchItem(state, patch) {
const item = state.itemMap[patch.id];
const item = state.itemsById[patch.id];
if (item) {
Object.assign(item, patch);
item.hash = hashFunc(item);
Vue.set(state.itemMap, item.id, item);
Vue.set(state.itemsById, item.id, item);
return true;
}
return false;
},
deleteItem(state, id) {
Vue.delete(state.itemMap, id);
Vue.delete(state.itemsById, id);
},
},
actions: {},
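
A minimal usage sketch for a module built from this factory, assuming it is registered under the 'file' namespace; the item fields are made up.

store.commit('file/setItem', { id: 'file-1', name: 'Notes', parentId: null });
const files = store.getters['file/items'];          // Object.values(itemsById)
const file = store.state.file.itemsById['file-1'];  // items are now keyed in itemsById
store.commit('file/patchItem', { id: 'file-1', name: 'Renamed' });
store.commit('file/deleteItem', 'file-1');
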

View File

@ -5,8 +5,8 @@ const module = moduleTemplate(empty, true);
module.getters = {
...module.getters,
current: ({ itemMap }, getters, rootState, rootGetters) =>
itemMap[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
current: ({ itemsById }, getters, rootState, rootGetters) =>
itemsById[`${rootGetters['file/current'].id}/syncedContent`] || empty(),
};
export default module;

View File

@ -3,11 +3,11 @@ import Vue from 'vue';
export default {
namespaced: true,
state: {
itemMap: {},
itemsById: {},
},
mutations: {
addItem: ({ itemMap }, item) => {
Vue.set(itemMap, item.id, item);
addItem: ({ itemsById }, item) => {
Vue.set(itemsById, item.id, item);
},
},
};
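
A small usage sketch, assuming this is the module the user service above commits to under the 'userInfo' namespace; the user fields are hypothetical.

store.commit('userInfo/addItem', { id: 'google:12345', name: 'Jane Doe' });
const userInfo = store.state.userInfo.itemsById['google:12345'];
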

View File

@ -16,13 +16,14 @@ export default {
},
getters: {
mainWorkspace: (state, getters, rootState, rootGetters) => {
const workspaces = rootGetters['data/sanitizedWorkspaces'];
return workspaces.main;
const sanitizedWorkspacesById = rootGetters['data/sanitizedWorkspacesById'];
return sanitizedWorkspacesById.main;
},
currentWorkspace: ({ currentWorkspaceId }, { mainWorkspace }, rootState, rootGetters) => {
const workspaces = rootGetters['data/sanitizedWorkspaces'];
return workspaces[currentWorkspaceId] || mainWorkspace;
const sanitizedWorkspacesById = rootGetters['data/sanitizedWorkspacesById'];
return sanitizedWorkspacesById[currentWorkspaceId] || mainWorkspace;
},
currentWorkspaceIsGit: (state, { currentWorkspace }) => currentWorkspace.providerId === 'githubWorkspace',
hasUniquePaths: (state, { currentWorkspace }) =>
currentWorkspace.providerId === 'githubWorkspace',
lastSyncActivityKey: (state, { currentWorkspace }) => `${currentWorkspace.id}/lastSyncActivity`,

View File

@ -9,8 +9,8 @@ const select = (id) => {
store.commit('explorer/setSelectedId', id);
expect(store.getters['explorer/selectedNode'].item.id).toEqual(id);
};
const ensureExists = file => expect(store.getters.allItemMap).toHaveProperty(file.id);
const ensureNotExists = file => expect(store.getters.allItemMap).not.toHaveProperty(file.id);
const ensureExists = file => expect(store.getters.allItemsById).toHaveProperty(file.id);
const ensureNotExists = file => expect(store.getters.allItemsById).not.toHaveProperty(file.id);
describe('Explorer.vue', () => {
it('should create new files in the root folder', () => {