diff --git a/src/components/modals/providers/ZendeskPublishModal.vue b/src/components/modals/providers/ZendeskPublishModal.vue
index d1f61632..6dbcf81d 100644
--- a/src/components/modals/providers/ZendeskPublishModal.vue
+++ b/src/components/modals/providers/ZendeskPublishModal.vue
@@ -4,7 +4,7 @@
diff --git a/src/data/defaultLocalSettings.js b/src/data/defaultLocalSettings.js
index fa4d2d48..7ca075f3 100644
--- a/src/data/defaultLocalSettings.js
+++ b/src/data/defaultLocalSettings.js
@@ -15,6 +15,7 @@ export default () => ({
dropboxPublishTemplate: 'styledHtml',
githubRepoFullAccess: false,
githubRepoUrl: '',
+ githubWorkspaceRepoUrl: '',
githubPublishTemplate: 'jekyllSite',
gistIsPublic: false,
gistPublishTemplate: 'plainText',
diff --git a/src/data/defaultSettings.yml b/src/data/defaultSettings.yml
index ffd80772..64797b17 100644
--- a/src/data/defaultSettings.yml
+++ b/src/data/defaultSettings.yml
@@ -1,11 +1,11 @@
# light or dark
colorTheme: light
-# Auto-sync frequency (in ms). Minimum is 60000.
-autoSyncEvery: 60000
# Adjust font size in editor and preview
fontSizeFactor: 1
# Adjust maximum text width in editor and preview
maxWidthFactor: 1
+# Auto-sync frequency (in ms). Minimum is 60000.
+autoSyncEvery: 60000
# Editor settings
editor:
@@ -54,7 +54,7 @@ wkhtmltopdf:
marginRight: 25
marginBottom: 25
marginLeft: 25
- # `A3`, `A4`, `Legal` or `Letter`
+ # A3, A4, Legal or Letter
pageSize: A4
# Options passed to pandoc
@@ -77,6 +77,11 @@ turndown:
linkStyle: inlined
linkReferenceStyle: full
+github:
+ createFileMessage: Create {{path}} from https://stackedit.io/
+ updateFileMessage: Update {{path}} from https://stackedit.io/
+ deleteFileMessage: Delete {{path}} from https://stackedit.io/
+
# Default content for new files
newFileContent: |
diff --git a/src/data/emptySyncedContent.js b/src/data/emptySyncedContent.js
index cf25fa94..20891404 100644
--- a/src/data/emptySyncedContent.js
+++ b/src/data/emptySyncedContent.js
@@ -3,5 +3,6 @@ export default (id = null) => ({
type: 'syncedContent',
historyData: {},
syncHistory: {},
+ v: 0,
hash: 0,
});
diff --git a/src/data/welcomeFile.md b/src/data/welcomeFile.md
index 0f8d76ee..1a2b147e 100644
--- a/src/data/welcomeFile.md
+++ b/src/data/welcomeFile.md
@@ -1,6 +1,6 @@
# Welcome to StackEdit!
-Hi! I'm your first Markdown file in **StackEdit**. If you want to learn about StackEdit, you can read me. If you want to play with Markdown, you can edit me. If you have finished with me, you can just create new files by opening the **file explorer** on the left corner of the navigation bar.
+Hi! I'm your first Markdown file in **StackEdit**. If you want to learn about StackEdit, you can read me. If you want to play with Markdown, you can edit me. Once you have finished with me, you can create new files by opening the **file explorer** on the left corner of the navigation bar.
# Files
diff --git a/src/icons/Provider.vue b/src/icons/Provider.vue
index aaa51bfb..5ba1dce4 100644
--- a/src/icons/Provider.vue
+++ b/src/icons/Provider.vue
@@ -16,8 +16,8 @@ export default {
return 'google-drive';
case 'googlePhotos':
return 'google-photos';
- case 'dropboxRestricted':
- return 'dropbox';
+ case 'githubWorkspace':
+ return 'github';
case 'gist':
return 'github';
case 'bloggerPage':
diff --git a/src/services/providers/bloggerPageProvider.js b/src/services/providers/bloggerPageProvider.js
index 83231525..4b317589 100644
--- a/src/services/providers/bloggerPageProvider.js
+++ b/src/services/providers/bloggerPageProvider.js
@@ -1,8 +1,8 @@
import store from '../../store';
import googleHelper from './helpers/googleHelper';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
-export default providerRegistry.register({
+export default new Provider({
id: 'bloggerPage',
getToken(location) {
const token = store.getters['data/googleTokens'][location.sub];
diff --git a/src/services/providers/bloggerProvider.js b/src/services/providers/bloggerProvider.js
index f43fc1bb..df87d688 100644
--- a/src/services/providers/bloggerProvider.js
+++ b/src/services/providers/bloggerProvider.js
@@ -1,8 +1,8 @@
import store from '../../store';
import googleHelper from './helpers/googleHelper';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
-export default providerRegistry.register({
+export default new Provider({
id: 'blogger',
getToken(location) {
const token = store.getters['data/googleTokens'][location.sub];
diff --git a/src/services/providers/providerUtils.js b/src/services/providers/common/Provider.js
similarity index 60%
rename from src/services/providers/providerUtils.js
rename to src/services/providers/common/Provider.js
index 52301ce4..13c32a1c 100644
--- a/src/services/providers/providerUtils.js
+++ b/src/services/providers/common/Provider.js
@@ -1,11 +1,20 @@
-import emptyContent from '../../data/emptyContent';
-import store from '../../store';
-import utils from '../utils';
+import providerRegistry from './providerRegistry';
+import emptyContent from '../../../data/emptyContent';
+import utils from '../../utils';
+import store from '../../../store';
const dataExtractor = /$/;
-export default {
- serializeContent(content) {
+export default class Provider {
+ constructor(props) {
+ Object.assign(this, props);
+ providerRegistry.register(this);
+ }
+
+ /**
+   * Serialize content in a self-contained Markdown-compatible format
+ */
+ static serializeContent(content) {
let result = content.text;
const data = {};
if (content.properties.length > 1) {
@@ -25,8 +34,12 @@ export default {
result += ``;
}
return result;
- },
- parseContent(serializedContent, id) {
+ }
+
+ /**
+ * Parse content serialized with serializeContent()
+ */
+ static parseContent(serializedContent, id) {
const result = utils.deepCopy(store.state.content.itemMap[id]) || emptyContent(id);
result.text = utils.sanitizeText(serializedContent);
result.history = [];
@@ -51,29 +64,26 @@ export default {
}
}
return utils.addItemHash(result);
- },
+ }
+
/**
- * Find and open a file location that fits the criteria
+   * Find and open a file whose location meets the criteria
*/
- openFileWithLocation(allLocations, criteria) {
- return allLocations.some((location) => {
- // If every field fits the criteria
- if (Object.entries(criteria).every(([key, value]) => value === location[key])) {
- // Found one location that fits, open it if it exists
- const file = store.state.file.itemMap[location.fileId];
- if (file) {
- store.commit('file/setCurrentId', file.id);
- // If file is in the trash, restore it
- if (file.parentId === 'trash') {
- store.commit('file/patchItem', {
- ...file,
- parentId: null,
- });
- }
- return true;
+ static openFileWithLocation(allLocations, criteria) {
+ const location = utils.search(allLocations, criteria);
+ if (location) {
+ // Found one, open it if it exists
+ const file = store.state.file.itemMap[location.fileId];
+ if (file) {
+ store.commit('file/setCurrentId', file.id);
+ // If file is in the trash, restore it
+ if (file.parentId === 'trash') {
+ store.commit('file/patchItem', {
+ ...file,
+ parentId: null,
+ });
}
}
- return false;
- });
- },
-};
+ }
+ }
+}
diff --git a/src/services/providers/providerRegistry.js b/src/services/providers/common/providerRegistry.js
similarity index 100%
rename from src/services/providers/providerRegistry.js
rename to src/services/providers/common/providerRegistry.js
diff --git a/src/services/providers/couchdbWorkspaceProvider.js b/src/services/providers/couchdbWorkspaceProvider.js
index 3b815d61..e0789f9f 100644
--- a/src/services/providers/couchdbWorkspaceProvider.js
+++ b/src/services/providers/couchdbWorkspaceProvider.js
@@ -1,7 +1,6 @@
import store from '../../store';
import couchdbHelper from './helpers/couchdbHelper';
-import providerRegistry from './providerRegistry';
-import providerUtils from './providerUtils';
+import Provider from './common/Provider';
import utils from '../utils';
const getSyncData = (fileId) => {
@@ -11,18 +10,20 @@ const getSyncData = (fileId) => {
: Promise.reject(); // No need for a proper error message.
};
-export default providerRegistry.register({
+let syncLastSeq;
+
+export default new Provider({
id: 'couchdbWorkspace',
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
const dbUrl = (utils.queryParams.dbUrl || '').replace(/\/?$/, ''); // Remove trailing /
- const workspaceIdParams = {
+ const workspaceParams = {
providerId: this.id,
dbUrl,
};
- const workspaceId = utils.makeWorkspaceId(workspaceIdParams);
+ const workspaceId = utils.makeWorkspaceId(workspaceParams);
const getToken = () => store.getters['data/couchdbTokens'][workspaceId];
const getWorkspace = () => store.getters['data/sanitizedWorkspaces'][workspaceId];
@@ -51,7 +52,7 @@ export default providerRegistry.register({
}))
.then((workspace) => {
// Fix the URL hash
- utils.setQueryParams(workspaceIdParams);
+ utils.setQueryParams(workspaceParams);
if (workspace.url !== location.href) {
store.dispatch('data/patchWorkspaces', {
[workspace.id]: {
@@ -86,13 +87,13 @@ export default providerRegistry.register({
change.syncDataId = change.id;
return true;
});
- changes.lastSeq = result.lastSeq;
+ syncLastSeq = result.lastSeq;
return changes;
});
},
- setAppliedChanges(changes) {
+ onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
- syncLastSeq: changes.lastSeq,
+ syncLastSeq,
});
},
saveSimpleItem(item, syncData) {
@@ -131,9 +132,9 @@ export default providerRegistry.register({
.then((body) => {
let item;
if (body.item.type === 'content') {
- item = providerUtils.parseContent(body.attachments.data, body.item.id);
+ item = Provider.parseContent(body.attachments.data, body.item.id);
} else {
- item = JSON.parse(body.attachments.data);
+ item = utils.addItemHash(JSON.parse(body.attachments.data));
}
const rev = body._rev; // eslint-disable-line no-underscore-dangle
if (item.hash !== syncData.hash || rev !== syncData.rev) {
@@ -149,18 +150,18 @@ export default providerRegistry.register({
});
},
uploadContent(token, content, syncLocation) {
- return this.uploadData(content, `${syncLocation.fileId}/content`)
+ return this.uploadData(content)
.then(() => syncLocation);
},
- uploadData(item, dataId) {
- const syncData = store.getters['data/syncDataByItemId'][dataId];
+ uploadData(item) {
+ const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
let data;
let dataType;
if (item.type === 'content') {
- data = providerUtils.serializeContent(item);
+ data = Provider.serializeContent(item);
dataType = 'text/plain';
} else {
data = JSON.stringify(item);
@@ -219,6 +220,6 @@ export default providerRegistry.register({
return getSyncData(fileId)
.then(syncData => couchdbHelper
.retrieveDocumentWithAttachments(token, syncData.id, revisionId))
- .then(body => providerUtils.parseContent(body.attachments.data, body.item.id));
+ .then(body => Provider.parseContent(body.attachments.data, body.item.id));
},
});
diff --git a/src/services/providers/dropboxProvider.js b/src/services/providers/dropboxProvider.js
index 4bb118bf..a95acbe7 100644
--- a/src/services/providers/dropboxProvider.js
+++ b/src/services/providers/dropboxProvider.js
@@ -1,7 +1,6 @@
import store from '../../store';
import dropboxHelper from './helpers/dropboxHelper';
-import providerUtils from './providerUtils';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
import utils from '../utils';
const makePathAbsolute = (token, path) => {
@@ -17,7 +16,7 @@ const makePathRelative = (token, path) => {
return path;
};
-export default providerRegistry.register({
+export default new Provider({
id: 'dropbox',
getToken(location) {
return store.getters['data/dropboxTokens'][location.sub];
@@ -40,13 +39,13 @@ export default providerRegistry.register({
makePathRelative(token, syncLocation.path),
syncLocation.dropboxFileId,
)
- .then(({ content }) => providerUtils.parseContent(content, `${syncLocation.fileId}/content`));
+ .then(({ content }) => Provider.parseContent(content, `${syncLocation.fileId}/content`));
},
uploadContent(token, content, syncLocation) {
return dropboxHelper.uploadFile(
token,
makePathRelative(token, syncLocation.path),
- providerUtils.serializeContent(content),
+ Provider.serializeContent(content),
syncLocation.dropboxFileId,
)
.then(dropboxFile => ({
@@ -74,7 +73,7 @@ export default providerRegistry.register({
if (!path) {
return null;
}
- if (providerUtils.openFileWithLocation(store.getters['syncLocation/items'], {
+ if (Provider.openFileWithLocation(store.getters['syncLocation/items'], {
providerId: this.id,
path,
})) {
diff --git a/src/services/providers/gistProvider.js b/src/services/providers/gistProvider.js
index ffa3b96d..eef09bab 100644
--- a/src/services/providers/gistProvider.js
+++ b/src/services/providers/gistProvider.js
@@ -1,10 +1,9 @@
import store from '../../store';
import githubHelper from './helpers/githubHelper';
-import providerUtils from './providerUtils';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
import utils from '../utils';
-export default providerRegistry.register({
+export default new Provider({
id: 'gist',
getToken(location) {
return store.getters['data/githubTokens'][location.sub];
@@ -18,7 +17,7 @@ export default providerRegistry.register({
},
downloadContent(token, syncLocation) {
return githubHelper.downloadGist(token, syncLocation.gistId, syncLocation.filename)
- .then(content => providerUtils.parseContent(content, `${syncLocation.fileId}/content`));
+ .then(content => Provider.parseContent(content, `${syncLocation.fileId}/content`));
},
uploadContent(token, content, syncLocation) {
const file = store.state.file.itemMap[syncLocation.fileId];
@@ -27,7 +26,7 @@ export default providerRegistry.register({
token,
description,
syncLocation.filename,
- providerUtils.serializeContent(content),
+ Provider.serializeContent(content),
syncLocation.isPublic,
syncLocation.gistId,
)
diff --git a/src/services/providers/githubProvider.js b/src/services/providers/githubProvider.js
index 58915417..7777abba 100644
--- a/src/services/providers/githubProvider.js
+++ b/src/services/providers/githubProvider.js
@@ -1,12 +1,11 @@
import store from '../../store';
import githubHelper from './helpers/githubHelper';
-import providerUtils from './providerUtils';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
import utils from '../utils';
const savedSha = {};
-export default providerRegistry.register({
+export default new Provider({
id: 'github',
getToken(location) {
return store.getters['data/githubTokens'][location.sub];
@@ -24,9 +23,9 @@ export default providerRegistry.register({
)
.then(({ sha, content }) => {
savedSha[syncLocation.id] = sha;
- return providerUtils.parseContent(content, `${syncLocation.fileId}/content`);
+ return Provider.parseContent(content, `${syncLocation.fileId}/content`);
})
- .catch(() => null); // Ignore error, without the sha upload is going to fail anyway
+ .catch(() => null); // Ignore error, upload is going to fail anyway
},
uploadContent(token, content, syncLocation) {
let result = Promise.resolve();
@@ -43,7 +42,7 @@ export default providerRegistry.register({
syncLocation.repo,
syncLocation.branch,
syncLocation.path,
- providerUtils.serializeContent(content),
+ Provider.serializeContent(content),
sha,
);
})
@@ -69,7 +68,7 @@ export default providerRegistry.register({
openFile(token, syncLocation) {
return Promise.resolve()
.then(() => {
- if (providerUtils.openFileWithLocation(store.getters['syncLocation/items'], syncLocation)) {
+ if (Provider.openFileWithLocation(store.getters['syncLocation/items'], syncLocation)) {
// File exists and has just been opened. Next...
return null;
}
@@ -109,7 +108,7 @@ export default providerRegistry.register({
});
},
parseRepoUrl(url) {
- const parsedRepo = url.match(/[/:]?([^/:]+)\/([^/]+?)(?:\.git|\/)?$/);
+ const parsedRepo = url && url.match(/([^/:]+)\/([^/]+?)(?:\.git|\/)?$/);
return parsedRepo && {
owner: parsedRepo[1],
repo: parsedRepo[2],
diff --git a/src/services/providers/githubWorkspaceProvider.js b/src/services/providers/githubWorkspaceProvider.js
new file mode 100644
index 00000000..38ad18e9
--- /dev/null
+++ b/src/services/providers/githubWorkspaceProvider.js
@@ -0,0 +1,500 @@
+import store from '../../store';
+import githubHelper from './helpers/githubHelper';
+import Provider from './common/Provider';
+import utils from '../utils';
+import userSvc from '../userSvc';
+
+const getSyncData = (fileId) => {
+ const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
+ return syncData
+ ? Promise.resolve(syncData)
+ : Promise.reject(); // No need for a proper error message.
+};
+
+const getAbsolutePath = syncData =>
+ (store.getters['workspace/currentWorkspace'].path || '') + syncData.id;
+
+const getWorkspaceWithOwner = () => {
+ const workspace = store.getters['workspace/currentWorkspace'];
+ const [owner, repo] = workspace.repo.split('/');
+ return {
+ ...workspace,
+ owner,
+ repo,
+ };
+};
+
+let treeShaMap;
+let treeFolderMap;
+let treeFileMap;
+let treeDataMap;
+let treeSyncLocationMap;
+let treePublishLocationMap;
+
+const endsWith = (str, suffix) => str.slice(-suffix.length) === suffix;
+
+export default new Provider({
+ id: 'githubWorkspace',
+ getToken() {
+ return store.getters['workspace/syncToken'];
+ },
+ initWorkspace() {
+ const [owner, repo] = (utils.queryParams.repo || '').split('/');
+ const branch = utils.queryParams.branch;
+ const workspaceParams = {
+ providerId: this.id,
+ repo: `${owner}/${repo}`,
+ branch,
+ };
+ const path = (utils.queryParams.path || '')
+ .replace(/^\/*/, '') // Remove leading `/`
+ .replace(/\/*$/, '/'); // Add trailing `/`
+ if (path !== '/') {
+ workspaceParams.path = path;
+ }
+ const workspaceId = utils.makeWorkspaceId(workspaceParams);
+ let workspace = store.getters['data/sanitizedWorkspaces'][workspaceId];
+
+ return Promise.resolve()
+ .then(() => {
+ // See if we already have a token
+ if (workspace) {
+ // Token sub is in the workspace
+ const token = store.getters['data/githubTokens'][workspace.sub];
+ if (token) {
+ return token;
+ }
+ }
+        // If no token has been found, pop up an authorization window and get one
+ return store.dispatch('modal/open', {
+ type: 'githubAccount',
+ onResolve: () => githubHelper.addAccount(store.getters['data/localSettings'].githubRepoFullAccess),
+ });
+ })
+ .then((token) => {
+ if (!workspace) {
+ const pathEntries = (path || '').split('/');
+ const name = pathEntries[pathEntries.length - 2] || repo; // path ends with `/`
+ workspace = {
+ ...workspaceParams,
+ id: workspaceId,
+ sub: token.sub,
+ name,
+ };
+ }
+ // Fix the URL hash
+ utils.setQueryParams(workspaceParams);
+ if (workspace.url !== location.href) {
+ store.dispatch('data/patchWorkspaces', {
+ [workspaceId]: {
+ ...workspace,
+ url: location.href,
+ },
+ });
+ }
+ return store.getters['data/sanitizedWorkspaces'][workspaceId];
+ });
+ },
+ getChanges() {
+ const syncToken = store.getters['workspace/syncToken'];
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return githubHelper.getHeadTree(syncToken, owner, repo, branch)
+ .then((tree) => {
+ const workspacePath = store.getters['workspace/currentWorkspace'].path || '';
+ const syncDataByPath = store.getters['data/syncData'];
+ const syncDataByItemId = store.getters['data/syncDataByItemId'];
+
+        // Store the sha of every blob
+ treeShaMap = Object.create(null);
+ // Store interesting paths
+ treeFolderMap = Object.create(null);
+ treeFileMap = Object.create(null);
+ treeDataMap = Object.create(null);
+ treeSyncLocationMap = Object.create(null);
+ treePublishLocationMap = Object.create(null);
+
+ tree.filter(({ type, path }) => type === 'blob' && path.indexOf(workspacePath) === 0)
+ .forEach((blobEntry) => {
+ // Make path relative
+ const path = blobEntry.path.slice(workspacePath.length);
+ // Collect blob sha
+ treeShaMap[path] = blobEntry.sha;
+ // Collect parents path
+ let parentPath = '';
+ path.split('/').slice(0, -1).forEach((folderName) => {
+ const folderPath = `${parentPath}${folderName}/`;
+ treeFolderMap[folderPath] = parentPath;
+ parentPath = folderPath;
+ });
+ // Collect file path
+ if (path.indexOf('.stackedit-data/') === 0) {
+ treeDataMap[path] = true;
+ } else if (endsWith(path, '.md')) {
+ treeFileMap[path] = parentPath;
+ } else if (endsWith(path, '.sync')) {
+ treeSyncLocationMap[path] = true;
+ } else if (endsWith(path, '.publish')) {
+ treePublishLocationMap[path] = true;
+ }
+ });
+
+ // Collect changes
+ const changes = [];
+ const pathIds = {};
+ const syncDataToIgnore = Object.create(null);
+ const getId = (path) => {
+ const syncData = syncDataByPath[path];
+ const id = syncData ? syncData.itemId : utils.uid();
+ pathIds[path] = id;
+ return id;
+ };
+
+ // Folder creations/updates
+ // Assume map entries are sorted from top to bottom
+ Object.entries(treeFolderMap).forEach(([path, parentPath]) => {
+ const id = getId(path);
+ const item = utils.addItemHash({
+ id,
+ type: 'folder',
+ name: path.slice(parentPath.length, -1),
+ parentId: pathIds[parentPath] || null,
+ });
+ changes.push({
+ syncDataId: path,
+ item,
+ syncData: {
+ id: path,
+ itemId: id,
+ type: item.type,
+ hash: item.hash,
+ },
+ });
+ });
+
+ // File creations/updates
+ Object.entries(treeFileMap).forEach(([path, parentPath]) => {
+ const id = getId(path);
+ const item = utils.addItemHash({
+ id,
+ type: 'file',
+ name: path.slice(parentPath.length, -'.md'.length),
+ parentId: pathIds[parentPath] || null,
+ });
+ changes.push({
+ syncDataId: path,
+ item,
+ syncData: {
+ id: path,
+ itemId: id,
+ type: item.type,
+ hash: item.hash,
+ },
+ });
+
+ // Content creations/updates
+ const contentSyncData = syncDataByItemId[`${id}/content`];
+ if (contentSyncData) {
+ syncDataToIgnore[contentSyncData.id] = true;
+ }
+ if (!contentSyncData || contentSyncData.sha !== treeShaMap[path]) {
+ // Use `/` as a prefix to get a unique syncData id
+ changes.push({
+ syncDataId: `/${path}`,
+ item: {
+ id: `${id}/content`,
+ type: 'content',
+ // Need a truthy value to force saving sync data
+ hash: 1,
+ },
+ syncData: {
+ id: `/${path}`,
+ itemId: `${id}/content`,
+ type: 'content',
+ // Need a truthy value to force downloading the content
+ hash: 1,
+ },
+ });
+ }
+ });
+
+ // Data creations/updates
+ Object.keys(treeDataMap).forEach((path) => {
+ try {
+ const [, id] = path.match(/^\.stackedit-data\/([\s\S]+)\.json$/);
+ pathIds[path] = id;
+ const syncData = syncDataByItemId[id];
+ if (syncData) {
+ syncDataToIgnore[syncData.id] = true;
+ }
+ if (!syncData || syncData.sha !== treeShaMap[path]) {
+ changes.push({
+ syncDataId: path,
+ item: {
+ id,
+ type: 'data',
+ // Need a truthy value to force saving sync data
+ hash: 1,
+ },
+ syncData: {
+ id: path,
+ itemId: id,
+ type: 'data',
+ // Need a truthy value to force downloading the content
+ hash: 1,
+ },
+ });
+ }
+ } catch (e) {
+ // Ignore parsing errors
+ }
+ });
+
+ // Location creations/updates
+ [{
+ type: 'syncLocation',
+ map: treeSyncLocationMap,
+ pathMatcher: /^([\s\S]+)\.([\w-]+)\.sync$/,
+ }, {
+ type: 'publishLocation',
+ map: treePublishLocationMap,
+ pathMatcher: /^([\s\S]+)\.([\w-]+)\.publish$/,
+ }]
+ .forEach(({ type, map, pathMatcher }) => Object.keys(map).forEach((path) => {
+ try {
+ const [, filePath, data] = path.match(pathMatcher);
+ // If there is a corresponding md file in the tree
+ const fileId = pathIds[`${filePath}.md`];
+ if (fileId) {
+ const id = getId(path);
+ const item = utils.addItemHash({
+ ...JSON.parse(utils.decodeBase64(data)),
+ id,
+ type,
+ fileId,
+ });
+ changes.push({
+ syncDataId: path,
+ item,
+ syncData: {
+ id: path,
+ itemId: id,
+ type: item.type,
+ hash: item.hash,
+ },
+ });
+ }
+ } catch (e) {
+ // Ignore parsing errors
+ }
+ }));
+
+ // Deletions
+ Object.keys(syncDataByPath).forEach((path) => {
+ if (!pathIds[path] && !syncDataToIgnore[path]) {
+ changes.push({ syncDataId: path });
+ }
+ });
+
+ return changes;
+ });
+ },
+ saveSimpleItem(item) {
+ const path = store.getters.itemPaths[item.fileId || item.id];
+ return Promise.resolve()
+ .then(() => {
+ const syncToken = store.getters['workspace/syncToken'];
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ const syncData = {
+ itemId: item.id,
+ type: item.type,
+ hash: item.hash,
+ };
+
+ if (item.type === 'file') {
+ syncData.id = `${path}.md`;
+ } else if (item.type === 'folder') {
+ syncData.id = path;
+ }
+ if (syncData.id) {
+ return syncData;
+ }
+
+ // locations are stored as paths, so we upload an empty file
+ const data = utils.encodeBase64(utils.serializeObject({
+ ...item,
+ id: undefined,
+ type: undefined,
+ fileId: undefined,
+ }), true);
+ const extension = item.type === 'syncLocation' ? 'sync' : 'publish';
+ syncData.id = `${path}.${data}.${extension}`;
+ return githubHelper.uploadFile(
+ syncToken,
+ owner,
+ repo,
+ branch,
+ getAbsolutePath(syncData),
+ '',
+ treeShaMap[syncData.id],
+ ).then(() => syncData);
+ });
+ },
+ removeItem(syncData) {
+ // Ignore content deletion
+ if (syncData.type === 'content') {
+ return Promise.resolve();
+ }
+ const syncToken = store.getters['workspace/syncToken'];
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return githubHelper.removeFile(
+ syncToken,
+ owner,
+ repo,
+ branch,
+ getAbsolutePath(syncData),
+ treeShaMap[syncData.id],
+ );
+ },
+ downloadContent(token, syncLocation) {
+ const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
+ const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
+ if (!syncData || !contentSyncData) {
+ return Promise.resolve();
+ }
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return githubHelper.downloadFile(token, owner, repo, branch, getAbsolutePath(syncData))
+ .then(({ sha, content }) => {
+ const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
+ if (item.hash !== contentSyncData.hash) {
+ store.dispatch('data/patchSyncData', {
+ [contentSyncData.id]: {
+ ...contentSyncData,
+ hash: item.hash,
+ sha,
+ },
+ });
+ }
+ return item;
+ });
+ },
+ downloadData(dataId) {
+ const syncData = store.getters['data/syncDataByItemId'][dataId];
+ if (!syncData) {
+ return Promise.resolve();
+ }
+ const syncToken = store.getters['workspace/syncToken'];
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return githubHelper.downloadFile(syncToken, owner, repo, branch, getAbsolutePath(syncData))
+ .then(({ sha, content }) => {
+ const item = JSON.parse(content);
+ if (item.hash !== syncData.hash) {
+ store.dispatch('data/patchSyncData', {
+ [syncData.id]: {
+ ...syncData,
+ hash: item.hash,
+ sha,
+ },
+ });
+ }
+ return item;
+ });
+ },
+ uploadContent(token, content, syncLocation) {
+ const contentSyncData = store.getters['data/syncDataByItemId'][`${syncLocation.fileId}/content`];
+ if (contentSyncData && contentSyncData.hash === content.hash) {
+ return Promise.resolve(syncLocation);
+ }
+ const syncData = store.getters['data/syncDataByItemId'][syncLocation.fileId];
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return githubHelper.uploadFile(
+ token,
+ owner,
+ repo,
+ branch,
+ getAbsolutePath(syncData),
+ Provider.serializeContent(content),
+ treeShaMap[syncData.id],
+ )
+ .then((res) => {
+ const id = `/${syncData.id}`;
+ store.dispatch('data/patchSyncData', {
+ [id]: {
+ // Build sync data
+ id,
+ itemId: content.id,
+ type: content.type,
+ hash: content.hash,
+ sha: res.content.sha,
+ },
+ });
+ return syncLocation;
+ });
+ },
+ uploadData(item) {
+ const oldSyncData = store.getters['data/syncDataByItemId'][item.id];
+ if (oldSyncData && oldSyncData.hash === item.hash) {
+ return Promise.resolve();
+ }
+ const syncData = {
+ id: `.stackedit-data/${item.id}.json`,
+ itemId: item.id,
+ type: item.type,
+ hash: item.hash,
+ };
+ const syncToken = store.getters['workspace/syncToken'];
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return githubHelper.uploadFile(
+ syncToken,
+ owner,
+ repo,
+ branch,
+ getAbsolutePath(syncData),
+ JSON.stringify(item),
+ oldSyncData && oldSyncData.sha,
+ )
+ .then(res => store.dispatch('data/patchSyncData', {
+ [syncData.id]: {
+ ...syncData,
+ sha: res.content.sha,
+ },
+ }));
+ },
+ onSyncEnd() {
+ // Clean up
+ treeShaMap = null;
+ treeFolderMap = null;
+ treeFileMap = null;
+ treeDataMap = null;
+ treeSyncLocationMap = null;
+ treePublishLocationMap = null;
+ },
+ listRevisions(token, fileId) {
+ const { owner, repo, branch } = getWorkspaceWithOwner();
+ return getSyncData(fileId)
+ .then(syncData => githubHelper.getCommits(token, owner, repo, branch, syncData.id))
+ .then(entries => entries.map((entry) => {
+ let user;
+ if (entry.author && entry.author.login) {
+ user = entry.author;
+ } else if (entry.committer && entry.committer.login) {
+ user = entry.committer;
+ }
+ userSvc.addInfo({ id: user.login, name: user.login, imageUrl: user.avatar_url });
+ const date = (entry.commit.author && entry.commit.author.date)
+ || (entry.commit.committer && entry.commit.committer.date);
+ return {
+ id: entry.sha,
+ sub: user.login,
+ created: date ? new Date(date).getTime() : 1,
+ };
+ })
+ .sort((revision1, revision2) => revision2.created - revision1.created));
+ },
+ getRevisionContent(token, fileId, revisionId) {
+ const { owner, repo } = getWorkspaceWithOwner();
+ return getSyncData(fileId)
+ .then(syncData => githubHelper.downloadFile(
+ token, owner, repo, revisionId, getAbsolutePath(syncData)))
+ .then(({ content }) => Provider.parseContent(content, `${fileId}/content`));
+ },
+});
diff --git a/src/services/providers/googleDriveAppDataProvider.js b/src/services/providers/googleDriveAppDataProvider.js
index ff3017b3..fca5b53d 100644
--- a/src/services/providers/googleDriveAppDataProvider.js
+++ b/src/services/providers/googleDriveAppDataProvider.js
@@ -1,9 +1,11 @@
import store from '../../store';
import googleHelper from './helpers/googleHelper';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
import utils from '../utils';
-export default providerRegistry.register({
+let syncStartPageToken;
+
+export default new Provider({
id: 'googleDriveAppData',
getToken() {
return store.getters['workspace/syncToken'];
@@ -42,13 +44,13 @@ export default providerRegistry.register({
change.syncDataId = change.fileId;
return true;
});
- changes.startPageToken = result.startPageToken;
+ syncStartPageToken = result.startPageToken;
return changes;
});
},
- setAppliedChanges(changes) {
+ onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
- syncStartPageToken: changes.startPageToken,
+ syncStartPageToken,
});
},
saveSimpleItem(item, syncData, ifNotTooLate) {
@@ -83,7 +85,7 @@ export default providerRegistry.register({
const syncToken = store.getters['workspace/syncToken'];
return googleHelper.downloadAppDataFile(syncToken, syncData.id)
.then((data) => {
- const item = JSON.parse(data);
+ const item = utils.addItemHash(JSON.parse(data));
if (item.hash !== syncData.hash) {
store.dispatch('data/patchSyncData', {
[syncData.id]: {
@@ -96,11 +98,11 @@ export default providerRegistry.register({
});
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
- return this.uploadData(content, `${syncLocation.fileId}/content`, ifNotTooLate)
+ return this.uploadData(content, ifNotTooLate)
.then(() => syncLocation);
},
- uploadData(item, dataId, ifNotTooLate) {
- const syncData = store.getters['data/syncDataByItemId'][dataId];
+ uploadData(item, ifNotTooLate) {
+ const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
diff --git a/src/services/providers/googleDriveProvider.js b/src/services/providers/googleDriveProvider.js
index c747bfb0..e0fc988a 100644
--- a/src/services/providers/googleDriveProvider.js
+++ b/src/services/providers/googleDriveProvider.js
@@ -1,10 +1,9 @@
import store from '../../store';
import googleHelper from './helpers/googleHelper';
-import providerUtils from './providerUtils';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
import utils from '../utils';
-export default providerRegistry.register({
+export default new Provider({
id: 'googleDrive',
getToken(location) {
const token = store.getters['data/googleTokens'][location.sub];
@@ -110,7 +109,7 @@ export default providerRegistry.register({
},
downloadContent(token, syncLocation) {
return googleHelper.downloadFile(token, syncLocation.driveFileId)
- .then(content => providerUtils.parseContent(content, `${syncLocation.fileId}/content`));
+ .then(content => Provider.parseContent(content, `${syncLocation.fileId}/content`));
},
uploadContent(token, content, syncLocation, ifNotTooLate) {
const file = store.state.file.itemMap[syncLocation.fileId];
@@ -124,7 +123,7 @@ export default providerRegistry.register({
name,
parents,
undefined,
- providerUtils.serializeContent(content),
+ Provider.serializeContent(content),
undefined,
syncLocation.driveFileId,
undefined,
@@ -156,7 +155,7 @@ export default providerRegistry.register({
if (!driveFile) {
return null;
}
- if (providerUtils.openFileWithLocation(store.getters['syncLocation/items'], {
+ if (Provider.openFileWithLocation(store.getters['syncLocation/items'], {
providerId: this.id,
driveFileId: driveFile.id,
})) {
diff --git a/src/services/providers/googleDriveWorkspaceProvider.js b/src/services/providers/googleDriveWorkspaceProvider.js
index 622975b5..6b56b00b 100644
--- a/src/services/providers/googleDriveWorkspaceProvider.js
+++ b/src/services/providers/googleDriveWorkspaceProvider.js
@@ -1,11 +1,8 @@
import store from '../../store';
import googleHelper from './helpers/googleHelper';
-import providerRegistry from './providerRegistry';
-import providerUtils from './providerUtils';
+import Provider from './common/Provider';
import utils from '../utils';
-let fileIdToOpen;
-
const getSyncData = (fileId) => {
const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
return syncData
@@ -13,19 +10,22 @@ const getSyncData = (fileId) => {
: Promise.reject(); // No need for a proper error message.
};
-export default providerRegistry.register({
+let fileIdToOpen;
+let syncStartPageToken;
+
+export default new Provider({
id: 'googleDriveWorkspace',
getToken() {
return store.getters['workspace/syncToken'];
},
initWorkspace() {
- const makeWorkspaceIdParams = folderId => ({
+ const makeWorkspaceParams = folderId => ({
providerId: this.id,
folderId,
});
const makeWorkspaceId = folderId => folderId && utils.makeWorkspaceId(
- makeWorkspaceIdParams(folderId));
+ makeWorkspaceParams(folderId));
const getWorkspace = folderId =>
store.getters['data/sanitizedWorkspaces'][makeWorkspaceId(folderId)];
@@ -155,7 +155,7 @@ export default providerRegistry.register({
}))
.then((workspace) => {
// Fix the URL hash
- utils.setQueryParams(makeWorkspaceIdParams(workspace.folderId));
+ utils.setQueryParams(makeWorkspaceParams(workspace.folderId));
if (workspace.url !== location.href) {
store.dispatch('data/patchWorkspaces', {
[workspace.id]: {
@@ -339,13 +339,13 @@ export default providerRegistry.register({
changes.push(contentChange);
}
});
- changes.startPageToken = result.startPageToken;
+ syncStartPageToken = result.startPageToken;
return changes;
});
},
- setAppliedChanges(changes) {
+ onChangesApplied() {
store.dispatch('data/patchLocalSettings', {
- syncStartPageToken: changes.startPageToken,
+ syncStartPageToken,
});
},
saveSimpleItem(item, syncData, ifNotTooLate) {
@@ -419,7 +419,7 @@ export default providerRegistry.register({
}
return googleHelper.downloadFile(token, syncData.id)
.then((content) => {
- const item = providerUtils.parseContent(content, `${syncLocation.fileId}/content`);
+ const item = Provider.parseContent(content, `${syncLocation.fileId}/content`);
if (item.hash !== contentSyncData.hash) {
store.dispatch('data/patchSyncData', {
[contentSyncData.id]: {
@@ -428,7 +428,7 @@ export default providerRegistry.register({
},
});
}
- // Open the file requested by action if it was to synced yet
+ // Open the file requested by action if it wasn't synced yet
if (fileIdToOpen && fileIdToOpen === syncData.id) {
fileIdToOpen = null;
// Open the file once downloaded content has been stored
@@ -474,7 +474,7 @@ export default providerRegistry.register({
undefined,
undefined,
undefined,
- providerUtils.serializeContent(content),
+ Provider.serializeContent(content),
undefined,
syncData.id,
undefined,
@@ -494,7 +494,7 @@ export default providerRegistry.register({
id: item.id,
folderId: workspace.folderId,
},
- providerUtils.serializeContent(content),
+ Provider.serializeContent(content),
undefined,
undefined,
undefined,
@@ -523,8 +523,8 @@ export default providerRegistry.register({
}))
.then(() => syncLocation);
},
- uploadData(item, dataId, ifNotTooLate) {
- const syncData = store.getters['data/syncDataByItemId'][dataId];
+ uploadData(item, ifNotTooLate) {
+ const syncData = store.getters['data/syncDataByItemId'][item.id];
if (syncData && syncData.hash === item.hash) {
return Promise.resolve();
}
@@ -570,6 +570,6 @@ export default providerRegistry.register({
getRevisionContent(token, fileId, revisionId) {
return getSyncData(fileId)
.then(syncData => googleHelper.downloadFileRevision(token, syncData.id, revisionId))
- .then(content => providerUtils.parseContent(content, `${fileId}/content`));
+ .then(content => Provider.parseContent(content, `${fileId}/content`));
},
});
diff --git a/src/services/providers/helpers/githubHelper.js b/src/services/providers/helpers/githubHelper.js
index cc2bf6c6..4ec99841 100644
--- a/src/services/providers/helpers/githubHelper.js
+++ b/src/services/providers/helpers/githubHelper.js
@@ -17,6 +17,16 @@ const request = (token, options) => networkSvc.request({
},
});
+const repoRequest = (token, owner, repo, options) => request(token, {
+ ...options,
+ url: `https://api.github.com/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/${options.url}`,
+});
+
+const getCommitMessage = (name, path) => {
+ const message = store.getters['data/computedSettings'].github[name];
+ return message.replace(/{{path}}/g, path);
+};
+
export default {
startOauth2(scopes, sub = null, silent = false) {
return networkSvc.startOauth2(
@@ -51,7 +61,7 @@ export default {
const token = {
scopes,
accessToken,
- name: res.body.name,
+ name: res.body.login,
sub: `${res.body.id}`,
repoFullAccess: scopes.indexOf('repo') !== -1,
};
@@ -63,21 +73,58 @@ export default {
addAccount(repoFullAccess = false) {
return this.startOauth2(getScopes({ repoFullAccess }));
},
+ getTree(token, owner, repo, sha) {
+ return repoRequest(token, owner, repo, {
+ url: `git/trees/${encodeURIComponent(sha)}?recursive=1`,
+ })
+ .then((res) => {
+ if (res.body.truncated) {
+ throw new Error('Git tree too big. Please remove some files in the repository.');
+ }
+ return res.body.tree;
+ });
+ },
+ getHeadTree(token, owner, repo, branch) {
+ return repoRequest(token, owner, repo, {
+ url: `branches/${encodeURIComponent(branch)}`,
+ })
+ .then(res => this.getTree(token, owner, repo, res.body.commit.commit.tree.sha));
+ },
+ getCommits(token, owner, repo, sha, path) {
+ return repoRequest(token, owner, repo, {
+ url: 'commits',
+ params: { sha, path },
+ })
+ .then(res => res.body);
+ },
uploadFile(token, owner, repo, branch, path, content, sha) {
- return request(token, {
+ return repoRequest(token, owner, repo, {
method: 'PUT',
- url: `https://api.github.com/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/contents/${encodeURIComponent(path)}`,
+ url: `contents/${encodeURIComponent(path)}`,
body: {
- message: 'Uploaded by https://stackedit.io/',
+ message: getCommitMessage(sha ? 'updateFileMessage' : 'createFileMessage', path),
content: utils.encodeBase64(content),
sha,
branch,
},
- });
+ })
+ .then(res => res.body);
+ },
+ removeFile(token, owner, repo, branch, path, sha) {
+ return repoRequest(token, owner, repo, {
+ method: 'DELETE',
+ url: `contents/${encodeURIComponent(path)}`,
+ body: {
+ message: getCommitMessage('deleteFileMessage', path),
+ sha,
+ branch,
+ },
+ })
+ .then(res => res.body);
},
downloadFile(token, owner, repo, branch, path) {
- return request(token, {
- url: `https://api.github.com/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/contents/${encodeURIComponent(path)}`,
+ return repoRequest(token, owner, repo, {
+ url: `contents/${encodeURIComponent(path)}`,
params: { ref: branch },
})
.then(res => ({
diff --git a/src/services/providers/helpers/googleHelper.js b/src/services/providers/helpers/googleHelper.js
index 83119d91..4748eb66 100644
--- a/src/services/providers/helpers/googleHelper.js
+++ b/src/services/providers/helpers/googleHelper.js
@@ -614,11 +614,8 @@ export default {
break;
}
case 'img': {
- let view = new google.picker.PhotosView();
- view.setType('flat');
- pickerBuilder.addView(view);
- view = new google.picker.PhotosView();
- view.setType('ofuser');
+ const view = new google.picker.PhotosView();
+ view.setType('highlights');
pickerBuilder.addView(view);
pickerBuilder.addView(google.picker.ViewId.PHOTO_UPLOAD);
break;
diff --git a/src/services/providers/wordpressProvider.js b/src/services/providers/wordpressProvider.js
index 30780df1..f6374774 100644
--- a/src/services/providers/wordpressProvider.js
+++ b/src/services/providers/wordpressProvider.js
@@ -1,8 +1,8 @@
import store from '../../store';
import wordpressHelper from './helpers/wordpressHelper';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
-export default providerRegistry.register({
+export default new Provider({
id: 'wordpress',
getToken(location) {
return store.getters['data/wordpressTokens'][location.sub];
diff --git a/src/services/providers/zendeskProvider.js b/src/services/providers/zendeskProvider.js
index aadf05db..633cd393 100644
--- a/src/services/providers/zendeskProvider.js
+++ b/src/services/providers/zendeskProvider.js
@@ -1,8 +1,8 @@
import store from '../../store';
import zendeskHelper from './helpers/zendeskHelper';
-import providerRegistry from './providerRegistry';
+import Provider from './common/Provider';
-export default providerRegistry.register({
+export default new Provider({
id: 'zendesk',
getToken(location) {
return store.getters['data/zendeskTokens'][location.sub];
diff --git a/src/services/publishSvc.js b/src/services/publishSvc.js
index 1042e722..de0dcd47 100644
--- a/src/services/publishSvc.js
+++ b/src/services/publishSvc.js
@@ -3,7 +3,7 @@ import store from '../store';
import utils from './utils';
import networkSvc from './networkSvc';
import exportSvc from './exportSvc';
-import providerRegistry from './providers/providerRegistry';
+import providerRegistry from './providers/common/providerRegistry';
const hasCurrentFilePublishLocations = () => !!store.getters['publishLocation/current'].length;
@@ -67,7 +67,7 @@ function publishFile(fileId) {
return loadContent(fileId)
.then(() => {
const publishLocations = [
- ...store.getters['publishLocation/groupedByFileId'][fileId] || [],
+ ...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
];
const publishOneContentLocation = () => {
const publishLocation = publishLocations.shift();
diff --git a/src/services/syncSvc.js b/src/services/syncSvc.js
index badd1430..15462c97 100644
--- a/src/services/syncSvc.js
+++ b/src/services/syncSvc.js
@@ -3,17 +3,23 @@ import store from '../store';
import utils from './utils';
import diffUtils from './diffUtils';
import networkSvc from './networkSvc';
-import providerRegistry from './providers/providerRegistry';
+import providerRegistry from './providers/common/providerRegistry';
import googleDriveAppDataProvider from './providers/googleDriveAppDataProvider';
-import './providers/googleDriveWorkspaceProvider';
import './providers/couchdbWorkspaceProvider';
+import './providers/githubWorkspaceProvider';
+import './providers/googleDriveWorkspaceProvider';
import tempFileSvc from './tempFileSvc';
+const minAutoSyncEvery = 60 * 1000; // 60 sec
const inactivityThreshold = 3 * 1000; // 3 sec
const restartSyncAfter = 30 * 1000; // 30 sec
-const minAutoSyncEvery = 60 * 1000; // 60 sec
+const restartContentSyncAfter = 500; // Restart if an authorize window pops up
const maxContentHistory = 20;
+const LAST_SEEN = 0;
+const LAST_MERGED = 1;
+const LAST_SENT = 2;
+
let actionProvider;
let workspaceProvider;
@@ -69,6 +75,32 @@ function setLastSyncActivity() {
localStorage.setItem(store.getters['workspace/lastSyncActivityKey'], currentDate);
}
+/**
+ * Upgrade hashes if syncedContent is from an old version
+ */
+function upgradeSyncedContent(syncedContent) {
+ if (syncedContent.v) {
+ return syncedContent;
+ }
+ const hashUpgrades = {};
+ const historyData = {};
+ const syncHistory = {};
+ Object.entries(syncedContent.historyData).forEach(([hash, content]) => {
+ const newContent = utils.addItemHash(content);
+ historyData[newContent.hash] = newContent;
+ hashUpgrades[hash] = newContent.hash;
+ });
+ Object.entries(syncedContent.syncHistory).forEach(([id, hashEntries]) => {
+ syncHistory[id] = hashEntries.map(hash => hashUpgrades[hash]);
+ });
+ return {
+ ...syncedContent,
+ historyData,
+ syncHistory,
+ v: 1,
+ };
+}
+
/**
* Clean a syncedContent.
*/
@@ -103,8 +135,10 @@ function applyChanges(changes) {
const existingItem = existingSyncData && storeItemMap[existingSyncData.itemId];
if (!change.item && existingSyncData) {
// Item was removed
- delete syncData[change.syncDataId];
- saveSyncData = true;
+ if (syncData[change.syncDataId]) {
+ delete syncData[change.syncDataId];
+ saveSyncData = true;
+ }
if (existingItem) {
// Remove object from the store
store.commit(`${existingItem.type}/deleteItem`, existingItem.id);
@@ -112,8 +146,10 @@ function applyChanges(changes) {
}
} else if (change.item && change.item.hash) {
// Item was modifed
- syncData[change.syncDataId] = change.syncData;
- saveSyncData = true;
+ if ((existingSyncData || {}).hash !== change.syncData.hash) {
+ syncData[change.syncDataId] = change.syncData;
+ saveSyncData = true;
+ }
if (
// If no sync data or existing one is different
(existingSyncData || {}).hash !== change.item.hash
@@ -133,10 +169,6 @@ function applyChanges(changes) {
}
}
-const LAST_SEEN = 0;
-const LAST_MERGED = 1;
-const LAST_SENT = 2;
-
/**
* Create a sync location by uploading the current file content.
*/
@@ -157,8 +189,8 @@ function createSyncLocation(syncLocation) {
}, syncLocation)
.then(syncLocationToStore => localDbSvc.loadSyncedContent(fileId)
.then(() => {
- const newSyncedContent = utils.deepCopy(
- store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
+ const newSyncedContent = utils.deepCopy(upgradeSyncedContent(
+ store.state.syncedContent.itemMap[`${fileId}/syncedContent`]));
const newSyncHistoryItem = [];
newSyncedContent.syncHistory[syncLocation.id] = newSyncHistoryItem;
newSyncHistoryItem[LAST_SEEN] = content.hash;
@@ -172,12 +204,23 @@ function createSyncLocation(syncLocation) {
});
}
+// Prevent from sending new data too long after old data has been fetched
+const tooLateChecker = (timeout) => {
+ const tooLateAfter = Date.now() + timeout;
+ return cb => (res) => {
+ if (tooLateAfter < Date.now()) {
+ throw new Error('TOO_LATE');
+ }
+ return cb(res);
+ };
+};
+
class SyncContext {
restart = false;
attempted = {};
}
-/**
+/**
* Sync one file with all its locations.
*/
function syncFile(fileId, syncContext = new SyncContext()) {
@@ -189,7 +232,8 @@ function syncFile(fileId, syncContext = new SyncContext()) {
.then(() => {
const getFile = () => store.state.file.itemMap[fileId];
const getContent = () => store.state.content.itemMap[`${fileId}/content`];
- const getSyncedContent = () => store.state.syncedContent.itemMap[`${fileId}/syncedContent`];
+ const getSyncedContent = () => upgradeSyncedContent(
+ store.state.syncedContent.itemMap[`${fileId}/syncedContent`]);
const getSyncHistoryItem = syncLocationId => getSyncedContent().syncHistory[syncLocationId];
const isTempFile = () => {
@@ -206,8 +250,8 @@ function syncFile(fileId, syncContext = new SyncContext()) {
return true;
}
const locations = [
- ...store.getters['syncLocation/groupedByFileId'][fileId] || [],
- ...store.getters['publishLocation/groupedByFileId'][fileId] || [],
+ ...store.getters['syncLocation/filteredGroupedByFileId'][fileId] || [],
+ ...store.getters['publishLocation/filteredGroupedByFileId'][fileId] || [],
];
if (locations.length) {
// If file has explicit sync/publish locations, it's not a temp file
@@ -227,7 +271,7 @@ function syncFile(fileId, syncContext = new SyncContext()) {
const attemptedLocations = {};
const syncOneContentLocation = () => {
const syncLocations = [
- ...store.getters['syncLocation/groupedByFileId'][fileId] || [],
+ ...store.getters['syncLocation/filteredGroupedByFileId'][fileId] || [],
];
if (isWorkspaceSyncPossible()) {
syncLocations.unshift({ id: 'main', providerId: workspaceProvider.id, fileId });
@@ -289,7 +333,6 @@ function syncFile(fileId, syncContext = new SyncContext()) {
properties: utils.sanitizeText(mergedContent.properties),
discussions: mergedContent.discussions,
comments: mergedContent.comments,
- hash: 0,
});
// Retrieve content with new `hash` and freeze it
@@ -342,21 +385,11 @@ function syncFile(fileId, syncContext = new SyncContext()) {
return null;
}
- // Prevent from sending new content too long after old content has been fetched
- const syncStartTime = Date.now();
- const ifNotTooLate = cb => (res) => {
- // No time to refresh a token...
- if (syncStartTime + 500 < Date.now()) {
- throw new Error('TOO_LATE');
- }
- return cb(res);
- };
-
// Upload merged content
return provider.uploadContent(token, {
...mergedContent,
history: mergedContentHistory.slice(0, maxContentHistory),
- }, syncLocation, ifNotTooLate)
+ }, syncLocation, tooLateChecker(restartContentSyncAfter))
.then((syncLocationToStore) => {
// Replace sync location if modified
if (utils.serializeObject(syncLocation) !==
@@ -465,13 +498,11 @@ function syncDataItem(dataId) {
if (serverItem && serverItem.hash === mergedItem.hash) {
return null;
}
- return workspaceProvider.uploadData(mergedItem, dataId);
+ return workspaceProvider.uploadData(mergedItem, tooLateChecker(restartContentSyncAfter));
})
- .then(() => {
- store.dispatch('data/patchDataSyncData', {
- [dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
- });
- });
+ .then(() => store.dispatch('data/patchDataSyncData', {
+ [dataId]: utils.deepCopy(store.getters['data/syncDataByItemId'][dataId]),
+ }));
});
}
@@ -499,16 +530,12 @@ function syncWorkspace() {
.then((changes) => {
// Apply changes
applyChanges(changes);
- workspaceProvider.setAppliedChanges(changes);
+ if (workspaceProvider.onChangesApplied) {
+ workspaceProvider.onChangesApplied();
+ }
// Prevent from sending items too long after changes have been retrieved
- const syncStartTime = Date.now();
- const ifNotTooLate = cb => (res) => {
- if (syncStartTime + restartSyncAfter < Date.now()) {
- throw new Error('TOO_LATE');
- }
- return cb(res);
- };
+ const ifNotTooLate = tooLateChecker(restartSyncAfter);
// Called until no item to save
const saveNextItem = ifNotTooLate(() => {
@@ -529,12 +556,13 @@ function syncWorkspace() {
// Add file if content has been added
&& (item.type !== 'file' || syncDataByItemId[`${id}/content`])
) {
- promise = workspaceProvider.saveSimpleItem(
- // Use deepCopy to freeze objects
- utils.deepCopy(item),
- utils.deepCopy(existingSyncData),
- ifNotTooLate,
- )
+ promise = workspaceProvider
+ .saveSimpleItem(
+ // Use deepCopy to freeze objects
+ utils.deepCopy(item),
+ utils.deepCopy(existingSyncData),
+ ifNotTooLate,
+ )
.then(resultSyncData => store.dispatch('data/patchSyncData', {
[resultSyncData.id]: resultSyncData,
}))
@@ -612,6 +640,9 @@ function syncWorkspace() {
.then(() => syncNextFile());
};
+ const onSyncEnd = () => Promise.resolve(
+ workspaceProvider.onSyncEnd && workspaceProvider.onSyncEnd());
+
return Promise.resolve()
.then(() => saveNextItem())
.then(() => removeNextItem())
@@ -629,6 +660,13 @@ function syncWorkspace() {
}
return syncNextFile();
})
+ .then(
+ () => onSyncEnd(),
+ err => onSyncEnd().then(() => {
+ throw err;
+ }, () => {
+ throw err;
+ }))
.then(
() => {
if (syncContext.restart) {
diff --git a/src/services/userSvc.js b/src/services/userSvc.js
index ae7aa0cb..d0566d48 100644
--- a/src/services/userSvc.js
+++ b/src/services/userSvc.js
@@ -4,6 +4,10 @@ import store from '../store';
const promised = {};
export default {
+ addInfo({ id, name, imageUrl }) {
+ promised[id] = true;
+ store.commit('userInfo/addItem', { id, name, imageUrl });
+ },
getInfo(userId) {
if (!promised[userId]) {
// Try to find a token with this sub
diff --git a/src/services/utils.js b/src/services/utils.js
index a0ffda71..7e405fec 100644
--- a/src/services/utils.js
+++ b/src/services/utils.js
@@ -118,6 +118,18 @@ export default {
}, {});
});
},
+ search(items, criteria) {
+ let result;
+ items.some((item) => {
+ // If every field fits the criteria
+ if (Object.entries(criteria).every(([key, value]) => value === item[key])) {
+ result = item;
+ return true;
+ }
+ return false;
+ });
+ return result;
+ },
uid() {
crypto.getRandomValues(array);
return array.cl_map(value => alphabet[value % radix]).join('');
@@ -132,26 +144,39 @@ export default {
}
return hash;
},
+ getItemHash(item) {
+ return this.hash(this.serializeObject({
+ ...item,
+ // These properties must not be part of the hash
+ id: undefined,
+ hash: undefined,
+ history: undefined,
+ }));
+ },
addItemHash(item) {
return {
...item,
- hash: this.hash(this.serializeObject({
- ...item,
- // These properties must not be part of the hash
- history: undefined,
- hash: undefined,
- })),
+ hash: this.getItemHash(item),
};
},
makeWorkspaceId(params) {
return Math.abs(this.hash(this.serializeObject(params))).toString(36);
},
- encodeBase64(str) {
- return btoa(encodeURIComponent(str).replace(/%([0-9A-F]{2})/g,
+ encodeBase64(str, urlSafe = false) {
+ const result = btoa(encodeURIComponent(str).replace(/%([0-9A-F]{2})/g,
(match, p1) => String.fromCharCode(`0x${p1}`)));
+ if (!urlSafe) {
+ return result;
+ }
+ return result
+ .replace(/\//g, '_') // Replace `/` with `_`
+ .replace(/\+/g, '-') // Replace `+` with `-`
+ .replace(/=+$/, ''); // Remove trailing `=`
},
decodeBase64(str) {
- return decodeURIComponent(atob(str).split('').map(
+ // In case of URL safe base64
+ const sanitizedStr = str.replace(/_/g, '/').replace(/-/g, '+');
+ return decodeURIComponent(atob(sanitizedStr).split('').map(
c => `%${`00${c.charCodeAt(0).toString(16)}`.slice(-2)}`).join(''));
},
computeProperties(yamlProperties) {
@@ -214,6 +239,9 @@ export default {
}
return result;
},
+ concatPaths(...paths) {
+ return paths.join('/').replace(/\/+/g, '/');
+ },
getHostname(url) {
urlParser.href = url;
return urlParser.hostname;
@@ -221,7 +249,7 @@ export default {
createHiddenIframe(url) {
const iframeElt = document.createElement('iframe');
iframeElt.style.position = 'absolute';
- iframeElt.style.left = '-9999px';
+ iframeElt.style.left = '-99px';
iframeElt.style.width = '1px';
iframeElt.style.height = '1px';
iframeElt.src = url;
diff --git a/src/store/explorer.js b/src/store/explorer.js
index c67bc8bb..ea2066ef 100644
--- a/src/store/explorer.js
+++ b/src/store/explorer.js
@@ -96,8 +96,8 @@ export default {
rootGetters['folder/items'].forEach((item) => {
nodeMap[item.id] = new Node(item, [], true);
});
- const syncLocationsByFileId = rootGetters['syncLocation/groupedByFileId'];
- const publishLocationsByFileId = rootGetters['publishLocation/groupedByFileId'];
+ const syncLocationsByFileId = rootGetters['syncLocation/filteredGroupedByFileId'];
+ const publishLocationsByFileId = rootGetters['publishLocation/filteredGroupedByFileId'];
rootGetters['file/items'].forEach((item) => {
const locations = [
...syncLocationsByFileId[item.id] || [],
diff --git a/src/store/index.js b/src/store/index.js
index c9d05079..9bb536b2 100644
--- a/src/store/index.js
+++ b/src/store/index.js
@@ -14,12 +14,13 @@ import folder from './folder';
import layout from './layout';
import modal from './modal';
import notification from './notification';
-import publishLocation from './publishLocation';
import queue from './queue';
import syncedContent from './syncedContent';
-import syncLocation from './syncLocation';
import userInfo from './userInfo';
import workspace from './workspace';
+import locationTemplate from './locationTemplate';
+import emptyPublishLocation from '../data/emptyPublishLocation';
+import emptySyncLocation from '../data/emptySyncLocation';
Vue.use(Vuex);
@@ -39,10 +40,10 @@ const store = new Vuex.Store({
layout,
modal,
notification,
- publishLocation,
+ publishLocation: locationTemplate(emptyPublishLocation),
queue,
syncedContent,
- syncLocation,
+ syncLocation: locationTemplate(emptySyncLocation),
userInfo,
workspace,
},
@@ -59,6 +60,41 @@ const store = new Vuex.Store({
utils.types.forEach(type => Object.assign(result, state[type].itemMap));
return result;
},
+ itemPaths: (state) => {
+ const result = {};
+ const getPath = (item) => {
+ let itemPath = result[item.id];
+ if (!itemPath) {
+        if (item.parentId === 'trash') {
+ itemPath = `.stackedit-trash/${item.name}`;
+ } else {
+ let name = item.name;
+ if (item.type === 'folder') {
+ name += '/';
+ }
+ const parent = state.folder.itemMap[item.parentId];
+ if (!parent) {
+ itemPath = name;
+ } else {
+ itemPath = getPath(parent) + name;
+ }
+ }
+ }
+ result[item.id] = itemPath;
+ return itemPath;
+ };
+ [...state.folder.items, ...state.file.items].forEach(item => getPath(item));
+ return result;
+ },
+ pathItems: (state, getters) => {
+ const result = {};
+ const itemPaths = getters.itemPaths;
+ const allItemMap = getters.allItemMap;
+ Object.entries(itemPaths).forEach(([id, path]) => {
+ result[path] = allItemMap[id];
+ });
+ return result;
+ },
isSponsor: (state, getters) => {
const sponsorToken = getters['workspace/sponsorToken'];
return state.light || state.monetizeSponsor || (sponsorToken && sponsorToken.isSponsor);
diff --git a/src/store/locationTemplate.js b/src/store/locationTemplate.js
new file mode 100644
index 00000000..f4932791
--- /dev/null
+++ b/src/store/locationTemplate.js
@@ -0,0 +1,43 @@
+import moduleTemplate from './moduleTemplate';
+import providerRegistry from '../services/providers/common/providerRegistry';
+
+const addToGroup = (groups, item) => {
+ const list = groups[item.fileId] || [];
+ list.push(item);
+ groups[item.fileId] = list;
+};
+
+export default (empty) => {
+ const module = moduleTemplate(empty);
+
+ module.getters = {
+ ...module.getters,
+ groupedByFileId: (state, getters) => {
+ const groups = {};
+ getters.items.forEach(item => addToGroup(groups, item));
+ return groups;
+ },
+ filteredGroupedByFileId: (state, getters) => {
+ const groups = {};
+ getters.items.filter((item) => {
+ // Filter items that we can't use
+ const provider = providerRegistry.providers[item.providerId];
+ return provider && provider.getToken(item);
+ }).forEach(item => addToGroup(groups, item));
+ return groups;
+ },
+ current: (state, getters, rootState, rootGetters) => {
+ const locations = getters.filteredGroupedByFileId[rootGetters['file/current'].id] || [];
+ return locations.map((location) => {
+ const provider = providerRegistry.providers[location.providerId];
+ return {
+ ...location,
+ description: provider.getDescription(location),
+ url: provider.getUrl(location),
+ };
+ });
+ },
+ };
+
+ return module;
+};
diff --git a/src/store/moduleTemplate.js b/src/store/moduleTemplate.js
index 58423519..68524c03 100644
--- a/src/store/moduleTemplate.js
+++ b/src/store/moduleTemplate.js
@@ -3,10 +3,7 @@ import utils from '../services/utils';
export default (empty, simpleHash = false) => {
// Use Date.now() as a simple hash function, which is ok for not-synced types
- const hashFunc = simpleHash ? Date.now : item => utils.hash(utils.serializeObject({
- ...item,
- hash: undefined,
- }));
+ const hashFunc = simpleHash ? Date.now : item => utils.getItemHash(item);
return {
namespaced: true,
@@ -19,7 +16,7 @@ export default (empty, simpleHash = false) => {
mutations: {
setItem(state, value) {
const item = Object.assign(empty(value.id), value);
- if (!item.hash) {
+ if (!item.hash || !simpleHash) {
item.hash = hashFunc(item);
}
Vue.set(state.itemMap, item.id, item);
diff --git a/src/store/publishLocation.js b/src/store/publishLocation.js
deleted file mode 100644
index 931503e7..00000000
--- a/src/store/publishLocation.js
+++ /dev/null
@@ -1,35 +0,0 @@
-import moduleTemplate from './moduleTemplate';
-import empty from '../data/emptyPublishLocation';
-import providerRegistry from '../services/providers/providerRegistry';
-
-const module = moduleTemplate(empty);
-
-module.getters = {
- ...module.getters,
- groupedByFileId: (state, getters) => {
- const result = {};
- getters.items.forEach((item) => {
- // Filter items that we can't use
- const provider = providerRegistry.providers[item.providerId];
- if (provider && provider.getToken(item)) {
- const list = result[item.fileId] || [];
- list.push(item);
- result[item.fileId] = list;
- }
- });
- return result;
- },
- current: (state, getters, rootState, rootGetters) => {
- const locations = getters.groupedByFileId[rootGetters['file/current'].id] || [];
- return locations.map((location) => {
- const provider = providerRegistry.providers[location.providerId];
- return {
- ...location,
- description: provider.getDescription(location),
- url: provider.getUrl(location),
- };
- });
- },
-};
-
-export default module;
diff --git a/src/store/syncLocation.js b/src/store/syncLocation.js
deleted file mode 100644
index c1277c97..00000000
--- a/src/store/syncLocation.js
+++ /dev/null
@@ -1,35 +0,0 @@
-import moduleTemplate from './moduleTemplate';
-import empty from '../data/emptySyncLocation';
-import providerRegistry from '../services/providers/providerRegistry';
-
-const module = moduleTemplate(empty);
-
-module.getters = {
- ...module.getters,
- groupedByFileId: (state, getters) => {
- const result = {};
- getters.items.forEach((item) => {
- // Filter items that we can't use
- const provider = providerRegistry.providers[item.providerId];
- if (provider && provider.getToken(item)) {
- const list = result[item.fileId] || [];
- list.push(item);
- result[item.fileId] = list;
- }
- });
- return result;
- },
- current: (state, getters, rootState, rootGetters) => {
- const locations = getters.groupedByFileId[rootGetters['file/current'].id] || [];
- return locations.map((location) => {
- const provider = providerRegistry.providers[location.providerId];
- return {
- ...location,
- description: provider.getDescription(location),
- url: provider.getUrl(location),
- };
- });
- },
-};
-
-export default module;
diff --git a/src/store/workspace.js b/src/store/workspace.js
index 9facee00..4d647b7f 100644
--- a/src/store/workspace.js
+++ b/src/store/workspace.js
@@ -36,6 +36,10 @@ export default {
const googleTokens = rootGetters['data/googleTokens'];
return googleTokens[workspace.sub];
}
+ case 'githubWorkspace': {
+ const githubTokens = rootGetters['data/githubTokens'];
+ return githubTokens[workspace.sub];
+ }
case 'couchdbWorkspace': {
const couchdbTokens = rootGetters['data/couchdbTokens'];
return couchdbTokens[workspace.id];