Content merge

parent d258d1c9c4
commit 0280df2bbb
@@ -1,7 +1,7 @@
export default () => ({
  id: null,
  type: 'syncContent',
  contentRevisions: {},
  historyData: {},
  syncLocationData: {},
  updated: 0,
});
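A hypothetical usage sketch of this factory, illustrative only: the pre-change syncSvc code further down in this diff seeds missing records with the same `{ ...emptySyncContent(), id }` pattern; the fileId below is invented.

// Illustrative only: building a fresh syncContent record for a file.
import emptySyncContent from '../data/emptySyncContent';

const fileId = 'file-1'; // illustrative id, not from the commit
const syncContent = {
  ...emptySyncContent(),
  id: `${fileId}/syncContent`,
};
// syncContent.type === 'syncContent', syncContent.updated === 0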
@@ -1,482 +0,0 @@
import 'clunderscore';
import DiffMatchPatch from 'diff-match-patch';

var clDiffUtils = {
  cloneObject: cloneObject,
  offsetToPatch: offsetToPatch,
  patchToOffset: patchToOffset,
  serializeObject: serializeObject,
  flattenContent: flattenContent,
  makePatchableText: makePatchableText,
  restoreDiscussionOffsets: restoreDiscussionOffsets,
  makeContentChange: makeContentChange,
  applyContentChanges: applyContentChanges,
  getTextPatches: getTextPatches,
  getObjectPatches: getObjectPatches,
  quickPatch: quickPatch,
  mergeObjects: mergeObjects,
  mergeFlattenContent: mergeFlattenContent
}

var marker = '\uF111\uF222\uF333\uF444'
var DIFF_DELETE = -1
var DIFF_INSERT = 1
var DIFF_EQUAL = 0
var diffMatchPatch = new DiffMatchPatch() // eslint-disable-line new-cap
var diffMatchPatchStrict = new DiffMatchPatch() // eslint-disable-line new-cap
diffMatchPatchStrict.Match_Threshold = 0
diffMatchPatchStrict.Patch_DeleteThreshold = 0
var diffMatchPatchPermissive = new DiffMatchPatch() // eslint-disable-line new-cap
diffMatchPatchPermissive.Match_Distance = 999999999

function cloneObject (obj) {
  return JSON.parse(JSON.stringify(obj))
}

function offsetToPatch (text, offset) {
  var patch = diffMatchPatchPermissive.patch_make(text, [
    [0, text.slice(0, offset)],
    [1, marker],
    [0, text.slice(offset)]
  ])[0]
  var diffs = patch.diffs.cl_map(function (diff) {
    if (!diff[0]) {
      return diff[1]
    } else if (diff[1] === marker) {
      return ''
    }
  })
  return {
    diffs: diffs,
    length: patch.length1,
    start: patch.start1
  }
}

function patchToOffset (text, patch) {
  var markersLength = 0
  var diffs = patch.diffs.cl_map(function (diff) {
    if (!diff) {
      markersLength += marker.length
      return [1, marker]
    } else {
      return [0, diff]
    }
  })
  return diffMatchPatchPermissive.patch_apply([{
    diffs: diffs,
    length1: patch.length,
    length2: patch.length + markersLength,
    start1: patch.start,
    start2: patch.start
  }], text)[0].indexOf(marker)
}

function flattenObject (obj) {
  return obj.cl_reduce(function (result, value, key) {
    result[key] = value[1]
    return result
  }, {})
}

function flattenContent (content) {
  var result = ({}).cl_extend(content)
  result.properties = flattenObject(content.properties)
  result.discussions = flattenObject(content.discussions)
  result.comments = flattenObject(content.comments)
  result.text = content.text.cl_reduce(function (text, item) {
    switch (item.type) {
      case 'discussion':
        if (result.discussions[item.id]) {
          result.discussions[item.id][item.name] = text.length
        }
        return text
      default:
        return text + item[1]
    }
  }, '')
  return result
}

function getTextPatches (oldText, newText) {
  var diffs = diffMatchPatch.diff_main(oldText, newText)
  diffMatchPatch.diff_cleanupEfficiency(diffs)
  var patches = []
  var startOffset = 0
  diffs.cl_each(function (change) {
    var changeType = change[0]
    var changeText = change[1]
    switch (changeType) {
      case DIFF_EQUAL:
        startOffset += changeText.length
        break
      case DIFF_DELETE:
        changeText && patches.push({
          o: startOffset,
          d: changeText
        })
        break
      case DIFF_INSERT:
        changeText && patches.push({
          o: startOffset,
          a: changeText
        })
        startOffset += changeText.length
        break
    }
  })
  return patches.length ? patches : undefined
}

function getObjectPatches (oldObject, newObjects) {
  var valueHash = Object.create(null)
  var valueArray = []
  oldObject = hashObject(oldObject, valueHash, valueArray)
  newObjects = hashObject(newObjects, valueHash, valueArray)
  var diffs = diffMatchPatch.diff_main(oldObject, newObjects)
  var patches = []
  diffs.cl_each(function (change) {
    var changeType = change[0]
    var changeHash = change[1]
    if (changeType === DIFF_EQUAL) {
      return
    }
    changeHash.split('').cl_each(function (objHash) {
      var obj = valueArray[objHash.charCodeAt(0)]
      var patch = {
        k: obj[0]
      }
      patch[changeType === DIFF_DELETE ? 'd' : 'a'] = obj[1]
      patches.push(patch)
    })
  })
  return patches.length ? patches : undefined
}

function makePatchableText (content, markerKeys, markerIdxMap) {
  var markers = []
  // Sort keys to have predictable marker positions, in case of same offset
  var discussionKeys = Object.keys(content.discussions).sort()
  discussionKeys.cl_each(function (discussionId) {
    function addMarker (offsetName) {
      var markerKey = discussionId + offsetName
      if (discussion[offsetName] !== undefined) {
        var idx = markerIdxMap[markerKey]
        if (idx === undefined) {
          idx = markerKeys.length
          markerIdxMap[markerKey] = idx
          markerKeys.push({
            id: discussionId,
            offsetName: offsetName
          })
        }
        markers.push({
          idx: idx,
          offset: discussion[offsetName]
        })
      }
    }

    var discussion = content.discussions[discussionId]
    if (discussion.offset0 === discussion.offset1) {
      // Remove discussion offsets if markers are at the same position
      discussion.offset0 = discussion.offset1 = undefined
    } else {
      addMarker('offset0')
      addMarker('offset1')
    }
  })

  var lastOffset = 0
  var result = ''
  markers
    .sort(function (marker1, marker2) {
      return marker1.offset - marker2.offset
    })
    .cl_each(function (marker) {
      result +=
        content.text.slice(lastOffset, marker.offset) +
        String.fromCharCode(0xe000 + marker.idx) // Use a character from the private use area
      lastOffset = marker.offset
    })
  return result + content.text.slice(lastOffset)
}

function stripDiscussionOffsets (objectMap) {
  return objectMap.cl_reduce(function (result, object, id) {
    result[id] = {
      text: object.text
    }
    return result
  }, {})
}

function restoreDiscussionOffsets (content, markerKeys) {
  var len = content.text.length
  var maxIdx = markerKeys.length
  for (var i = 0; i < len; i++) {
    var idx = content.text.charCodeAt(i) - 0xe000
    if (idx >= 0 && idx < maxIdx) {
      var markerKey = markerKeys[idx]
      content.text = content.text.slice(0, i) + content.text.slice(i + 1)
      var discussion = content.discussions[markerKey.id]
      if (discussion) {
        discussion[markerKey.offsetName] = i
      }
      i-- // We just removed the current character, we may have multiple markers with same offset
    }
  }
}

function makeContentChange (oldContent, newContent) {
  var markerKeys = []
  var markerIdxMap = Object.create(null)
  var oldText = makePatchableText(oldContent, markerKeys, markerIdxMap)
  var newText = makePatchableText(newContent, markerKeys, markerIdxMap)
  var textPatches = getTextPatches(oldText, newText)
  textPatches && textPatches.cl_each(function (patch) {
    // If markers are present, replace changeText with an array of text and markers
    var changeText = patch.a || patch.d
    var textItems = []
    var lastItem = ''
    var len = changeText.length
    var maxIdx = markerKeys.length
    for (var i = 0; i < len; i++) {
      var idx = changeText.charCodeAt(i) - 0xe000
      if (idx >= 0 && idx < maxIdx) {
        var markerKey = markerKeys[idx]
        lastItem.length && textItems.push(lastItem)
        textItems.push({
          type: 'discussion',
          name: markerKey.offsetName,
          id: markerKey.id
        })
        lastItem = ''
      } else {
        lastItem += changeText[i]
      }
    }
    if (textItems.length) {
      lastItem.length && textItems.push(lastItem)
      if (patch.a) {
        patch.a = textItems
      } else {
        patch.d = textItems
      }
    }
  })
  var propertiesPatches = getObjectPatches(oldContent.properties, newContent.properties)
  var discussionsPatches = getObjectPatches(
    stripDiscussionOffsets(oldContent.discussions),
    stripDiscussionOffsets(newContent.discussions)
  )
  var commentsPatches = getObjectPatches(oldContent.comments, newContent.comments)
  if (textPatches || propertiesPatches || discussionsPatches || commentsPatches) {
    return {
      text: textPatches,
      properties: propertiesPatches,
      discussions: discussionsPatches,
      comments: commentsPatches
    }
  }
}

function applyContentChanges (content, contentChanges, isBackward) {
  function applyObjectPatches (obj, patches) {
    if (patches) {
      patches.cl_each(function (patch) {
        if (!patch.a ^ !isBackward) {
          obj[patch.k] = patch.a || patch.d
        } else {
          delete obj[patch.k]
        }
      })
    }
  }

  var markerKeys = []
  var markerIdxMap = Object.create(null)
  var result = {
    text: makePatchableText(content, markerKeys, markerIdxMap),
    properties: cloneObject(content.properties),
    discussions: stripDiscussionOffsets(content.discussions),
    comments: cloneObject(content.comments)
  }

  contentChanges.cl_each(function (contentChange) {
    var textPatches = contentChange.text || []
    if (isBackward) {
      textPatches = textPatches.slice().reverse()
    }
    result.text = textPatches.cl_reduce(function (text, patch) {
      var isAdd = !patch.a ^ !isBackward
      var textChanges = patch.a || patch.d || ''
      // When no marker is present, textChanges is a string
      if (typeof textChanges === 'string') {
        textChanges = [textChanges]
      }
      var textChange = textChanges.cl_map(function (textChange) {
        if (!textChange.type) {
          // textChange is a string
          return textChange
        }
        // textChange is a marker
        var markerKey = textChange.id + textChange.name
        var idx = markerIdxMap[markerKey]
        if (idx === undefined) {
          idx = markerKeys.length
          markerIdxMap[markerKey] = idx
          markerKeys.push({
            id: textChange.id,
            offsetName: textChange.name
          })
        }
        return String.fromCharCode(0xe000 + idx)
      }).join('')
      if (!textChange) {
        return text
      } else if (isAdd) {
        return text.slice(0, patch.o).concat(textChange).concat(text.slice(patch.o))
      } else {
        return text.slice(0, patch.o).concat(text.slice(patch.o + textChange.length))
      }
    }, result.text)

    applyObjectPatches(result.properties, contentChange.properties)
    applyObjectPatches(result.discussions, contentChange.discussions)
    applyObjectPatches(result.comments, contentChange.comments)
  })

  restoreDiscussionOffsets(result, markerKeys)
  return result
}

function serializeObject (obj) {
  return JSON.stringify(obj, function (key, value) {
    return Object.prototype.toString.call(value) === '[object Object]'
      ? Object.keys(value).sort().cl_reduce(function (sorted, key) {
        sorted[key] = value[key]
        return sorted
      }, {})
      : value
  })
}

function hashArray (arr, valueHash, valueArray) {
  var hash = []
  arr.cl_each(function (obj) {
    var serializedObj = serializeObject(obj)
    var objHash = valueHash[serializedObj]
    if (objHash === undefined) {
      objHash = valueArray.length
      valueArray.push(obj)
      valueHash[serializedObj] = objHash
    }
    hash.push(objHash)
  })
  return String.fromCharCode.apply(null, hash)
}

function hashObject (obj, valueHash, valueArray) {
  return hashArray(Object.keys(obj || {}).sort().cl_map(function (key) {
    return [key, obj[key]]
  }), valueHash, valueArray)
}

function mergeText (oldText, newText, serverText) {
  var diffs = diffMatchPatch.diff_main(oldText, newText)
  diffMatchPatch.diff_cleanupSemantic(diffs)
  var patches = diffMatchPatch.patch_make(oldText, diffs)
  var patchResult = diffMatchPatch.patch_apply(patches, serverText)
  if (!patchResult[1]
    .cl_some(function (changeApplied) {
      return !changeApplied
    })) {
    return patchResult[0]
  }

  diffs = diffMatchPatchStrict.diff_main(patchResult[0], newText)
  diffMatchPatch.diff_cleanupSemantic(diffs)
  return diffs.cl_map(function (diff) {
    return diff[1]
  }).join('')
}

function quickPatch (oldStr, newStr, destStr, strict) {
  var dmp = strict ? diffMatchPatchStrict : diffMatchPatch
  var diffs = dmp.diff_main(oldStr, newStr)
  var patches = dmp.patch_make(oldStr, diffs)
  var patchResult = dmp.patch_apply(patches, destStr)
  return patchResult[0]
}

function mergeObjects (oldObject, newObject, serverObject) {
  var mergedObject = ({}).cl_extend(newObject).cl_extend(serverObject)
  mergedObject.cl_each(function (value, key) {
    if (!oldObject[key]) {
      return // There might be conflict, keep the server value
    }
    var newValue = newObject[key] && serializeObject(newObject[key])
    var serverValue = serverObject[key] && serializeObject(serverObject[key])
    if (newValue === serverValue) {
      return // no conflict
    }
    var oldValue = serializeObject(oldObject[key])
    if (oldValue !== newValue && !serverValue) {
      return // Removed on server but changed on client
    }
    if (oldValue !== serverValue && !newValue) {
      return // Removed on client but changed on server
    }
    if (oldValue !== newValue && oldValue === serverValue) {
      // Take the client value
      if (!newValue) {
        delete mergedObject[key]
      } else {
        mergedObject[key] = newObject[key]
      }
    } else if (oldValue !== serverValue && oldValue === newValue) {
      // Take the server value
      if (!serverValue) {
        delete mergedObject[key]
      }
    }
    // Take the server value otherwise
  })
  return cloneObject(mergedObject)
}

function mergeFlattenContent (oldContent, newContent, serverContent) {
  var markerKeys = []
  var markerIdxMap = Object.create(null)
  var oldText = makePatchableText(oldContent, markerKeys, markerIdxMap)
  var serverText = makePatchableText(serverContent, markerKeys, markerIdxMap)
  var localText = makePatchableText(newContent, markerKeys, markerIdxMap)
  var isServerTextChanges = oldText !== serverText
  var isTextSynchronized = serverText === localText

  var result = {
    text: isTextSynchronized || !isServerTextChanges
      ? localText
      : mergeText(oldText, serverText, localText),
    properties: mergeObjects(
      oldContent.properties,
      newContent.properties,
      serverContent.properties
    ),
    discussions: mergeObjects(
      stripDiscussionOffsets(oldContent.discussions),
      stripDiscussionOffsets(newContent.discussions),
      stripDiscussionOffsets(serverContent.discussions)
    ),
    comments: mergeObjects(
      oldContent.comments,
      newContent.comments,
      serverContent.comments
    )
  }
  restoreDiscussionOffsets(result, markerKeys)
  return result
}

export default clDiffUtils;
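For reference, a minimal standalone sketch of the offset-as-patch technique used by the removed offsetToPatch/patchToOffset pair above, not part of the commit: plain Array methods stand in for the clunderscore helpers, and the sample strings and offsets are invented.

// Illustrative sketch only: track a character offset across edits by encoding
// it as a diff-match-patch context patch around an invisible marker.
import DiffMatchPatch from 'diff-match-patch';

const dmp = new DiffMatchPatch();
dmp.Match_Distance = 999999999; // permissive matching, as in the removed helper
const MARKER = '\uF111\uF222\uF333\uF444';

// Encode an offset in `text` as a patch that inserts the marker at that spot.
function offsetToPatch(text, offset) {
  const patch = dmp.patch_make(text, [
    [0, text.slice(0, offset)],
    [1, MARKER],
    [0, text.slice(offset)],
  ])[0];
  return {
    // Keep context strings; '' stands for the marker insertion
    diffs: patch.diffs.map(([op, data]) => (op ? '' : data)),
    length: patch.length1,
    start: patch.start1,
  };
}

// Re-apply the patch to possibly edited text and find the marker again.
function patchToOffset(text, patch) {
  let markersLength = 0;
  const diffs = patch.diffs.map((diff) => {
    if (!diff) {
      markersLength += MARKER.length;
      return [1, MARKER];
    }
    return [0, diff];
  });
  return dmp.patch_apply([{
    diffs,
    length1: patch.length,
    length2: patch.length + markersLength,
    start1: patch.start,
    start2: patch.start,
  }], text)[0].indexOf(MARKER);
}

// Round trip: the offset before "world" survives an insertion earlier in the text.
const saved = offsetToPatch('hello world', 6);
console.log(patchToOffset('say hello world', saved)); // 10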
src/services/diffUtils.js (new file, 204 lines)
@@ -0,0 +1,204 @@
import DiffMatchPatch from 'diff-match-patch';
import utils from './utils';

const diffMatchPatch = new DiffMatchPatch();
const diffMatchPatchStrict = new DiffMatchPatch();
diffMatchPatchStrict.Match_Threshold = 0;
diffMatchPatchStrict.Patch_DeleteThreshold = 0;
const diffMatchPatchPermissive = new DiffMatchPatch();
diffMatchPatchPermissive.Match_Distance = 999999999;

function makePatchableText(content, markerKeys, markerIdxMap) {
  const markers = [];
  // Sort keys to have predictable marker positions in case of same offset
  const discussionKeys = Object.keys(content.discussions).sort();
  discussionKeys.forEach((discussionId) => {
    const discussion = content.discussions[discussionId];

    function addMarker(offsetName) {
      const markerKey = discussionId + offsetName;
      if (discussion[offsetName] !== undefined) {
        let idx = markerIdxMap[markerKey];
        if (idx === undefined) {
          idx = markerKeys.length;
          markerIdxMap[markerKey] = idx;
          markerKeys.push({
            id: discussionId,
            offsetName,
          });
        }
        markers.push({
          idx,
          offset: discussion[offsetName],
        });
      }
    }

    if (discussion.offset0 === discussion.offset1) {
      // Remove discussion offsets if markers are at the same position
      discussion.offset0 = undefined;
      discussion.offset1 = undefined;
    } else {
      addMarker('offset0');
      addMarker('offset1');
    }
  });

  let lastOffset = 0;
  let result = '';
  markers
    .sort((marker1, marker2) => marker1.offset - marker2.offset)
    .forEach((marker) => {
      result +=
        content.text.slice(lastOffset, marker.offset) +
        String.fromCharCode(0xe000 + marker.idx); // Use a character from the private use area
      lastOffset = marker.offset;
    });
  return result + content.text.slice(lastOffset);
}

function stripDiscussionOffsets(objectMap) {
  const result = {};
  Object.keys(objectMap).forEach((id) => {
    result[id] = {
      text: objectMap[id].text,
    };
  });
  return result;
}

function restoreDiscussionOffsets(content, markerKeys) {
  const len = content.text.length;
  const maxIdx = markerKeys.length;
  for (let i = 0; i < len; i += 1) {
    const idx = content.text.charCodeAt(i) - 0xe000;
    if (idx >= 0 && idx < maxIdx) {
      const markerKey = markerKeys[idx];
      content.text = content.text.slice(0, i) + content.text.slice(i + 1);
      const discussion = content.discussions[markerKey.id];
      if (discussion) {
        discussion[markerKey.offsetName] = i;
      }
      // We just removed the current character, we may have multiple markers with same offset
      i -= 1;
    }
  }
}

function serializeObject(obj) {
  if (!obj) {
    return obj;
  }
  return JSON.stringify(obj, (key, value) => {
    if (Object.prototype.toString.call(value) !== '[object Object]') {
      return value;
    }
    return Object.keys(value).sort().reduce((sorted, valueKey) => {
      sorted[valueKey] = value[valueKey];
      return sorted;
    }, {});
  });
}

function mergeText(oldText, newText, serverText) {
  let diffs = diffMatchPatch.diff_main(oldText, newText);
  diffMatchPatch.diff_cleanupSemantic(diffs);
  const patches = diffMatchPatch.patch_make(oldText, diffs);
  const patchResult = diffMatchPatch.patch_apply(patches, serverText);
  if (!patchResult[1].some(changeApplied => !changeApplied)) {
    return patchResult[0];
  }

  diffs = diffMatchPatchStrict.diff_main(patchResult[0], newText);
  diffMatchPatch.diff_cleanupSemantic(diffs);
  return diffs.map(diff => diff[1]).join('');
}

function quickPatch(oldStr, newStr, destStr, strict) {
  const dmp = strict ? diffMatchPatchStrict : diffMatchPatch;
  const diffs = dmp.diff_main(oldStr, newStr);
  const patches = dmp.patch_make(oldStr, diffs);
  const patchResult = dmp.patch_apply(patches, destStr);
  return patchResult[0];
}

function mergeValue(oldValue, newValue, serverValue) {
  if (!oldValue) {
    return serverValue; // There might be conflict, keep the server value
  }
  const newSerializedValue = serializeObject(newValue);
  const serverSerializedValue = serializeObject(serverValue);
  if (newSerializedValue === serverSerializedValue) {
    return serverValue; // no conflict
  }
  const oldSerializedValue = serializeObject(oldValue);
  if (oldSerializedValue !== newSerializedValue && !serverValue) {
    return newValue; // Removed on server but changed on client
  }
  if (oldSerializedValue !== serverSerializedValue && !newValue) {
    return serverValue; // Removed on client but changed on server
  }
  if (oldSerializedValue !== newSerializedValue && oldSerializedValue === serverSerializedValue) {
    return newValue; // Take the client value
  }
  return serverValue; // Take the server value otherwise
}

function mergeObjects(oldObject, newObject, serverObject) {
  const mergedObject = {};
  Object.keys({
    ...newObject,
    ...serverObject,
  }).forEach((key) => {
    const mergedValue = mergeValue(oldObject[key], newObject[key], serverObject[key]);
    if (mergedValue != null) {
      mergedObject[key] = mergedValue;
    }
  });
  return utils.deepCopy(mergedObject);
}

function mergeContent(oldContent, newContent, serverContent) {
  const markerKeys = [];
  const markerIdxMap = Object.create(null);
  const oldText = makePatchableText(oldContent, markerKeys, markerIdxMap);
  const serverText = makePatchableText(serverContent, markerKeys, markerIdxMap);
  const localText = makePatchableText(newContent, markerKeys, markerIdxMap);
  const isServerTextChanges = oldText !== serverText;
  const isTextSynchronized = serverText === localText;

  const result = {
    text: isTextSynchronized || !isServerTextChanges
      ? localText
      : mergeText(oldText, serverText, localText),
    properties: mergeValue(
      oldContent.properties,
      newContent.properties,
      serverContent.properties,
    ),
    discussions: mergeObjects(
      stripDiscussionOffsets(oldContent.discussions),
      stripDiscussionOffsets(newContent.discussions),
      stripDiscussionOffsets(serverContent.discussions),
    ),
    comments: mergeObjects(
      oldContent.comments,
      newContent.comments,
      serverContent.comments,
    ),
  };
  restoreDiscussionOffsets(result, markerKeys);
  return result;
}

export default {
  serializeObject,
  makePatchableText,
  restoreDiscussionOffsets,
  applyContentChanges,
  getTextPatches,
  getObjectPatches,
  quickPatch,
  mergeObjects,
  mergeContent,
};
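A hypothetical usage sketch of the three-way merge above, not part of the commit: the content field shapes follow what mergeContent reads (text, properties, discussions, comments), the sample values are invented, and the import path assumes a sibling module under src/services.

// Illustrative only: three-way merge of a base, a local copy and a server copy.
import diffUtils from './diffUtils';

const base = {
  text: 'Hello world\n',
  properties: { title: 'Demo' },
  discussions: {},
  comments: {},
};
// Local edit: change the text.
const local = { ...base, text: 'Hello brave world\n' };
// Server edit: change a property only.
const server = { ...base, properties: { title: 'Demo (server)' } };

const merged = diffUtils.mergeContent(base, local, server);
// Text comes from the local copy (server text unchanged),
// properties come from the server copy (no local change).
console.log(merged.text);             // 'Hello brave world\n'
console.log(merged.properties.title); // 'Demo (server)'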
@@ -23,11 +23,11 @@ const tokenExpirationMargin = 10 * 60 * 1000; // 10 min
// ],
// };

-const request = (googleToken, options) => utils.request({
+const request = (token, options) => utils.request({
  ...options,
  headers: {
    ...options.headers,
-    Authorization: `Bearer ${googleToken.accessToken}`,
+    Authorization: `Bearer ${token.accessToken}`,
  },
});

@@ -68,28 +68,28 @@ export default {
        };
      }))
      // Call the tokeninfo endpoint
-      .then(googleToken => request(googleToken, {
+      .then(token => request(token, {
        method: 'GET',
        url: 'https://www.googleapis.com/plus/v1/people/me',
      }).then((res) => {
-        // Add name to googleToken
-        googleToken.name = res.body.displayName;
-        const existingToken = store.getters['data/googleTokens'][googleToken.sub];
+        // Add name to token
+        token.name = res.body.displayName;
+        const existingToken = store.getters['data/googleTokens'][token.sub];
        if (existingToken) {
          if (!sub) {
            throw new Error('Google account already linked.');
          }
-          // Add isLogin and nextPageToken to googleToken
-          googleToken.isLogin = existingToken.isLogin;
-          googleToken.nextPageToken = existingToken.nextPageToken;
+          // Add isLogin and nextPageToken to token
+          token.isLogin = existingToken.isLogin;
+          token.nextPageToken = existingToken.nextPageToken;
        }
-        // Add googleToken to googleTokens
-        store.dispatch('data/setGoogleToken', googleToken);
-        return googleToken;
+        // Add token to googleTokens
+        store.dispatch('data/setGoogleToken', token);
+        return token;
      }));
  },
-  refreshToken(scopes, googleToken) {
-    const sub = googleToken.sub;
+  refreshToken(scopes, token) {
+    const sub = token.sub;
    const lastToken = store.getters['data/googleTokens'][sub];
    const mergedScopes = [...new Set([
      ...scopes,
@@ -115,9 +115,9 @@ export default {
        .catch(() => this.startOauth2(mergedScopes, sub));
    });
  },
-  getChanges(googleToken) {
+  getChanges(token) {
    let changes = [];
-    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], googleToken)
+    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
      .then((refreshedToken) => {
        const getPage = (pageToken = '1') => request(refreshedToken, {
          method: 'GET',
@@ -158,8 +158,8 @@ export default {
        return getPage(refreshedToken.nextPageToken);
      });
  },
-  updateNextPageToken(googleToken, changes) {
-    const lastToken = store.getters['data/googleTokens'][googleToken.sub];
+  updateNextPageToken(token, changes) {
+    const lastToken = store.getters['data/googleTokens'][token.sub];
    if (changes.nextPageToken !== lastToken.nextPageToken) {
      store.dispatch('data/setGoogleToken', {
        ...lastToken,
@@ -167,8 +167,8 @@ export default {
      });
    }
  },
-  saveItem(googleToken, item, syncData, ifNotTooLate = cb => res => cb(res)) {
-    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], googleToken)
+  saveItem(token, item, syncData, ifNotTooLate = cb => res => cb(res)) {
+    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
      // Refreshing a token can take a while if an oauth window pops up, so check if it's too late
      .then(ifNotTooLate((refreshedToken) => {
        const options = {
@@ -229,12 +229,22 @@ export default {
      }));
    }));
  },
-  removeItem(googleToken, syncData, ifNotTooLate = cb => res => cb(res)) {
-    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], googleToken)
+  removeItem(token, syncData, ifNotTooLate = cb => res => cb(res)) {
+    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
      // Refreshing a token can take a while if an oauth window pops up, so check if it's too late
      .then(ifNotTooLate(refreshedToken => request(refreshedToken, {
        method: 'DELETE',
        url: `https://www.googleapis.com/drive/v3/files/${syncData.id}`,
      })).then(() => syncData));
  },
+  downloadFile(refreshedToken, id) {
+    return request(refreshedToken, {
+      method: 'GET',
+      url: `https://www.googleapis.com/drive/v3/files/${id}?alt=media`,
+    }).then(res => res.body);
+  },
+  downloadAppDataFile(token, id) {
+    return this.refreshToken(['https://www.googleapis.com/auth/drive.appdata'], token)
+      .then(refreshedToken => this.downloadFile(refreshedToken, id));
+  },
};
@@ -86,16 +86,11 @@ utils.types.forEach((type) => {
  updatedMap[type] = Object.create(null);
});

-function isContentType(type) {
-  switch (type) {
-    case 'content':
-    case 'contentState':
-    case 'syncContent':
-      return true;
-    default:
-      return false;
-  }
-}
+const contentTypes = {
+  content: true,
+  contentState: true,
+  syncContent: true,
+};

export default {
  lastTx: 0,
@@ -171,8 +166,8 @@ export default {
    Object.keys(this.updatedMap).forEach((type) => {
      // Remove this type only if file is deleted
      let checker = cb => id => !storeItemMap[id] && cb(id);
-      if (isContentType(type)) {
-        // For content types, remove only if file is deleted
+      if (contentTypes[type]) {
+        // For content types, remove item only if file is deleted
        checker = cb => (id) => {
          if (!storeItemMap[id]) {
            const [fileId] = id.split('/');
@@ -227,7 +222,7 @@ export default {
      // DB item is different from the corresponding store item
      this.updatedMap[dbItem.type][dbItem.id] = dbItem.updated;
      // Update content only if it exists in the store
-      if (existingStoreItem || !isContentType(dbItem.type)) {
+      if (existingStoreItem || !contentTypes[dbItem.type]) {
        // Put item in the store
        store.commit(`${dbItem.type}/setItem`, dbItem);
        storeItemMap[dbItem.id] = dbItem;
@@ -236,9 +231,9 @@ export default {
  },

  /**
-   * Retrieve an item from the DB.
+   * Retrieve an item from the DB and put it in the store.
   */
-  retrieveItem(id) {
+  loadItem(id) {
    // Check if item is in the store
    const itemInStore = store.getters.allItemMap[id];
    if (itemInStore) {
@@ -259,11 +254,30 @@ export default {
          this.updatedMap[dbItem.type][dbItem.id] = dbItem.updated;
          // Put item in the store
          store.commit(`${dbItem.type}/setItem`, dbItem);
          // Use deepCopy to freeze item
          resolve(dbItem);
        }
      };
    }, () => onError());
    });
  },

+  /**
+   * Unload from the store contents that haven't been opened recently
+   */
+  unloadContents() {
+    const lastOpenedFileIds = store.getters['data/lastOpenedIds']
+      .slice(0, 10).reduce((result, id) => {
+        result[id] = true;
+        return result;
+      }, {});
+    Object.keys(contentTypes).forEach((type) => {
+      store.getters(`${type}/items`).forEach((item) => {
+        const [fileId] = item.id.split('/');
+        if (!lastOpenedFileIds[fileId]) {
+          // Remove item from the store
+          store.commit(`${type}/deleteItem`, item.id);
+        }
+      });
+    });
+  },
};
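The renamed loadItem API returns a promise that rejects when the item is not in the local DB; the syncSvc hunk further down wraps it in a loader helper. A hypothetical standalone call, with import paths assumed since they are not shown in this diff:

// Hypothetical usage, mirroring the loader() helper in the syncSvc hunk below.
import store from '../store';
import localDbSvc from './localDbSvc';

const fileId = 'file-1'; // illustrative id, not from the commit
localDbSvc.loadItem(`${fileId}/content`)
  // Item does not exist in the DB, create an empty one in the store instead
  .catch(() => store.commit('content/setItem', {
    id: `${fileId}/content`,
  }));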
@@ -1 +1,24 @@
-export default {};
+import store from '../../store';
+import googleHelper from '../helpers/googleHelper';
+
+export default {
+  downloadContent(token, fileId) {
+    const syncData = store.getters['data/syncDataByItemId'][`${fileId}/content`];
+    return googleHelper.downloadAppDataFile(token, syncData.id)
+      .then((content) => {
+        if (content.updated !== syncData.updated) {
+          store.dispatch('data/setSyncData', {
+            ...store.getters['data/syncData'],
+            [syncData.id]: {
+              ...syncData,
+              updated: content.updated,
+            },
+          });
+        }
+        return {
+          history: [],
+          ...content,
+        };
+      });
+  },
+};
@@ -5,8 +5,6 @@ import utils from './utils';
import userActivitySvc from './userActivitySvc';
import gdriveAppDataProvider from './providers/gdriveAppDataProvider';
import googleHelper from './helpers/googleHelper';
-import emptyContent from '../data/emptyContent';
-import emptySyncContent from '../data/emptySyncContent';

const lastSyncActivityKey = 'lastSyncActivity';
let lastSyncActivity;
@@ -36,6 +34,7 @@ function setLastSyncActivity() {

function getSyncProvider(syncLocation) {
  switch (syncLocation.provider) {
    case 'gdriveAppData':
    default:
      return gdriveAppDataProvider;
  }
@@ -43,11 +42,21 @@ function getSyncProvider(syncLocation) {

function getSyncToken(syncLocation) {
  switch (syncLocation.provider) {
    case 'gdriveAppData':
    default:
      return store.getters['data/loginToken'];
  }
}

+const loader = type => fileId => localDbSvc.loadItem(`${fileId}/${type}`)
+  // Item does not exist, create it
+  .catch(() => store.commit(`${type}/setItem`, {
+    id: `${fileId}/${type}`,
+  }));
+const loadContent = loader('content');
+const loadSyncContent = loader('syncContent');
+const loadContentState = loader('contentState');

function applyChanges(changes) {
  const storeItemMap = { ...store.getters.allItemMap };
  const syncData = { ...store.getters['data/syncData'] };
@@ -164,23 +173,51 @@ function sync() {
      const existingSyncData = store.getters['data/syncDataByItemId'][contentId];
    });

-    const syncOneContent = fileId => localDbSvc.retrieveItem(`${fileId}/syncContent`)
-      .catch(() => ({ ...emptySyncContent(), id: `${fileId}/syncContent` }))
-      .then(syncContent => localDbSvc.retrieveItem(`${fileId}/content`)
-        .catch(() => ({ ...emptyContent(), id: `${fileId}/content` }))
-        .then((content) => {
-          const syncOneContentLocation = (syncLocation) => {
-            return Promise.resolve()
+    const syncOneContent = fileId => loadSyncContent(fileId)
+      .then(() => loadContent(fileId))
+      .then(() => {
+        const getContent = () => store.state.content.itemMap[`${fileId}/content`];
+        const getSyncContent = () => store.state.content.itemMap[`${fileId}/syncContent`];
+
+        const syncLocations = [
+          { id: 'main', provider: 'gdriveAppData' },
+          ...getContent().syncLocations.filter(syncLocation => getSyncToken(syncLocation),
+        )];
+        const downloadedLocations = {};
+
+        const syncOneContentLocation = () => {
+          let result;
+          syncLocations.some((syncLocation) => {
+            if (!downloadedLocations[syncLocation.id]) {
              const provider = getSyncProvider(syncLocation);
              const token = getSyncToken(syncLocation);
-              return provider && token && provider.downloadContent()
+              result = provider && token && provider.downloadContent(fileId, syncLocation)
                .then((content) => {
+                  const syncContent = getSyncContent();
+                  const syncLocationData = syncContent.syncLocationData[syncLocation.id] || {
+                    history: [],
+                  };
+                  let lastMergedContent;
+                  syncLocationData.history.some((updated) => {
+                    if (content.history.indexOf(updated) !== -1) {

                    }
                    return lastMergedContent;
                  });
                })
                .then(() => syncOneContentLocation());
            }
            return result;
          });
          return result;
        };

-        const syncLocations = [{ provider: null }, ...content.syncLocations];
-        return syncOneContentLocation(syncLocations[0]);
-      }));
+        return syncOneContentLocation();
+      })
+      .then(() => localDbSvc.unloadContents(), (err) => {
+        localDbSvc.unloadContents();
+        throw err;
+      });

    // Called until no content to save
    const saveNextContent = ifNotTooLate(() => {
@@ -189,7 +226,7 @@ function sync() {
      const updated = getContentUpdated(contentId);
      const existingSyncData = store.getters['data/syncDataByItemId'][contentId];
      if (!existingSyncData || existingSyncData.updated !== updated) {
-        saveContentPromise = localDbSvc.retrieveItem(contentId)
+        saveContentPromise = localDbSvc.loadItem(contentId)
          .then(content => googleHelper.saveItem(
            googleToken,
            // Use deepCopy to freeze objects
@@ -299,19 +336,11 @@ localDbSvc.sync()
      store.dispatch('data/setLastOpenedId', currentFile.id);
      return Promise.resolve()
        // Load contentState from DB
-        .then(() => localDbSvc.retrieveItem(`${currentFile.id}/contentState`)
-          // contentState does not exist, create it
-          .catch(() => store.commit('contentState/setItem', {
-            id: `${currentFile.id}/contentState`,
-          })))
+        .then(() => loadContentState(currentFile.id))
        // Load syncContent from DB
-        .then(() => localDbSvc.retrieveItem(`${currentFile.id}/syncContent`)
-          // syncContent does not exist, create it
-          .catch(() => store.commit('syncContent/setItem', {
-            id: `${currentFile.id}/syncContent`,
-          })))
+        .then(() => loadSyncContent(currentFile.id))
        // Load content from DB
-        .then(() => localDbSvc.retrieveItem(`${currentFile.id}/content`));
+        .then(() => localDbSvc.loadItem(`${currentFile.id}/content`));
    }),
    {
      immediate: true,
@@ -320,6 +349,14 @@ localDbSvc.sync()
// Sync local DB periodically
utils.setInterval(() => localDbSvc.sync(), 1000);

+// Unload contents from memory periodically
+utils.setInterval(() => {
+  // Wait for sync and publish to finish
+  if (store.state.queue.isEmpty) {
+    localDbSvc.unloadContents();
+  }
+}, 5000);

export default {
  isSyncAvailable,
  requestSync,
@@ -25,13 +25,13 @@ const patcher = id => ({ state, commit }, data) => {
    },
  });
};
-const localSettingsToggler = propertyName => ({ getters, dispatch }, value) => dispatch('patchLocalSettings', {
-  [propertyName]: value === undefined ? !getters.localSettings[propertyName] : value,
-});

// Local settings
module.getters.localSettings = getter('localSettings');
module.actions.patchLocalSettings = patcher('localSettings');
+const localSettingsToggler = propertyName => ({ getters, dispatch }, value) => dispatch('patchLocalSettings', {
+  [propertyName]: value === undefined ? !getters.localSettings[propertyName] : value,
+});
module.actions.toggleNavigationBar = localSettingsToggler('showNavigationBar');
module.actions.toggleEditor = localSettingsToggler('showEditor');
module.actions.toggleSidePreview = localSettingsToggler('showSidePreview');
@@ -48,7 +48,7 @@ module.getters.lastOpened = getter('lastOpened');
const getLastOpenedIds = (lastOpened, rootState) => Object.keys(lastOpened)
  .filter(id => rootState.file.itemMap[id])
  .sort((id1, id2) => lastOpened[id2] - lastOpened[id1])
-  .slice(0, 10);
+  .slice(0, 20);
module.getters.lastOpenedIds = (state, getters, rootState) =>
  getLastOpenedIds(getters.lastOpened, rootState);
module.actions.setLastOpenedId = ({ getters, commit, rootState }, fileId) => {
@@ -80,18 +80,18 @@ module.actions.setSyncData = setter('syncData');
module.getters.tokens = getter('tokens');
module.getters.googleTokens = (state, getters) => getters.tokens.google || {};
module.getters.loginToken = (state, getters) => {
-  // Return the first googleToken that has the isLogin flag
+  // Return the first google token that has the isLogin flag
  const googleTokens = getters.googleTokens;
  const loginSubs = Object.keys(googleTokens)
    .filter(sub => googleTokens[sub].isLogin);
  return googleTokens[loginSubs[0]];
};
module.actions.patchTokens = patcher('tokens');
-module.actions.setGoogleToken = ({ getters, dispatch }, googleToken) => {
+module.actions.setGoogleToken = ({ getters, dispatch }, token) => {
  dispatch('patchTokens', {
    google: {
      ...getters.googleTokens,
-      [googleToken.sub]: googleToken,
+      [token.sub]: token,
    },
  });
};