// noinspection RedundantIfStatementJS
/**
* @author Maximilian Kallert <max@refinio.net>
* @author Eduard Reimer <eduard@refinio.net>
* @copyright REFINIO GmbH 2020
* @license CC-BY-NC-SA-2.5; portions MIT License
* @version 1.0.0
*/
/**
* This module implements the CRDT ORSetForArrays algorithm.
* @module
*/
import {convertValue} from '../object-to-microdata';
import type {RecipeRule} from '../recipes';
import {createRandomString} from '../system/crypto-helpers';
import {constructItemRuleFromListRule} from '../util/recipe-checks';
import {isListItemType, ruleHasItemType} from '../util/type-checks';
import {isObject, isString} from '../util/type-checks-basic';
import type {CRDTImplementation} from './CRDTImplementation';
/**
* The structure of one metadata list entry of this CRDT.
* uniqueId - a unique id generated when a new entry is added to the list
* value - the value added to the list
*/
interface MetaListEntry {
uniqueId: string;
value: unknown;
}
/**
 * The structure of the metadata of this CRDT.
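 *
 * @example
 * // Illustrative shape only: one observed add and a remove covering its id.
 * // {
 * //     addList: [{uniqueId: 'a1b2', value: 'alice'}],
 * //     removeList: [{uniqueId: 'a1b2', value: 'alice'}]
 * // }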
*/
interface ORSetMetaData {
addList: MetaListEntry[];
removeList: MetaListEntry[];
}
/**
* Function that verifies that the metadata has the correct format. It checks the array
* types, not each element in the array.
*
 * @param {unknown} data - The data to check for compatibility.
* @returns {boolean}
*/
function isORSetMetadataFastCheck(data: unknown): data is ORSetMetaData {
if (!isObject(data)) {
return false;
}
if (!data || !Array.isArray(data.addList) || !Array.isArray(data.removeList)) {
return false;
}
return true;
}
/**
* Function that verifies that the metadata has the correct format. It checks the array types
* and every element in the array.
* @param {unknown} data - The data to check for compatibility.
* @returns {boolean}
*/
function isORSetMetadata(data: unknown): data is ORSetMetaData {
if (!isORSetMetadataFastCheck(data)) {
return false;
}
    if (!data.addList.every(entry => isObject(entry) && isString(entry.uniqueId))) {
        return false;
    }
    if (!data.removeList.every(entry => isObject(entry) && isString(entry.uniqueId))) {
        return false;
    }
return true;
}
/**
* Implementation of the Observed-Remove Set (OR-Set)
*
* See paper:
* A comprehensive study of Convergent and Commutative Replicated Data Types
 * Marc Shapiro, Nuno Preguiça, Carlos Baquero, Marek Zawirski
* p. 26, Section 3.3.5
* https://hal.inria.fr/inria-00555588/document
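 *
 * Intuition: every add tags a value with a fresh unique id, and a remove only
 * covers the ids it has observed, so a value re-added concurrently under a new
 * id survives the merge.
 *
 * @example
 * // Hypothetical sketch; `rule` is assumed to be the RecipeRule of a
 * // set-valued property from your recipe.
 * const orSet = new ORSetForArrays();
 * const meta = await orSet.generateMetaData(['a'], ['a', 'b'], undefined, rule);
 * // meta.addList gains one entry of the form {uniqueId: <random>, value: 'b'}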
*/
export class ORSetForArrays implements CRDTImplementation {
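    /**
     * Generate the recipe rules that describe this CRDT's metadata object.
     *
     * Both addList and removeList entries carry a uniqueId string and a value
     * whose type is inherited from the item type of the original list rule.
     *
     * @param {RecipeRule} rule - The rule of the set-typed property.
     * @param {string} path - The rule path referenced by inheritFrom.
     * @returns {RecipeRule[]}
     */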
generateRecipeRules(rule: RecipeRule, path: string): RecipeRule[] {
if (ruleHasItemType(rule) && !isListItemType(rule.itemtype)) {
            throw new Error(
                'generateRecipeRules: You cannot use the ORSetForArrays implementation' +
                    ` if the data type is not a set: ${JSON.stringify(rule)}`
            );
}
return [
{
itemprop: 'addList',
itemtype: {
type: 'array',
item: {
type: 'object',
rules: [
{
itemprop: 'uniqueId',
itemtype: {type: 'string'}
},
{
itemprop: 'value',
inheritFrom: {rule: path, extract: 'CollectionItemType'}
}
]
}
}
},
{
itemprop: 'removeList',
itemtype: {
type: 'array',
item: {
type: 'object',
rules: [
{
itemprop: 'uniqueId',
itemtype: {type: 'string'}
},
{
itemprop: 'value',
inheritFrom: {rule: path, extract: 'CollectionItemType'}
}
]
}
}
}
];
}
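    /**
     * Generate new CRDT metadata by diffing the old and new data arrays.
     *
     * Added elements get a fresh uniqueId in the addList; removed elements get
     * one removeList entry per addList entry with the same serialized value.
     *
     * @param {unknown} dataOld - Previous data array (defaults to empty).
     * @param {unknown} dataNew - New data array.
     * @param {unknown} metadataOld - Previous metadata (defaults to empty lists).
     * @param {RecipeRule} rule - The rule of the set-typed property.
     * @returns {Promise<ORSetMetaData>}
     */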
async generateMetaData(
dataOld: unknown = [],
dataNew: unknown,
metadataOld: unknown = {addList: [], removeList: []},
rule: RecipeRule
): Promise<ORSetMetaData> {
// #### STEP 1: Sanity checks and rule adaption ####
if (ruleHasItemType(rule) && !isListItemType(rule.itemtype)) {
            throw new Error(
                'generateMetaData: You cannot use the ORSetForArrays implementation' +
                    ` if the data type is not a set: ${JSON.stringify(rule)}`
            );
}
if (!isORSetMetadata(metadataOld)) {
            throw new Error(`Old metadata has invalid format: ${JSON.stringify(metadataOld)}`);
}
if (!Array.isArray(dataOld)) {
throw new Error(
`The ORSetForArrays needs dataOld to be of type array or undefined: ${dataOld}`
);
}
if (!Array.isArray(dataNew)) {
throw new Error(`The ORSetForArrays needs dataNew to be of type array: ${dataNew}`);
}
const ruleNoList = constructItemRuleFromListRule(rule);
        // #### STEP 2: Serialize 'jsobject content' -> 'microdata string' ####
        // Leads to: [[serializedString, content], ...]
const dataOldSerialized: Array<[string, unknown]> = dataOld.map(elem => {
return [convertValue(ruleNoList, elem), elem];
});
const dataNewSerialized: Array<[string, unknown]> = dataNew.map(elem => [
convertValue(ruleNoList, elem),
elem
]);
// Leads to a map: string -> content
const dataOldMap = new Map(dataOldSerialized);
const dataNewMap = new Map(dataNewSerialized);
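        // Two structurally equal elements serialize to identical strings, so these
        // maps compare set members by value rather than by object identity.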
// #### STEP 3: Find elements that were added / removed and add this information ####
// Fill the output maps based on the calculated differences
const metadataNew: ORSetMetaData = {
addList: [...metadataOld.addList],
removeList: [...metadataOld.removeList]
};
// Add added elements to addList
const dataNewSerializedFiltered = dataNewSerialized.filter(
elem => !dataOldMap.has(elem[0])
);
const newAddListEntries = await Promise.all(
dataNewSerializedFiltered.map(async addedElement => {
return {
uniqueId: await createRandomString(),
value: addedElement[1]
};
})
);
metadataNew.addList = metadataNew.addList.concat(newAddListEntries);
const serializedDataToBeDeleted = dataOldSerialized.filter(
elem => !dataNewMap.has(elem[0])
);
serializedDataToBeDeleted.forEach(deletedElement => {
// Create removeList entry for each known uniqueId entry in the addList for the
// deleted value
const metaListEntriesToBeDeleted = metadataNew.addList.filter(
metaListEntry => convertValue(ruleNoList, metaListEntry.value) === deletedElement[0]
);
            const removeListEntries = metaListEntriesToBeDeleted.map(entryToRemove => {
                return {
                    uniqueId: entryToRemove.uniqueId,
                    value: deletedElement[1]
                };
            });
metadataNew.removeList = metadataNew.removeList.concat(removeListEntries);
});
return metadataNew;
}
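    /**
     * Merge two versions of data and metadata into a single consistent state.
     *
     * The add and remove lists are unioned (deduplicated by uniqueId), and the
     * resulting data is every added element whose uniqueId has no matching
     * removeList entry, with structural duplicates filtered out.
     *
     * @param {unknown} objLatestVersion - Data of the latest stored version.
     * @param {unknown} metadataLatestVersion - Metadata of the latest stored version.
     * @param {unknown} objToMerge - Data of the version to merge in.
     * @param {unknown} metadataToMerge - Metadata of the version to merge in.
     * @param {Readonly<RecipeRule>} rule - The rule of the set-typed property.
     * @returns {Promise<{metadata: ORSetMetaData; data: unknown}>}
     */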
async mergeMetaData(
objLatestVersion: unknown,
metadataLatestVersion: unknown,
objToMerge: unknown,
metadataToMerge: unknown,
rule: Readonly<RecipeRule>
): Promise<{metadata: ORSetMetaData; data: unknown}> {
// #### STEP 1: Sanity checks and rule adaption ####
if (ruleHasItemType(rule) && !isListItemType(rule.itemtype)) {
            throw new Error(
                'mergeMetaData: You cannot use the ORSetForArrays implementation' +
                    ` if the data type is not a set: ${JSON.stringify(rule)}`
            );
}
        // If objLatestVersion is undefined there is no previous data, which is
        // equivalent to an empty array with empty metadata.
if (objLatestVersion === undefined) {
// eslint-disable-next-line no-param-reassign
objLatestVersion = [];
if (metadataLatestVersion === undefined) {
// eslint-disable-next-line no-param-reassign
metadataLatestVersion = {
addList: [],
removeList: []
};
}
}
if (!isORSetMetadata(metadataLatestVersion)) {
            throw new Error(
                `Old metadata has invalid format: ${JSON.stringify(metadataLatestVersion)}`
            );
}
if (!isORSetMetadata(metadataToMerge)) {
            throw new Error(
                `Merge metadata has invalid format: ${JSON.stringify(metadataToMerge)}`
            );
}
if (!Array.isArray(objLatestVersion)) {
throw new Error(
                `The ORSetForArrays needs objLatestVersion to be of type array: ${objLatestVersion}`
);
}
if (!Array.isArray(objToMerge)) {
throw new Error(
                `The ORSetForArrays needs objToMerge to be of type array: ${objToMerge}`
);
}
const ruleNoList = constructItemRuleFromListRule(rule);
// #### STEP 2: Calculate new metadata by merging the old ones ####
const mergedAddList = ORSetForArrays.mergeFilterList(
metadataLatestVersion.addList,
metadataToMerge.addList
);
const mergedRemoveList = ORSetForArrays.mergeFilterList(
metadataLatestVersion.removeList,
metadataToMerge.removeList
);
// #### STEP 3: Generate the new data object ####
const data = ORSetForArrays.computeAliveElements(mergedAddList, mergedRemoveList);
const filteredData = ORSetForArrays.filterDuplicateValues(data, ruleNoList);
// #### STEP 4: return metadata and data ####
return {
metadata: {
addList: mergedAddList,
removeList: mergedRemoveList
},
data: filteredData
};
}
/**
* Filter duplicate values by comparing serialized values.
* @param {unknown[]} values
* @param {RecipeRule} ruleNoList
* @returns {unknown[]}
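     *
     * @example
     * // Illustrative: structurally equal values collapse to one entry.
     * // filterDuplicateValues([{a: 1}, {a: 1}, {a: 2}], ruleNoList)
     * //     -> [{a: 1}, {a: 2}]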
* @private
*/
private static filterDuplicateValues(values: unknown[], ruleNoList: RecipeRule): unknown[] {
        const serializedValues: Array<[string, unknown]> = values.map(elem => {
return [convertValue(ruleNoList, elem), elem];
});
// map creation will eliminate the duplicates because the key is the microdata
// representation
        const deduplicatedMap = new Map(serializedValues);
        return [...deduplicatedMap.values()];
}
/**
* Merge and filter the duplicates based on unique ids.
* @param {MetaListEntry[]} list1
* @param {MetaListEntry[]} list2
* @returns {MetaListEntry[]}
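     *
     * @example
     * // Illustrative: entries sharing a uniqueId are kept once, first wins.
     * // mergeFilterList(
     * //     [{uniqueId: 'a', value: 1}],
     * //     [{uniqueId: 'a', value: 1}, {uniqueId: 'b', value: 2}]
     * // ) -> [{uniqueId: 'a', value: 1}, {uniqueId: 'b', value: 2}]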
* @private
*/
private static mergeFilterList(
list1: MetaListEntry[],
list2: MetaListEntry[]
): MetaListEntry[] {
const mergedList = [...list1, ...list2];
// filter duplicates based on unique id
return mergedList.filter((elem: MetaListEntry, index: number, self: MetaListEntry[]) => {
return (
index === self.findIndex(metaListEntry => metaListEntry.uniqueId === elem.uniqueId)
);
});
}
/**
     * Calculate the alive elements of the set: all added entries whose
     * uniqueId is not covered by a removeList entry.
*
* @param {MetaListEntry[]} addList
* @param {MetaListEntry[]} removeList
* @returns {unknown[]}
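     *
     * @example
     * // Illustrative: 'x' was added and later removed, 'y' was only added.
     * // computeAliveElements(
     * //     [{uniqueId: 'x', value: 1}, {uniqueId: 'y', value: 2}],
     * //     [{uniqueId: 'x', value: 1}]
     * // ) -> [2]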
* @private
*/
private static computeAliveElements(
addList: MetaListEntry[],
removeList: MetaListEntry[]
): unknown[] {
        // Maps each add's uniqueId to its metadata entry.
const map = new Map<string, MetaListEntry>();
// Fill the map with all added values.
for (const elem of addList) {
map.set(elem.uniqueId, {
uniqueId: elem.uniqueId,
value: elem.value
});
}
        // Delete every element whose add has been observed by a remove.
        // Map.delete is a no-op for ids that were never added.
        for (const removedElement of removeList) {
            map.delete(removedElement.uniqueId);
        }
return [...map.values()].map(e => e.value);
}
}