diff --git a/packages/dds/tree/README.md b/packages/dds/tree/README.md index 96725263a592..49d887fcaa7e 100644 --- a/packages/dds/tree/README.md +++ b/packages/dds/tree/README.md @@ -67,7 +67,20 @@ This typically means the client side "business logic" or "view" part of some gra ### Ownership and Lifetimes -TODO: add a diagram for this section. +This diagram shows the ownership hierarchy during a transaction with solid arrows, and some important references with dashed arrows: + +```mermaid +graph TD; + store["Data Store"]-->doc["Persisted Summaries"] + container["Fluid Container"]-->shared-tree; + shared-tree--"extends"-->shared-tree-core; + shared-tree-core-."reads".->doc; + shared-tree-core-->EditManager-->X["collab window & branches"]; + shared-tree-core-->Indexes-->ForestIndex; + shared-tree-->checkout["default checkout"]-->transaction-."updates".->checkout; + transaction-->ProgressiveEditBuilder + checkout-."reads".->ForestIndex; +``` `tree` is a DDS, and therefore it stores its persisted data in a Fluid Container, and is also owned by that same container. When nothing in that container references the DDS anymore, it may get garbage collected by the Fluid GC. @@ -97,7 +110,14 @@ could be added in the future. #### Viewing -TODO: add a diagram for this section. +```mermaid +graph LR; + doc["Persisted Summaries"]--"Summary+Trailing ops"-->shared-tree-core + shared-tree--"configures"-->shared-tree-core + shared-tree-core--"Summary"-->Indexes--"Summary"-->ForestIndex; + ForestIndex--"Exposed by"-->checkout; + checkout--"viewed by"-->app +``` [`shared-tree`](./src/shared-tree/) configures [`shared-tree-core`](./src/shared-tree-core/README.md) with a set of indexes. `shared-tree-core` downloads the summary data from the Fluid Container, feeding the summary data (and any future edits) into the indexes. diff --git a/packages/dds/tree/src/changeset/format.ts b/packages/dds/tree/src/changeset/format.ts index 269b378f84f9..35af2ea67975 100644 --- a/packages/dds/tree/src/changeset/format.ts +++ b/packages/dds/tree/src/changeset/format.ts @@ -15,16 +15,14 @@ import { JsonableTree } from "../tree"; export namespace Transposed { export interface Transaction extends PeerChangeset { /** - * The reference sequence number of the transaction that this transaction was originally - * issued after. + * The tag of the changeset that this transaction was originally issued after. */ - ref: SeqNumber; + ref: ChangesetTag; /** - * The reference sequence number of the transaction that this transaction has been - * transposed over. + * The tag of the latest changeset that this transaction has been transposed over. * Omitted on changesets that have not been transposed. */ - newRef?: SeqNumber; + newRef?: ChangesetTag; } /** @@ -53,41 +51,43 @@ export namespace Transposed { export type MarkList = TMark[]; export type Mark = + | SizedMark + | AttachGroup; + + export type ObjectMark = + | SizedObjectMark + | AttachGroup; + + export type SizedMark = | Skip + | SizedObjectMark; + + export type SizedObjectMark = | Tomb | Modify | Detach | Reattach | ModifyReattach | ModifyDetach - | GapEffectSegment - | AttachGroup; + | GapEffectSegment; export type AttachGroup = Attach[]; export interface Tomb { type: "Tomb"; - seq: SeqNumber; + change: ChangesetTag; count: number; } - export type ValueMark = SetValue | RevertValue; - - export interface SetValue { - type: "Set"; + export interface SetValue extends HasOpId { /** Can be left unset to represent the value being cleared. 
*/ value?: Value; } - export interface RevertValue { - type: "Revert"; - seq: SeqNumber; - } - export interface Modify { type: "Modify"; - tomb?: SeqNumber; - value?: ValueMark; + tomb?: ChangesetTag; + value?: SetValue; fields?: FieldMarks; } @@ -155,7 +155,7 @@ export namespace Transposed { export interface ModifyInsert extends HasOpId, HasPlaceFields { type: "MInsert"; content: ProtoNode; - value?: ValueMark; + value?: SetValue; fields?: FieldMarks; } @@ -177,9 +177,9 @@ export namespace Transposed { } /** - * Represents the precise location of a concurrent slice-move-in. + * Represents the precise location of a concurrent slice-move-in within the same gap. * This is needed so we can tell where concurrent sliced-inserts (that this changeset has yet to be rebased over) - * may land in the field. Without this, we would need to be able to retain information about the relative order in + * may land in the gap. Without this, we would need to be able to retain information about the relative order in * time of any number of concurrent slice-moves. See scenario N. */ export interface Intake extends PriorOp { @@ -196,7 +196,7 @@ export namespace Transposed { export interface ModifyMoveIn extends HasOpId, HasPlaceFields { type: "MMoveIn"; - value?: ValueMark; + value?: SetValue; fields?: FieldMarks; } @@ -207,7 +207,7 @@ export namespace Transposed { export type GapEffectType = GapEffect["type"]; export interface GapEffectSegment { - tombs?: SeqNumber; + tomb?: ChangesetTag; type: "Gap"; count: GapCount; /** @@ -235,7 +235,7 @@ export namespace Transposed { export type NodeMark = Detach | Reattach; export interface Detach extends HasOpId { - tomb?: SeqNumber; + tomb?: ChangesetTag; gaps?: GapEffect[]; type: "Delete" | "MoveOut"; count: NodeCount; @@ -243,20 +243,20 @@ export namespace Transposed { export interface ModifyDetach extends HasOpId { type: "MDelete" | "MMoveOut"; - tomb?: SeqNumber; - value?: ValueMark; + tomb?: ChangesetTag; + value?: SetValue; fields?: FieldMarks; } export interface Reattach extends HasOpId { type: "Revive" | "Return"; - tomb: SeqNumber; + tomb: ChangesetTag; count: NodeCount; } export interface ModifyReattach extends HasOpId { type: "MRevive" | "MReturn"; - tomb: SeqNumber; - value?: ValueMark; + tomb: ChangesetTag; + value?: SetValue; fields?: FieldMarks; } @@ -271,30 +271,13 @@ export namespace Transposed { */ export interface Tombstones { count: NodeCount; - seq: PriorSeq; + change: ChangesetTag; } export interface PriorOp { - seq: PriorSeq; + change: ChangesetTag; id: OpId; } - - /** - * The sequence number of the edit that caused the nodes to be detached. - * - * When the nodes were detached as the result of learning of a prior concurrent change - * that preceded a prior change that the current change depends on, a pair of sequence - * numbers is used instead were `seq[0]` is the earlier change whose effect on `seq[1]` - * these tombstones represent. This can be read as "tombstones from the effect of `seq[0]` - * on `seq[1]`". - */ - export type PriorSeq = SeqNumber | [SeqNumber, SeqNumber]; -} - -export namespace Sequenced { - export interface Transaction extends Transposed.Transaction { - seq: SeqNumber; - } } export interface HasLength { @@ -318,20 +301,12 @@ export enum RangeType { /** * A monotonically increasing positive integer assigned to each change within the changeset. * OpIds are scoped to a single changeset, so referring to OpIds across changesets requires - * qualifying them by sequence/commit number. + * qualifying them by change tag. 
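To make the `ChangesetTag` and `SetValue` changes above concrete, here is a small illustrative sketch of a mark list in the revised format; the field contents, tag value, and op IDs are made up, and the import path is assumed.

```typescript
import { Transposed as T } from "./format";

// Hypothetical mark list for a single field: skip one node, a tombstone for a node
// deleted by the changeset tagged `1`, then a value set on the following node.
const fooMarks: T.MarkList = [
    1,                                               // Skip
    { type: "Tomb", change: 1, count: 1 },           // `change` is a ChangesetTag rather than a SeqNumber
    { type: "Modify", value: { id: 0, value: 42 } }, // SetValue now extends HasOpId instead of being a ValueMark
];
```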
* * The uniqueness of IDs is leveraged to uniquely identify the matching move-out for a move-in/return and vice-versa. */ export type OpId = number; -export interface HasSeqNumber { - /** - * Included in a mark to indicate the transaction it was part of. - * This number is assigned by the Fluid service. - */ - seq: SeqNumber; -} - export interface HasOpId { /** * The sequential ID assigned to a change within a transaction. @@ -347,7 +322,7 @@ export type ProtoNode = JsonableTree; export type NodeCount = number; export type GapCount = number; export type Skip = number; -export type SeqNumber = number; +export type ChangesetTag = number | string; export type Value = number | string | boolean; export type ClientId = number; export enum Tiebreak { Left, Right } diff --git a/packages/dds/tree/src/changeset/index.ts b/packages/dds/tree/src/changeset/index.ts index 846778513ffa..2a4b80ad64eb 100644 --- a/packages/dds/tree/src/changeset/index.ts +++ b/packages/dds/tree/src/changeset/index.ts @@ -9,3 +9,5 @@ export * from "./format"; export * from "./toDelta"; +export * from "./utils"; +export * from "./markListFactory"; diff --git a/packages/dds/tree/src/changeset/markListFactory.ts b/packages/dds/tree/src/changeset/markListFactory.ts new file mode 100644 index 000000000000..864836a5a5e2 --- /dev/null +++ b/packages/dds/tree/src/changeset/markListFactory.ts @@ -0,0 +1,58 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { Skip, Transposed as T } from "./format"; +import { extendAttachGroup, isAttachGroup, isObjMark, isSkipMark, tryExtendMark } from "./utils"; + +/** + * Helper class for constructing an offset list of marks that... + * - Does not insert offsets if there is no content after them + * - Does not insert 0-sized offsets + * - Merges runs of offsets together + * - Merges marks together + */ +export class MarkListFactory { + private offset = 0; + public readonly list: T.MarkList = []; + + public push(...marks: T.Mark[]): void { + for (const item of marks) { + if (isSkipMark(item)) { + this.pushOffset(item); + } else { + this.pushContent(item); + } + } + } + + public pushOffset(offset: Skip): void { + this.offset += offset; + } + + public pushContent(mark: T.ObjectMark): void { + if (this.offset > 0) { + this.list.push(this.offset); + this.offset = 0; + } + const prev = this.list[this.list.length - 1]; + if (isObjMark(prev)) { + if (isAttachGroup(prev)) { + if (isAttachGroup(mark)) { + extendAttachGroup(prev, mark); + return; + } + } else if ( + !isAttachGroup(mark) + && prev.type === mark.type + ) { + // Neither are attach groups + if (tryExtendMark(prev, mark)) { + return; + } + } + } + this.list.push(mark); + } +} diff --git a/packages/dds/tree/src/changeset/toDelta.ts b/packages/dds/tree/src/changeset/toDelta.ts index b1f41e085450..0d951a7b68a1 100644 --- a/packages/dds/tree/src/changeset/toDelta.ts +++ b/packages/dds/tree/src/changeset/toDelta.ts @@ -202,15 +202,7 @@ function applyOrCollectModifications( ): InsertedFieldsMarksMap { const outFieldsMarks: InsertedFieldsMarksMap = new Map(); if (modify.value !== undefined) { - const type = modify.value.type; - switch (type) { - case "Set": - node.value = modify.value.value; - break; - case "Revert": - fail(ERR_REVERT_ON_INSERT); - default: unreachableCase(type); - } + node.value = modify.value.value; } if (modify.fields !== undefined) { const protoFields = node.fields ?? 
{}; @@ -328,7 +320,6 @@ function applyOrCollectModifications( const ERR_NOT_IMPLEMENTED = "Not implemented"; const ERR_TOMB_IN_INSERT = "Encountered a concurrent deletion in inserted content"; const ERR_MOD_ON_MISSING_FIELD = "Encountered a modification that targets a non-existent field on an inserted tree"; -const ERR_REVERT_ON_INSERT = "Encountered a revert operation on an inserted node"; const ERR_BOUNCE_ON_INSERT = "Encountered a Bounce mark in an inserted field"; const ERR_INTAKE_ON_INSERT = "Encountered an Intake mark in an inserted field"; const ERR_REVIVE_ON_INSERT = "Encountered a Revive mark in an inserted field"; @@ -338,7 +329,7 @@ const ERR_RETURN_ON_INSERT = "Encountered a Return mark in an inserted field"; * Modifications to a subtree as described by a Changeset. */ interface ChangesetMods { - value?: T.ValueMark; + value?: T.SetValue; fields?: T.FieldMarks; } @@ -356,15 +347,7 @@ interface ChangesetMods { function convertModify(modify: ChangesetMods): DeltaMods { const out: DeltaMods = {}; if (modify.value !== undefined) { - const type = modify.value.type; - switch (type) { - case "Set": - out.setValue = modify.value.value; - break; - case "Revert": - fail(ERR_NOT_IMPLEMENTED); - default: unreachableCase(type); - } + out.setValue = modify.value.value; } const fields = modify.fields; if (fields !== undefined) { diff --git a/packages/dds/tree/src/changeset/utils.ts b/packages/dds/tree/src/changeset/utils.ts new file mode 100644 index 000000000000..f64bac586bdd --- /dev/null +++ b/packages/dds/tree/src/changeset/utils.ts @@ -0,0 +1,380 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { unreachableCase } from "@fluidframework/common-utils"; +import { fail } from "../util"; +import { Skip, Transposed as T } from "./format"; + +export function isAttachGroup(mark: T.Mark): mark is T.AttachGroup { + return Array.isArray(mark); +} + +export function isReattach(mark: T.Mark): mark is T.Reattach | T.ModifyReattach { + return isObjMark(mark) && "type" in mark && + ( + mark.type === "Revive" + || mark.type === "MRevive" + || mark.type === "Return" + || mark.type === "MReturn" + ); +} + +export function isTomb(mark: T.Mark): mark is T.Tomb { + return isObjMark(mark) && "type" in mark && mark.type === "Tomb"; +} + +export function isGapEffectMark(mark: T.Mark): mark is T.GapEffectSegment { + return isObjMark(mark) && "type" in mark && mark.type === "Gap"; +} + +export function getAttachLength(attach: T.Attach): number { + const type = attach.type; + switch (type) { + case "Bounce": + case "Intake": + return 0; + case "MInsert": + case "MMoveIn": + return 1; + case "Insert": + return attach.content.length; + case "MoveIn": + return attach.count; + default: unreachableCase(type); + } +} + +/** + * @returns `true` iff `lhs` and `rhs` are deeply structurally equal. 
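As a quick illustration of how the predicates above discriminate the mark union (skips are numbers, attach groups are arrays, everything else is an object mark), here is a hedged sketch; the marks are made up for illustration.

```typescript
import { Transposed as T } from "./format";
import { getAttachLength, isAttachGroup, isSkipMark, isTomb } from "./utils";

const skip: T.Mark = 3;
const group: T.AttachGroup = [{ type: "Insert", id: 0, content: [] }];
const tomb: T.Mark = { type: "Tomb", change: 1, count: 2 };

isSkipMark(skip);          // true: skips are plain numbers
isAttachGroup(group);      // true: attach groups are arrays of Attach marks
isTomb(tomb);              // true
getAttachLength(group[0]); // 0: an Insert's length is the length of its content
```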
+ */ +export function isEqualGaps(lhs: T.GapEffect[] | undefined, rhs: T.GapEffect[] | undefined): boolean { + if (lhs === rhs) { + return true; + } + if (lhs === undefined || rhs === undefined || lhs.length !== rhs.length) { + return false; + } + for (let i = 0; i < lhs.length; ++i) { + if (!isEqualGapEffect(lhs[i], rhs[i])) { + return false; + } + } + return true; +} + +export function isEqualGapEffect(lhs: Readonly, rhs: Readonly): boolean { + return lhs.id === rhs.id + && lhs.type === rhs.type + && lhs.excludePriorInsertions === rhs.excludePriorInsertions + && lhs.includePosteriorInsertions === rhs.includePosteriorInsertions; +} + +/** + * @param mark - The mark to get the length of. + * @returns The number of nodes within the output context of the mark. + */ +export function getOutputLength(mark: T.Mark): number { + if (isSkipMark(mark)) { + return mark; + } + if (isAttachGroup(mark)) { + return mark.reduce((prev, attach) => prev + getAttachLength(attach), 0); + } + const type = mark.type; + switch (type) { + case "Tomb": + case "Gap": + case "Revive": + case "Return": + return mark.count; + case "MReturn": + case "MRevive": + case "Modify": + return 1; + case "Delete": + case "MDelete": + case "MoveOut": + case "MMoveOut": + return 0; + default: unreachableCase(type); + } +} + +/** + * @param mark - The mark to get the length of. + * @returns The number of nodes within the input context of the mark. + */ +export function getInputLength(mark: T.Mark): number { + if (isSkipMark(mark)) { + return mark; + } + if (isAttachGroup(mark)) { + return 0; + } + const type = mark.type; + switch (type) { + case "Tomb": + case "Gap": + case "Revive": + case "Return": + case "Delete": + case "MoveOut": + return mark.count; + case "MReturn": + case "MRevive": + case "Modify": + case "MDelete": + case "MMoveOut": + return 1; + default: unreachableCase(type); + } +} + +export function isSkipMark(mark: T.Mark): mark is Skip { + return typeof mark === "number"; +} + +/** + * Splits the `mark` into two marks such that the first returned mark has input length `length`. + * @param mark - The mark to split. + * @param length - The desired length for the first of the two returned marks. + * @returns A pair of marks equivalent to the original `mark` + * such that the first returned mark has input length `length`. + */ +export function splitMarkOnInput(mark: TMark, length: number): [TMark, TMark] { + const markLength = getInputLength(mark); + const remainder = markLength - length; + if (length < 1 || remainder < 1) { + fail(`Unable to split mark of length ${markLength} into marks of lengths ${length} and ${remainder}`); + } + if (isSkipMark(mark)) { + return [length, remainder] as [TMark, TMark]; + } + // The compiler seems to think the case below is necessary + // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion + const markObj = mark as T.SizedObjectMark; + const type = markObj.type; + switch (type) { + case "Modify": + case "MDelete": + case "MMoveOut": + case "MReturn": + case "MRevive": + fail(`Unable to split ${type} mark of length 1`); + case "Delete": + case "MoveOut": + case "Return": + case "Revive": + case "Tomb": + case "Gap": + return [{ ...markObj, count: length }, { ...markObj, count: remainder }] as [TMark, TMark]; + default: unreachableCase(type); + } +} + +/** + * Splits the `mark` into two marks such that the first returned mark has output length `length`. + * @param mark - The mark to split. 
+ * @param length - The desired length for the first of the two returned marks. + * @returns A pair of marks equivalent to the original `mark` + * such that the first returned mark has output length `length`. + */ +export function splitMarkOnOutput(mark: TMark, length: number): [TMark, TMark] { + const markLength = getOutputLength(mark); + const remainder = markLength - length; + if (length < 1 || remainder < 1) { + fail(`Unable to split mark of length ${markLength} into marks of lengths ${length} and ${remainder}`); + } + if (isSkipMark(mark)) { + return [length, remainder] as [TMark, TMark]; + } + if (isAttachGroup(mark)) { + return splitAttachGroup(mark, length) as [TMark, TMark]; + } + const markObj = mark as T.SizedObjectMark; + const type = markObj.type; + switch (type) { + case "Modify": + case "MReturn": + case "MRevive": + fail(`Unable to split ${type} mark of length 1`); + case "MDelete": + case "MMoveOut": + case "Delete": + case "MoveOut": + fail(`Unable to split ${type} mark of length 0`); + case "Return": + case "Revive": + case "Tomb": + case "Gap": + return [{ ...markObj, count: length }, { ...markObj, count: remainder }] as [TMark, TMark]; + default: unreachableCase(type); + } +} + +function splitAttachGroup(mark: TAttach[], length: number): [TAttach[], TAttach[]] { + const groupA: TAttach[] = []; + const groupB: TAttach[] = [...mark]; + let groupALength = 0; + while (groupALength < length) { + const attach = groupB.shift(); + if (attach === undefined) { + fail("Discrepancy between getMarkLength and splitMark"); + } + const len = getAttachLength(attach); + if (groupALength + len <= length) { + groupA.push(attach); + groupALength += len; + } else { + const [partA, partB] = splitAttachMark(attach, length - groupALength); + groupA.push(partA); + groupB.unshift(partB); + groupALength = length; + } + } + return [groupA, groupB]; +} + +function splitAttachMark(attach: TAttach, length: number): [TAttach, TAttach] { + const markLength = getAttachLength(attach); + const remainder = markLength - length; + if (length < 1 || markLength <= length) { + fail(`Unable to split mark of length ${markLength} into marks of lengths ${length} and ${remainder}`); + } + const type = attach.type; + switch (type) { + case "Bounce": + case "Intake": + case "MInsert": + case "MMoveIn": + fail("Unable to split mark"); + case "Insert": + return [ + { ...attach, content: attach.content.slice(0, length) }, + { ...attach, content: attach.content.slice(length) }, + ]; + case "MoveIn": + return [ + { ...attach, count: length }, + { ...attach, count: remainder }, + ]; + default: unreachableCase(type); + } +} + +export function isDetachMark(mark: T.Mark | undefined): mark is T.Detach | T.ModifyDetach { + if (isObjMark(mark) && "type" in mark) { + const type = mark.type; + return type === "Delete" || type === "MDelete" || type === "MoveOut" || type === "MMoveOut"; + } + return false; +} + +export function isObjMark(mark: T.Mark | undefined): mark is T.ObjectMark { + return typeof mark === "object"; +} + +/** + * Appends the contents of the `addendum` to the `group`. + * @param group - The attach group to append attach marks to. Is mutated by this function. + * @param addendum - The array of attach marks to append. Is not mutated by this function. 
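The splitting helpers are what `compose` and `rebase` use to align marks of unequal length. A minimal sketch of `splitMarkOnInput`, assuming a made-up `Delete` mark:

```typescript
import { Transposed as T } from "./format";
import { splitMarkOnInput } from "./utils";

// A deletion of 5 nodes, split so that the first piece covers the first 2 nodes.
const del: T.Detach = { type: "Delete", id: 0, count: 5 };
const [first, rest] = splitMarkOnInput(del, 2);
// first === { type: "Delete", id: 0, count: 2 }
// rest  === { type: "Delete", id: 0, count: 3 }
```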
+ */ +export function extendAttachGroup(group: T.AttachGroup, addendum: T.AttachGroup): void { + const lastLeft = group[group.length - 1]; + const firstRight = addendum[0]; + if (lastLeft !== undefined + && firstRight !== undefined + && lastLeft.type === firstRight.type + && lastLeft.id === firstRight.id) { + const type = lastLeft.type; + switch (type) { + case "Insert": + case "MoveIn": { + const firstRightAttach = firstRight as T.Insert | T.MoveIn; + if (lastLeft.heed === firstRightAttach.heed + && lastLeft.tiebreak === firstRightAttach.tiebreak + && lastLeft.src?.id === firstRightAttach.src?.id + && lastLeft.src?.change === firstRightAttach.src?.change + && lastLeft.scorch?.id === firstRightAttach.scorch?.id + && lastLeft.scorch?.change === firstRightAttach.scorch?.change) { + if (lastLeft.type === "Insert") { + const firstRightInsert = firstRight as T.Insert; + lastLeft.content.push(...firstRightInsert.content); + } else { + const firstRightMoveIn = firstRight as T.MoveIn; + lastLeft.count += firstRightMoveIn.count; + } + group.push(...addendum.slice(1)); + return; + } + break; + } + default: break; + } + } + group.push(...addendum); +} + +/** + * Attempts to extend `lhs` to include the effects of `rhs`. + * @param lhs - The mark to extend. + * @param rhs - The effect so extend `rhs` with. + * @returns `true` iff the function was able to mutate `lhs` to include the effects of `rhs`. + * When `false` is returned, `lhs` is left untouched. + */ +export function tryExtendMark(lhs: T.SizedObjectMark, rhs: Readonly): boolean { + if (rhs.type !== lhs.type) { + return false; + } + const type = rhs.type; + switch (type) { + case "Delete": + case "MoveOut": { + const lhsDetach = lhs as T.Detach; + if ( + rhs.id === lhsDetach.id + && rhs.tomb === lhsDetach.tomb + && isEqualGaps(rhs.gaps, lhsDetach.gaps) + ) { + lhsDetach.count += rhs.count; + return true; + } + break; + } + case "Revive": + case "Return": { + const lhsReattach = lhs as T.Reattach; + if ( + rhs.id === lhsReattach.id + && rhs.tomb === lhsReattach.tomb + ) { + lhsReattach.count += rhs.count; + return true; + } + break; + } + case "Gap": { + const lhsGap = lhs as T.GapEffectSegment; + if ( + rhs.tomb === lhsGap.tomb + && isEqualGaps(rhs.stack, lhsGap.stack) + ) { + lhsGap.count += rhs.count; + return true; + } + break; + } + case "Tomb": { + const lhsTomb = lhs as T.Tomb; + if (rhs.change === lhsTomb.change) { + lhsTomb.count += rhs.count; + return true; + } + break; + } + default: break; + } + return false; +} diff --git a/packages/dds/tree/src/edit-manager/editManager.ts b/packages/dds/tree/src/edit-manager/editManager.ts index 5bf6519f8302..4df7402cb33b 100644 --- a/packages/dds/tree/src/edit-manager/editManager.ts +++ b/packages/dds/tree/src/edit-manager/editManager.ts @@ -6,7 +6,7 @@ import { assert } from "@fluidframework/common-utils"; import { ChangeFamily } from "../change-family"; import { AnchorSet, Delta } from "../tree"; -import { brand, Brand, fail, RecursiveReadonly } from "../util"; +import { Brand, fail, RecursiveReadonly } from "../util"; export interface Commit { sessionId: SessionId; @@ -26,8 +26,18 @@ export type SessionId = string; // TODO: Try to reduce this to a single type parameter // TODO: Move logic into Rebaser if possible export class EditManager> { + // The trunk represents the list of received sequenced changes. + // The change in each commit is rebased onto the previous change in the list. 
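`extendAttachGroup` and `tryExtendMark` above are what `MarkListFactory` (introduced earlier in markListFactory.ts) relies on to coalesce adjacent marks. A hedged sketch of the resulting behavior, with made-up marks and an assumed import path:

```typescript
import { MarkListFactory } from "../changeset";

const factory = new MarkListFactory();
factory.pushOffset(2);                                    // offsets are buffered until content follows
factory.pushContent({ type: "Delete", id: 0, count: 1 }); // flushes the buffered offset, then pushes the mark
factory.pushContent({ type: "Delete", id: 0, count: 2 }); // merged into the previous Delete via tryExtendMark
factory.push(3);                                          // a trailing offset with no content after it is dropped
// factory.list === [2, { type: "Delete", id: 0, count: 3 }]
```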
private readonly trunk: Commit[] = []; + /** + * Branches are maintained to represent the local change list that the issuing client would have had + * at the time of submitting the last edit on the branch. + * This means the last change on a branch is always in its original (non-rebased) form. + */ private readonly branches: Map> = new Map(); + // This is the ordered list of changes made by this client which have not yet been confirmed as sequenced changes. + // The first change in this list is based on the last change in the trunk. + // Every other change in this list is based on the change preceding it. private localChanges: TChangeset[] = []; private localSessionId: SessionId | undefined; @@ -56,11 +66,11 @@ export class EditManager): Delta.Root { if (this.trunk.length > 0) { const lastSeqNumber = this.trunk[this.trunk.length - 1].seqNumber; - const nextSeqNumber: SeqNumber = brand(lastSeqNumber as number + 1); - assert(newCommit.seqNumber === nextSeqNumber, - 0x34a /* Expected incoming commit to be next sequenced commit */); + assert( + newCommit.seqNumber > lastSeqNumber, + "Incoming remote op sequence# <= local collabWindow's currentSequence#", + ); } - if (newCommit.sessionId === this.localSessionId) { // `newCommit` should correspond to the oldest change in `localChanges`, so we move it into trunk. // `localChanges` are already rebased to the trunk, so we can use the stored change instead of rebasing the @@ -74,7 +84,7 @@ export class EditManager, newRef: SeqNumber) { - const trunkChanges = this.getCommitsBetween(branch.refSeq, newRef); + /** + * Updates the `branch` to reflect the local changes that the session owner would have had after + * they learned of the commit with sequence number `newRef` being sequenced. + * This is accomplished by rebasing the branch's changes over any new trunk changes up to and including `newRef`. + * Changes with sequence number less than or equal to `newRef` are removed from the branch, + * since they are now part of the trunk this branch is based on. + * @param branch - The branch to update. + * @param newRef - The point in the trunk to rebase the branch up to. + */ + private updateBranch(branch: Branch, newRef: SeqNumber) { + const trunkChanges = this.getCommitsAfterAndUpToInclusive(branch.refSeq, newRef); + if (trunkChanges.length === 0) { + assert(branch.refSeq === newRef, "Expected trunk changes"); + // This early return avoids rebasing the branch changes over an empty sandwich. + return; + } const newBranchChanges: Commit[] = []; const inverses: TChangeset[] = []; @@ -172,14 +196,37 @@ export class EditManager[] { - const firstIndex = this.getCommitIndex(first); - const lastIndex = last === undefined ? undefined : this.getCommitIndex(last); - return this.trunk.slice(firstIndex, lastIndex); + /** + * @param pred - The sequence number of the commit immediately before the commits of interest. + * @param last - The sequence number of the last commit of interest. + * @returns The trunk commits with sequence numbers greater than `pred` and smaller or equal to `last`, + * ordered in sequencing order. + */ + private getCommitsAfterAndUpToInclusive(pred: SeqNumber, last: SeqNumber): Commit[] { + // This check is not just a fast-path for the common case where no concurrent edits occurred; + // it also serves to handle the case where `last` represents the initial state before any commits. + if (pred === last) { + return []; + } + // If there is no corresponding commit, we assume `pred` refers to initial state of the DDS. 
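The comments above describe a trunk whose sequence numbers are increasing but not necessarily contiguous, with an unknown `pred` standing for the initial state. Here is a standalone sketch of the intended range semantics; it is not the real `EditManager` API, just plain numbers:

```typescript
// Trunk commits identified by their sequence numbers; gaps are possible because
// other ops in the container also consume sequence numbers.
const trunkSeqs = [1, 3, 7];

function commitsAfterAndUpToInclusive(pred: number, last: number): number[] {
    if (pred === last) {
        return [];
    }
    const firstIndex = trunkSeqs.indexOf(pred) + 1; // indexOf yields -1 when `pred` is the initial state
    const lastIndex = trunkSeqs.indexOf(last);
    return trunkSeqs.slice(firstIndex, lastIndex + 1);
}

commitsAfterAndUpToInclusive(0, 3); // [1, 3] because `pred` 0 denotes the initial state
commitsAfterAndUpToInclusive(1, 7); // [3, 7]
```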
+ const firstIndex = (this.getCommitIndex(pred) ?? -1) + 1; + const lastIndex = this.getCommitIndex(last) ?? fail("Unknown sequence number"); + return this.trunk.slice(firstIndex, lastIndex + 1); + } + + /** + * @param pred - The sequence number of the commit immediately before the commits of interest. + * @returns The trunk commits with sequence numbers greater than `pred` + */ + private getCommitsAfter(pred: SeqNumber): Commit[] { + // If there is no corresponding commit, we assume `pred` refers to initial state of the DDS. + const firstIndex = (this.getCommitIndex(pred) ?? -1) + 1; + return this.trunk.slice(firstIndex); } - private getCommitIndex(seqNumber: SeqNumber): number { - return this.trunk.findIndex((commit) => commit.seqNumber === seqNumber); + private getCommitIndex(seqNumber: SeqNumber): number | undefined { + const index = this.trunk.findIndex((commit) => commit.seqNumber === seqNumber); + return index === -1 ? undefined : index; } private getOrCreateBranch(sessionId: SessionId, refSeq: SeqNumber): Branch { diff --git a/packages/dds/tree/src/feature-libraries/sequence-change-family/compose.ts b/packages/dds/tree/src/feature-libraries/sequence-change-family/compose.ts new file mode 100644 index 000000000000..2fbddf0d977a --- /dev/null +++ b/packages/dds/tree/src/feature-libraries/sequence-change-family/compose.ts @@ -0,0 +1,243 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { + getInputLength, + getOutputLength, + isAttachGroup, + isDetachMark, + isGapEffectMark, + isReattach, + isSkipMark, + isTomb, + MarkListFactory, + splitMarkOnInput, + splitMarkOnOutput, + Transposed as T, +} from "../../changeset"; +import { clone, fail, StackyIterator } from "../../util"; +import { SequenceChangeset } from "./sequenceChangeset"; + +/** + * Composes a sequence of changesets into a single changeset. + * @param changes - The changesets to be applied. + * Parts of the input may be reused in the output, but the input is not mutated. + * Each changeset in the list is assumed to be applicable after the previous one. + * @returns A changeset that is equivalent to applying each of the given `changes` in order. + * + * WARNING! This implementation is incomplete: + * - Tombstone information is ignored. + * - Support for moves is not implemented. + * - Support for slices is not implemented. + */ +export function compose(changes: SequenceChangeset[]): SequenceChangeset { + if (changes.length === 1) { + return changes[0]; + } + let composedFieldMarks: T.FieldMarks = {}; + for (const change of changes) { + composedFieldMarks = composeFieldMarks(composedFieldMarks, change.marks); + } + return { + marks: composedFieldMarks, + }; +} + +function composeFieldMarks(baseFieldMarks: T.FieldMarks, newFieldMarks: T.FieldMarks): T.FieldMarks { + const composed: T.FieldMarks = {}; + for (const key of Object.keys(newFieldMarks)) { + const composedMarkList = composeMarkLists(baseFieldMarks[key] ?? 
[], newFieldMarks[key]); + if (composedMarkList.length > 0) { + composed[key] = composedMarkList; + } + } + for (const key of Object.keys(baseFieldMarks)) { + if (!(key in newFieldMarks)) { + composed[key] = baseFieldMarks[key]; + } + } + return composed; +} + +function composeMarkLists( + baseMarkList: T.MarkList, + newMarkList: T.MarkList, +): T.MarkList { + const factory = new MarkListFactory(); + const baseIter = new StackyIterator(baseMarkList); + const newIter = new StackyIterator(newMarkList); + for (let newMark of newIter) { + let baseMark: T.Mark | undefined = baseIter.pop(); + if (baseMark === undefined) { + // We have reached a region of the field that the base change does not affect. + // We therefore adopt the new mark as is. + factory.push(clone(newMark)); + } else if (isAttachGroup(newMark)) { + // Content that is being attached by the new changeset cannot interact with base changes. + // Note that attach marks from different changesets can only target the same gap if they are concurrent. + // This method assumes that `newMarkList` is based on `baseMarkList`, so they are not concurrent. + factory.pushContent(clone(newMark)); + baseIter.push(baseMark); + } else if (isReattach(newMark)) { + // Content that is being re-attached by the new changeset can interact with base changes. + // This can happen in two cases: + // - The base change contains the detach that the re-attach is the inverse of. + // - The base change contains a tombstone for the detach that the re-attach is the inverse of. + // We're ignoring these cases for now. The impact of ignoring them is that the relative order of + // reattached content and concurrently attached content is not preserved. + // TODO: properly compose reattach marks with their matching base marks if any. + factory.pushContent(clone(newMark)); + baseIter.push(baseMark); + } else if (isDetachMark(baseMark)) { + // Content that is being detached by the base changeset can interact with the new changes. + // This can happen in two cases: + // - The new change contains reattach marks for this detach. (see above) + // - The new change contains tombs for this detach. + // We're ignoring these cases for now. The impact of ignoring them is that the relative order of + // reattached content and concurrently attached content is not preserved. + // TODO: properly compose detach marks with their matching new marks if any. + factory.pushContent(baseMark); + newIter.push(newMark); + } else if (isTomb(baseMark) || isGapEffectMark(baseMark) || isTomb(newMark) || isGapEffectMark(newMark)) { + // We don't currently support Tomb and Gap marks (and don't offer ways to generate them). + fail("TODO: support Tomb and Gap marks"); + } else { + // If we've reached this branch then `baseMark` and `newMark` start at the same location + // in the document field at the revision after the base changes and before the new changes. + // Despite that, it's not necessarily true that they affect the same range in that document + // field because they may be of different lengths. + // We perform any necessary splitting in order to end up with a pair of marks that do have the same length. 
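Taken together with the per-mark composition logic below, this yields the following end-to-end behavior. The sketch assumes `SequenceChangeset.marks` is a plain `FieldMarks` map and uses made-up field names and values; under the current limitations, a deletion simply obliterates an earlier value change on the same node.

```typescript
import { compose } from "./compose";
import { SequenceChangeset } from "./sequenceChangeset";

const setValue: SequenceChangeset = {
    marks: { foo: [{ type: "Modify", value: { id: 0, value: 42 } }] },
};
const deleteNode: SequenceChangeset = {
    marks: { foo: [{ type: "Delete", id: 0, count: 1 }] },
};

// The composition is equivalent to just deleting the node:
const composed = compose([setValue, deleteNode]);
// composed.marks === { foo: [{ type: "Delete", id: 0, count: 1 }] }
```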
+ const newMarkLength = getInputLength(newMark); + const baseMarkLength = getOutputLength(baseMark); + if (newMarkLength < baseMarkLength) { + let nextBaseMark; + [baseMark, nextBaseMark] = splitMarkOnOutput(baseMark, newMarkLength); + baseIter.push(nextBaseMark); + } else if (newMarkLength > baseMarkLength) { + let nextNewMark; + [newMark, nextNewMark] = splitMarkOnInput(newMark, baseMarkLength); + newIter.push(nextNewMark); + } + // Past this point, we are guaranteed that `newMark` and `baseMark` have the same length and + // start at the same location in the revision after the base changes. + // They therefore refer to the same range for that revision. + const composedMark = composeMarks(baseMark, newMark); + factory.push(composedMark); + } + } + // Push the remaining base marks if any + for (const baseMark of baseIter) { + factory.push(baseMark); + } + return factory.list; +} + +/** + * Composes two marks where `newMark` is based on the state produced by `baseMark`. + * @param newMark - The mark to compose with `baseMark`. + * Its input range should be the same as `baseMark`'s output range. + * @param baseMark - The mark to compose with `newMark`. + * Its output range should be the same as `newMark`'s input range. + * @returns A mark that is equivalent to applying both `baseMark` and `newMark` successively. + */ +function composeMarks(baseMark: T.Mark, newMark: T.SizedMark): T.Mark { + if (isSkipMark(baseMark)) { + return clone(newMark); + } + if (isSkipMark(newMark)) { + return baseMark; + } + if (isAttachGroup(baseMark)) { + return composeWithAttachGroup(baseMark, newMark); + } + const baseType = baseMark.type; + const newType = newMark.type; + if (newType === "MDelete" || baseType === "MDelete") { + // This should not occur yet because we discard all modifications to deleted subtrees + // In the long run we want to preserve them. + fail("TODO: support modifications to deleted subtree"); + } + switch (baseType) { + case "Modify": { + switch (newType) { + case "Modify": { + updateModifyLike(newMark, baseMark); + if (baseMark.fields === undefined && baseMark.value === undefined) { + return 1; + } + return baseMark; + } + case "Delete": { + // For now the deletion obliterates all other modifications. + // In the long run we want to preserve them. + return clone(newMark); + } + default: fail("Not implemented"); + } + } + case "Revive": { + switch (newType) { + case "Modify": { + const modRevive: T.ModifyReattach = { + type: "MRevive", + id: baseMark.id, + tomb: baseMark.tomb, + }; + updateModifyLike(newMark, modRevive); + return modRevive; + } + case "Delete": { + // The deletion undoes the revival + return 0; + } + default: fail("Not implemented"); + } + } + default: fail("Not implemented"); + } +} + +function composeWithAttachGroup(baseMark: T.AttachGroup, newMark: T.SizedObjectMark): T.Mark { + const newType = newMark.type; + switch (newType) { + case "Modify": { + const attach = baseMark[0]; + const baseType = attach.type; + switch (baseType) { + case "Insert": + return [{ + ...newMark, + type: "MInsert", + id: attach.id, + content: attach.content[0], + }]; + case "MInsert": { + updateModifyLike(newMark, attach); + return [attach]; + } + default: fail("Not implemented"); + } + } + case "Delete": { + // The insertion of the previous change is subsequently deleted. 
+ // TODO: preserve the insertion as muted + return 0; + } + default: fail("Not implemented"); + } +} + +function updateModifyLike(curr: T.Modify, base: T.ModifyInsert | T.Modify | T.ModifyReattach) { + if (curr.fields !== undefined) { + base.fields = composeFieldMarks(base.fields ?? {}, curr.fields); + if (Object.keys(base.fields).length === 0) { + delete base.fields; + } + } + if (curr.value !== undefined) { + // Later values override earlier ones + base.value = clone(curr.value); + } +} diff --git a/packages/dds/tree/src/feature-libraries/sequence-change-family/index.ts b/packages/dds/tree/src/feature-libraries/sequence-change-family/index.ts index ecefe9056c65..6f86538fa8d5 100644 --- a/packages/dds/tree/src/feature-libraries/sequence-change-family/index.ts +++ b/packages/dds/tree/src/feature-libraries/sequence-change-family/index.ts @@ -5,5 +5,6 @@ export * from "./sequenceChangeFamily"; export * from "./sequenceChangeRebaser"; -export * from "./sequenceEditBuilder"; export * from "./sequenceChangeset"; +export * from "./sequenceEditBuilder"; +export { DUMMY_INVERSE_VALUE, DUMMY_INVERT_TAG } from "./invert"; diff --git a/packages/dds/tree/src/feature-libraries/sequence-change-family/invert.ts b/packages/dds/tree/src/feature-libraries/sequence-change-family/invert.ts new file mode 100644 index 000000000000..c648c51b6e1a --- /dev/null +++ b/packages/dds/tree/src/feature-libraries/sequence-change-family/invert.ts @@ -0,0 +1,118 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { ChangesetTag, isAttachGroup, isSkipMark, OpId, Transposed as T } from "../../changeset"; +import { fail } from "../../util"; +import { SequenceChangeset } from "./sequenceChangeset"; + +/** + * Dummy value used in place of the actual tag. + * TODO: give `invert` access real tag data. + */ + export const DUMMY_INVERT_TAG: ChangesetTag = "Dummy Invert Changeset Tag"; + +/** + * Dummy value used in place of actual repair data. + * TODO: give `invert` access real repair data. + */ +export const DUMMY_INVERSE_VALUE = "Dummy inverse value"; + +/** + * Inverts a given changeset. + * @param change - The changeset to produce the inverse of. + * @returns The inverse of the given `change` such that the inverse can be applied after `change`. + * + * WARNING! This implementation is incomplete: + * - It is unable to produce adequate inverses for set-value and delete operations. + * This is because changesets are not given IDs. + * - Support for moves is not implemented. + * - Support for slices is not implemented. 
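A small sketch of what the `invert` function defined below produces under these limitations; the changeset is made up, and the resulting `Revive` can only reference `DUMMY_INVERT_TAG` until real tag data is plumbed through:

```typescript
import { DUMMY_INVERT_TAG, invert } from "./invert";
import { SequenceChangeset } from "./sequenceChangeset";

const deleteTwoNodes: SequenceChangeset = {
    marks: { foo: [{ type: "Delete", id: 0, count: 2 }] },
};

const inverse = invert(deleteTwoNodes);
// inverse.marks === { foo: [{ type: "Revive", id: 0, tomb: DUMMY_INVERT_TAG, count: 2 }] }
```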
+ */ +export function invert(change: SequenceChangeset): SequenceChangeset { + // TODO: support the input change being a squash + const opIdToTag = (id: OpId): ChangesetTag => { + return DUMMY_INVERT_TAG; + }; + return { + marks: invertFieldMarks(change.marks, opIdToTag), + }; +} + +type IdToTagLookup = (id: OpId) => ChangesetTag; + +function invertFieldMarks(fieldMarks: T.FieldMarks, opIdToTag: IdToTagLookup): T.FieldMarks { + const inverseFieldMarks: T.FieldMarks = {}; + for (const key of Object.keys(fieldMarks)) { + const markList = fieldMarks[key]; + inverseFieldMarks[key] = invertMarkList(markList, opIdToTag); + } + return inverseFieldMarks; +} + +function invertMarkList(markList: T.MarkList, opIdToTag: IdToTagLookup): T.MarkList { + const inverseMarkList: T.MarkList = []; + for (const mark of markList) { + const inverseMarks = invertMark(mark, opIdToTag); + inverseMarkList.push(...inverseMarks); + } + return inverseMarkList; +} + +function invertMark(mark: T.Mark, opIdToTag: IdToTagLookup): T.Mark[] { + if (isSkipMark(mark)) { + return [mark]; + } else if (isAttachGroup(mark)) { + const inverseMarks: T.Mark[] = []; + for (const attach of mark) { + switch (attach.type) { + case "Insert": + case "MInsert": { + inverseMarks.push({ + type: "Delete", + id: attach.id, + count: attach.type === "Insert" ? attach.content.length : 1, + }); + break; + } + default: fail("Not implemented"); + } + } + return inverseMarks; + } else { + switch (mark.type) { + case "Delete": { + return [{ + type: "Revive", + id: mark.id, + tomb: opIdToTag(mark.id), + count: mark.count, + }]; + } + case "Revive": { + return [{ + type: "Delete", + id: mark.id, + count: mark.count, + }]; + } + case "Modify": { + const modify: T.Modify = { + type: "Modify", + }; + if (mark.value !== undefined) { + modify.value = { + id: mark.value.id, + value: DUMMY_INVERSE_VALUE, + }; + } + if (mark.fields !== undefined) { + modify.fields = invertFieldMarks(mark.fields, opIdToTag); + } + return [modify]; + } + default: fail("Not implemented"); + } + } +} diff --git a/packages/dds/tree/src/feature-libraries/sequence-change-family/rebase.ts b/packages/dds/tree/src/feature-libraries/sequence-change-family/rebase.ts new file mode 100644 index 000000000000..280b1507669c --- /dev/null +++ b/packages/dds/tree/src/feature-libraries/sequence-change-family/rebase.ts @@ -0,0 +1,140 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { + getInputLength, + getOutputLength, + isAttachGroup, + isReattach, + isSkipMark, + MarkListFactory, + splitMarkOnInput, + Transposed as T, +} from "../../changeset"; +import { clone, fail, StackyIterator } from "../../util"; +import { SequenceChangeset } from "./sequenceChangeset"; + +/** + * Rebases `change` over `base` assuming they both apply to the same initial state. + * @param change - The changeset to rebase. + * @param base - The changeset to rebase over. + * @returns A changeset that performs the changes in `change` but does so assuming `base` has been applied first. + * + * WARNING! This implementation is incomplete: + * - Marks that affect existing content are removed instead of muted when rebased over the deletion of that content. + * This prevents us from then reinstating the mark when rebasing over the revive. + * - Tombs are not added when rebasing an insert over a gap that is immediately left of deleted content. + * This prevents us from being able to accurately track the position of the insert. 
+ * - Tiebreak ordering is not respected. + * - Support for moves is not implemented. + * - Support for slices is not implemented. + */ +export function rebase(change: SequenceChangeset, base: SequenceChangeset): SequenceChangeset { + const fields = rebaseFieldMarks(change.marks, base.marks); + return { + marks: fields, + }; +} + +function rebaseFieldMarks(change: T.FieldMarks, base: T.FieldMarks): T.FieldMarks { + const fields: T.FieldMarks = {}; + for (const key of Object.keys(change)) { + if (key in base) { + fields[key] = rebaseMarkList(change[key], base[key]); + } else { + fields[key] = clone(change[key]); + } + } + return fields; +} + +function rebaseMarkList(currMarkList: T.MarkList, baseMarkList: T.MarkList): T.MarkList { + const factory = new MarkListFactory(); + const baseIter = new StackyIterator(baseMarkList); + const currIter = new StackyIterator(currMarkList); + for (let baseMark of baseIter) { + let currMark: T.Mark | undefined = currIter.pop(); + if (currMark === undefined) { + break; + } + + if (isAttachGroup(currMark)) { + // We currently ignore the ways in which base marks could affect attaches. + // These are: + // 1. Slices with which the attach would commute. + // 2. Attaches that target the same gap. + // We ignore #1 because slices are not yet supported. + // We ignore #2 because we do not yet support specifying the tiebreak. + factory.pushContent(clone(currMark)); + baseIter.push(baseMark); + } else if (isReattach(currMark)) { + // We currently ignore the ways in which base marks could affect re-attaches. + // These are: + // 1. A reattach that targets the same tombs. + // 2. Attaches that target the same gap. + // We ignore #1 because it could only occur if undo were supported. + // We ignore #2 because we do not yet support specifying the tiebreak. + factory.pushContent(clone(currMark)); + baseIter.push(baseMark); + } else if (isReattach(baseMark)) { + // We currently ignore the ways in which curr marks overlap with this re-attach. + // These are: + // 1. A reattach that matches this re-attach. + // 2. A tomb that matches this re-attach. + // We ignore #1 because it could only occur if undo were supported. + // We ignore #2 because we do not yet produce tombs. + factory.pushOffset(getOutputLength(baseMark)); + currIter.push(currMark); + } else if (isAttachGroup(baseMark)) { + // We currently ignore the ways in which curr marks overlap with these attaches. + // These are: + // 1. Slice ranges that include prior insertions + // We ignore #1 because we do not yet support slices. + factory.pushOffset(getOutputLength(baseMark)); + currIter.push(currMark); + } else { + // If we've reached this branch then `baseMark` and `currMark` start at the same location + // in the document field at the revision to which both changesets apply. + // Despite that, it's not necessarily true that they affect the same range in that document + // field because they may be of different lengths. + // We perform any necessary splitting in order to end up with a pair of marks that do have the same length. 
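To make the first limitation concrete, here is a hedged sketch of `rebase` dropping a local set-value when it is rebased over a concurrent deletion of the same node; the field name and values are made up:

```typescript
import { rebase } from "./rebase";
import { SequenceChangeset } from "./sequenceChangeset";

const setValue: SequenceChangeset = {
    marks: { foo: [{ type: "Modify", value: { id: 0, value: 42 } }] },
};
const concurrentDelete: SequenceChangeset = {
    marks: { foo: [{ type: "Delete", id: 0, count: 1 }] },
};

// The set-value is removed outright rather than muted, because its target node no longer exists:
const rebased = rebase(setValue, concurrentDelete);
// rebased.marks === { foo: [] }
```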
+ const currMarkLength = getInputLength(currMark); + const baseMarkLength = getInputLength(baseMark); + if (currMarkLength < baseMarkLength) { + let nextBaseMark; + [baseMark, nextBaseMark] = splitMarkOnInput(baseMark, currMarkLength); + baseIter.push(nextBaseMark); + } else if (currMarkLength > baseMarkLength) { + let nextCurrMark; + [currMark, nextCurrMark] = splitMarkOnInput(currMark, baseMarkLength); + currIter.push(nextCurrMark); + } + // Past this point, we are guaranteed that `baseMark` and `currMark` have the same length and + // start at the same location at the revision to which both changesets apply. + // They therefore refer to the same range for that revision. + const rebasedMark = rebaseMark(currMark, baseMark); + factory.push(rebasedMark); + } + } + for (const currMark of currIter) { + factory.push(currMark); + } + return factory.list; +} + +function rebaseMark(currMark: T.SizedMark, baseMark: T.SizedMark): T.SizedMark { + if (isSkipMark(baseMark)) { + return clone(currMark); + } + const baseType = baseMark.type; + switch (baseType) { + case "Delete": + case "MDelete": + return 0; + case "Modify": + return clone(currMark); + default: fail("Not implemented"); + } +} diff --git a/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceChangeRebaser.ts b/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceChangeRebaser.ts index 806fd96dff28..c904148bfd4f 100644 --- a/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceChangeRebaser.ts +++ b/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceChangeRebaser.ts @@ -7,25 +7,12 @@ import { toDelta } from "../../changeset"; import { ChangeRebaser } from "../../rebase"; import { AnchorSet } from "../../tree"; import { SequenceChangeset } from "./sequenceChangeset"; +import { compose } from "./compose"; +import { invert } from "./invert"; +import { rebase } from "./rebase"; export type SequenceChangeRebaser = ChangeRebaser; -function compose(changes: SequenceChangeset[]): SequenceChangeset { - if (changes.length === 1) { - return changes[0]; - } - - throw Error("Not implemented"); // TODO -} - -function invert(changes: SequenceChangeset): SequenceChangeset { - throw Error("Not implemented"); // TODO - } - -function rebase(change: SequenceChangeset, over: SequenceChangeset): SequenceChangeset { - throw Error("Not implemented"); // TODO -} - function rebaseAnchors(anchors: AnchorSet, over: SequenceChangeset): void { anchors.applyDelta(toDelta(over)); } diff --git a/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceEditBuilder.ts b/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceEditBuilder.ts index feaa2b6e9703..593a002fd407 100644 --- a/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceEditBuilder.ts +++ b/packages/dds/tree/src/feature-libraries/sequence-change-family/sequenceEditBuilder.ts @@ -23,7 +23,7 @@ export class SequenceEditBuilder extends ProgressiveEditBuilder implements Checkout { diff --git a/packages/dds/tree/src/test/changeset/samples.ts b/packages/dds/tree/src/test/changeset/samples.ts index da461bdaf80e..041f21f0f1a5 100644 --- a/packages/dds/tree/src/test/changeset/samples.ts +++ b/packages/dds/tree/src/test/changeset/samples.ts @@ -4,7 +4,7 @@ */ import { jsonArray, jsonNumber, jsonObject, jsonString } from "../../domains"; -import { Effects, Transposed as T, Sequenced as S } from "../../changeset"; +import { Effects, Transposed as T } from "../../changeset"; /** * Demonstrates how 
to represent a change that inserts a root tree. @@ -149,9 +149,8 @@ export namespace SwapParentChild { * Expected outcome: foo=[A] bar=[X D] */ export namespace ScenarioA { - export const e1: S.Transaction = { + export const e1: T.Transaction = { ref: 0, - seq: 1, marks: [{ type: "Modify", fields: { @@ -167,9 +166,8 @@ export namespace ScenarioA { }], }; - export const e2: S.Transaction = { + export const e2: T.Transaction = { ref: 0, - seq: 2, moves: [{ id: 0, src: { 0: { foo: 1 } }, dst: { 0: { bar: 0 } } }], marks: [{ type: "Modify", @@ -194,9 +192,8 @@ export namespace ScenarioA { }], }; - export const e3: S.Transaction = { + export const e3: T.Transaction = { ref: 0, - seq: 3, marks: [{ type: "Modify", fields: { @@ -208,8 +205,7 @@ export namespace ScenarioA { }], }; - export const e2_r_e1: S.Transaction = { - seq: 2, + export const e2_r_e1: T.Transaction = { ref: 0, newRef: 1, moves: [ @@ -252,8 +248,7 @@ export namespace ScenarioA { }], }; - export const e3_r_e1: S.Transaction = { - seq: 3, + export const e3_r_e1: T.Transaction = { ref: 0, newRef: 1, marks: [{ @@ -261,16 +256,15 @@ export namespace ScenarioA { fields: { foo: [ 1, - { type: "Tomb", seq: 1, count: 1 }, // B + { type: "Tomb", change: 1, count: 1 }, // B [{ type: "Insert", id: 0, content: [nodeX], heed: Effects.All }], - { type: "Tomb", seq: 1, count: 1 }, // C + { type: "Tomb", change: 1, count: 1 }, // C ], }, }], }; - export const e3_r_e2: S.Transaction = { - seq: 3, + export const e3_r_e2: T.Transaction = { ref: 0, newRef: 2, moves: [{ id: 0, src: { 0: { foo: 1 } }, dst: { 0: { bar: 0 } } }], @@ -279,9 +273,9 @@ export namespace ScenarioA { fields: { foo: [ 1, - { type: "Tomb", seq: 1, count: 1 }, // B + { type: "Tomb", change: 1, count: 1 }, // B [{ type: "Bounce", id: 0, heed: Effects.All }], - { type: "Tomb", seq: 1, count: 1 }, // C + { type: "Tomb", change: 1, count: 1 }, // C ], bar: [ [ @@ -290,10 +284,10 @@ export namespace ScenarioA { id: 0, content: [nodeX], heed: Effects.All, - src: { seq: 2, id: 0 }, + src: { change: 2, id: 0 }, }, - { type: "Intake", seq: 2, id: 1 }, - { type: "Intake", seq: 2, id: 2 }, + { type: "Intake", change: 2, id: 1 }, + { type: "Intake", change: 2, id: 2 }, ], ], }, @@ -314,9 +308,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[W X Y Z] // */ // export namespace ScenarioB { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -328,9 +321,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -345,9 +337,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -362,14 +353,13 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 5, seq: 1 }], +// tombs: [{ count: 5, change: 1 }], // attach: [ // 1, // [{ type: "Insert", id: 0, content: [{ id: "W" }] }], @@ -381,14 +371,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 5, seq: 1 }], +// tombs: [{ count: 5, change: 1 }], // attach: [ 
// 2, // [{ type: "Insert", id: 0, content: [{ id: "X" }] }], @@ -400,19 +389,18 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // marks: { // modify: [{ // foo: { // tombs: [ -// { count: 1, seq: 1 }, +// { count: 1, change: 1 }, // 1, // W -// { count: 2, seq: 1 }, +// { count: 2, change: 1 }, // 1, // Y -// { count: 1, seq: 1 }, +// { count: 1, change: 1 }, // ], // attach: [ // 3, // [-A-W-B @@ -442,9 +430,8 @@ export namespace ScenarioA { // * User 3's edit should be muted. // */ // export namespace ScenarioC { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -456,13 +443,12 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 1, seq: 1 }], +// tombs: [{ count: 1, change: 1 }], // nodes: [ // { type: "Revive", id: 0, count: 1 }, // ], @@ -471,9 +457,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -485,14 +470,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 1, seq: 1 }], +// tombs: [{ count: 1, change: 1 }], // nodes: [ // { type: "Delete", id: 0, count: 1 }, // ], @@ -501,9 +485,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // marks: { // modify: [{ @@ -516,9 +499,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // foo: { @@ -530,14 +512,13 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { +// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 1, seq: 1 }], +// tombs: [{ count: 1, change: 1 }], // nodes: [ // { type: "Delete", id: 0, count: 1 }, // ], @@ -546,9 +527,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e2: S.Transaction = { +// export const e4_r_e2: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 2, // marks: { // modify: [{ @@ -561,14 +541,13 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { +// export const e4_r_e3: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 3, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 1, seq: 1 }], +// tombs: [{ count: 1, change: 1 }], // nodes: [ // { type: "Delete", id: 0, count: 1 }, // ], @@ -589,9 +568,8 @@ export namespace ScenarioA { // * User 2's edit should be muted. 
// */ // export namespace ScenarioD { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -606,9 +584,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -621,17 +598,16 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 1, -// [{ type: "Insert", id: 0, content: [{ id: "X" }], scorch: { seq: 1, id: 0 } }], +// [{ type: "Insert", id: 0, content: [{ id: "X" }], scorch: { change: 1, id: 0 } }], // ], // }, // }], @@ -655,8 +631,7 @@ export namespace ScenarioA { // * X is deleted (as opposed to inserted in trait bar). // */ // export namespace ScenarioE { -// export const e1: S.Transaction = { -// seq: 1, +// export const e1: T.Transaction = { // ref: 0, // moves: [{ id: 0, src: { foo: 1 }, dst: { bar: 0 } }], // marks: { @@ -676,8 +651,7 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { -// seq: 2, +// export const e2: T.Transaction = { // ref: 0, // marks: { // modify: [{ @@ -694,8 +668,7 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { -// seq: 2, +// export const e2_r_e1: T.Transaction = { // ref: 0, // newRef: 1, // marks: { @@ -723,9 +696,8 @@ export namespace ScenarioA { // * Expected outcome: [r A x y z B] // */ // export namespace ScenarioF { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -737,9 +709,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -752,9 +723,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -767,8 +737,7 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { -// seq: 2, +// export const e2_r_e1: T.Transaction = { // ref: 0, // newRef: 1, // marks: { @@ -795,8 +764,7 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { -// seq: 3, +// export const e3_r_e2: T.Transaction = { // ref: 0, // newRef: 2, // marks: { @@ -828,9 +796,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[M N O] bar=[A X Y B] // */ // export namespace ScenarioG { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 0 } }], // marks: { // modify: [{ @@ -852,9 +819,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -872,9 +838,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -887,9 +852,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // foo: { @@ 
-902,9 +866,8 @@ export namespace ScenarioA { // }, // }; -// export const e5: S.Transaction = { +// export const e5: T.Transaction = { // ref: 0, -// seq: 5, // marks: { // modify: [{ // foo: { @@ -917,14 +880,13 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { -// seq: 2, +// export const e2_r_e1: T.Transaction = { // ref: 0, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 1, // [{ type: "Bounce", id: 0, heed: Effects.Move }], @@ -937,7 +899,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }, { id: "Y" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // heed: Effects.Move, // }], // ], @@ -968,17 +930,16 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { -// seq: 3, +// export const e3_r_e2: T.Transaction = { // ref: 0, // newRef: 2, // marks: { // modify: [{ // foo: { // tombs: [ -// { count: 1, seq: 1 }, // A -// { count: 2, seq: [1, 2] }, // X Y -// { count: 1, seq: 1 }, // B +// { count: 1, change: 1 }, // A +// { count: 2, change: 2 }, // X Y +// { count: 1, change: 1 }, // B // ], // attach: [ // 2, @@ -1013,19 +974,18 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { -// seq: 4, +// export const e4_r_e3: T.Transaction = { // ref: 0, // newRef: 3, // marks: { // modify: [{ // foo: { // tombs: [ -// { count: 1, seq: 1 }, // A -// { count: 1, seq: [1, 2] }, // X +// { count: 1, change: 1 }, // A +// { count: 1, change: 2 }, // X // 1, // N -// { count: 1, seq: [1, 2] }, // Y -// { count: 1, seq: 1 }, // B +// { count: 1, change: 2 }, // Y +// { count: 1, change: 1 }, // B // ], // attach: [ // 1, // [-A @@ -1062,20 +1022,19 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e4: S.Transaction = { -// seq: 5, +// export const e5_r_e4: T.Transaction = { // ref: 0, // newRef: 3, // marks: { // modify: [{ // foo: { // tombs: [ -// { count: 1, seq: 1 }, // A +// { count: 1, change: 1 }, // A // 1, // M -// { count: 1, seq: [1, 2] }, // X +// { count: 1, change: 2 }, // X // 1, // N -// { count: 1, seq: [1, 2] }, // Y -// { count: 1, seq: 1 }, // B +// { count: 1, change: 2 }, // Y +// { count: 1, change: 1 }, // B // ], // attach: [ // 5, @@ -1104,9 +1063,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[] bar=[A X B Y] baz=[U V] // */ // export namespace ScenarioH { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 1 } }], // marks: { // modify: [{ @@ -1128,9 +1086,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { bar: 0 }, dst: { baz: 0 } }], // marks: { // modify: [{ @@ -1151,9 +1108,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -1167,9 +1123,8 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // moves: [{ id: 0, src: { bar: 0 }, dst: { baz: 0 } }], // marks: { @@ -1195,9 +1150,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // 
newRef: 1, // moves: [ // { id: 0, src: { foo: 1 }, dst: { bar: 2 } }, @@ -1207,7 +1161,7 @@ export namespace ScenarioA { // modify: [{ // foo: { // tombs: [ -// { count: 2, seq: 1 }, +// { count: 2, change: 1 }, // ], // attach: [ // 1, // [-A @@ -1222,14 +1176,14 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // heed: Effects.None, // }], // [{ // B-V // type: "Insert", // id: 1, // content: [{ id: "Y" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // heed: Effects.None, // }], // ], @@ -1238,9 +1192,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // moves: [ // { id: 0, src: { foo: 1 }, dst: { bar: 1 } }, @@ -1250,7 +1203,7 @@ export namespace ScenarioA { // modify: [{ // foo: { // tombs: [ -// { count: 2, seq: 1 }, +// { count: 2, change: 1 }, // ], // attach: [ // 1, // [-A @@ -1260,9 +1213,9 @@ export namespace ScenarioA { // }, // bar: { // tombs: [ -// { count: 1, seq: 2 }, // U +// { count: 1, change: 2 }, // U // 2, // A B -// { count: 1, seq: 2 }, // V +// { count: 1, change: 2 }, // V // ], // attach: [ // 2, // [-U-A @@ -1270,14 +1223,14 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // heed: Effects.None, // }], // [{ // B-V // type: "Insert", // id: 1, // content: [{ id: "Y" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // heed: Effects.None, // }], // ], @@ -1320,9 +1273,8 @@ export namespace ScenarioA { // */ // export namespace ScenarioI { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 1 } }], // marks: { // modify: [{ @@ -1344,9 +1296,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 1 } }], // marks: { // modify: [{ @@ -1391,9 +1342,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[] bar=[A W X Y Z C] // */ // export namespace ScenarioJ { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -1406,9 +1356,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 0 } }], // marks: { // modify: [{ @@ -1430,9 +1379,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -1445,9 +1393,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // foo: { @@ -1460,9 +1407,8 @@ export namespace ScenarioA { // }, // }; -// export const e5: S.Transaction = { +// export const e5: T.Transaction = { // ref: 2, // With knowledge with e1 and e2 -// seq: 5, // marks: { // modify: [{ // bar: { @@ -1478,15 +1424,14 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // moves: [{ id: 0, src: { foo: 0 }, dst: { 
bar: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // nodes: [ // { type: "Move", id: 0, count: 3 }, // ], @@ -1504,14 +1449,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 2, // [{ type: "Insert", id: 0, content: [{ id: "Y" }], tiebreak: Tiebreak.Left }], @@ -1521,15 +1465,14 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // moves: [{ id: 0, src: { foo: 1 }, dst: { bar: 1 } }], // marks: { // modify: [{ // foo: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 2, // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Left }], @@ -1545,14 +1488,13 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { +// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 1, // [{ type: "Insert", id: 0, content: [{ id: "X" }] }], @@ -1562,15 +1504,14 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e2: S.Transaction = { +// export const e4_r_e2: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 2, // moves: [{ id: 0, src: { foo: 1 }, dst: { bar: 1 } }], // marks: { // modify: [{ // foo: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 1, // [{ type: "Bounce", id: 0 }], @@ -1586,15 +1527,14 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { +// export const e4_r_e3: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 3, // moves: [{ id: 0, src: { foo: 1 }, dst: { bar: 1 } }], // marks: { // modify: [{ // foo: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 1, // [{ type: "Bounce", id: 0 }], @@ -1610,9 +1550,8 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e3: S.Transaction = { +// export const e5_r_e3: T.Transaction = { // ref: 2, -// seq: 5, // newRef: 3, // marks: { // modify: [{ @@ -1627,9 +1566,8 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e4: S.Transaction = { +// export const e5_r_e4: T.Transaction = { // ref: 2, -// seq: 5, // newRef: 4, // marks: { // modify: [{ @@ -1662,9 +1600,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[X Y] // */ // export namespace ScenarioK { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -1676,9 +1613,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { foo: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ @@ -1698,9 +1634,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -1716,15 +1651,14 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 
1, // moves: [{ id: 0, src: { foo: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 1, seq: 1 }], +// tombs: [{ count: 1, change: 1 }], // nodes: [ // { type: "Move", id: 0, count: 1 }, // ], @@ -1740,14 +1674,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 1, seq: 1 }], +// tombs: [{ count: 1, change: 1 }], // attach: [ // 1, // [ @@ -1760,16 +1693,15 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // moves: [{ id: 0, src: { foo: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ // foo: { // tombs: [ -// { count: 1, seq: 1 }, +// { count: 1, change: 1 }, // ], // attach: [ // [ @@ -1777,7 +1709,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // }, // ], // [ @@ -1819,9 +1751,8 @@ export namespace ScenarioA { // * Expected outcome: qux=[X Y] // */ // export namespace ScenarioL { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -1833,9 +1764,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 0 } }], // marks: { // modify: [{ @@ -1856,9 +1786,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // moves: [ // { id: 0, src: { bar: 0 }, dst: { baz: 0 } }, // { id: 1, src: { bar: 1 }, dst: { baz: 0 } }, @@ -1886,9 +1815,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // moves: [{ id: 0, src: { baz: 0 }, dst: { qux: 0 } }], // marks: { // modify: [{ @@ -1909,9 +1837,8 @@ export namespace ScenarioA { // }, // }; -// export const e5: S.Transaction = { +// export const e5: T.Transaction = { // ref: 0, -// seq: 5, // marks: { // modify: [{ // foo: { @@ -1924,9 +1851,8 @@ export namespace ScenarioA { // }, // }; -// export const e6: S.Transaction = { +// export const e6: T.Transaction = { // ref: 0, -// seq: 6, // marks: { // modify: [{ // foo: { @@ -1938,15 +1864,14 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // nodes: [ // { type: "Move", id: 0, count: 2 }, // ], @@ -1963,9 +1888,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // moves: [ // { id: 0, src: { bar: 0 }, dst: { baz: 0 } }, @@ -1974,7 +1898,7 @@ export namespace ScenarioA { // marks: { // modify: [{ // bar: { -// tombs: [{ count: 2, seq: [1, 2] }], +// tombs: [{ count: 2, change: 2 }], // nodes: [ // { type: "Move", id: 0, count: 1 }, // { type: "Move", id: 1, count: 1 }, @@ -1995,15 +1919,14 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { 
+// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // moves: [{ id: 0, src: { baz: 0 }, dst: { qux: 0 } }], // marks: { // modify: [{ // baz: { -// tombs: [{ count: 2, seq: [1, 3] }], +// tombs: [{ count: 2, change: [1, 3] }], // nodes: [ // { type: "Move", id: 0, count: 2 }, // ], @@ -2020,14 +1943,13 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e1: S.Transaction = { +// export const e5_r_e1: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 2, // [{ type: "Insert", id: 0, content: [{ id: "X" }], tiebreak: Tiebreak.Left }], @@ -2037,15 +1959,14 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e2: S.Transaction = { +// export const e5_r_e2: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 2, // moves: [{ id: 0, src: { foo: 2 }, dst: { bar: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 2, // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Left }], @@ -2057,7 +1978,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Left, // }], // ], @@ -2066,15 +1987,14 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e3: S.Transaction = { +// export const e5_r_e3: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 3, // moves: [{ id: 0, src: { foo: 2 }, hops: [{ bar: 0 }], dst: { baz: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 2, // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Left }], @@ -2082,7 +2002,7 @@ export namespace ScenarioA { // }, // bar: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 2, id: 0 }, tiebreak: Tiebreak.Left }], +// [{ type: "Bounce", id: 0, src: { change: 2, id: 0 }, tiebreak: Tiebreak.Left }], // ], // }, // baz: { @@ -2091,7 +2011,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 3, id: 1 }, +// src: { change: 3, id: 1 }, // tiebreak: Tiebreak.Left, // }], // ], @@ -2100,15 +2020,14 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e4: S.Transaction = { +// export const e5_r_e4: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 4, // moves: [{ id: 0, src: { foo: 2 }, hops: [{ bar: 0 }, { baz: 0 }], dst: { qux: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 2, // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Left }], @@ -2116,12 +2035,12 @@ export namespace ScenarioA { // }, // bar: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 2, id: 0 }, tiebreak: Tiebreak.Left }], +// [{ type: "Bounce", id: 0, src: { change: 2, id: 0 }, tiebreak: Tiebreak.Left }], // ], // }, // baz: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 3, id: 1 }, tiebreak: Tiebreak.Left }], +// [{ type: "Bounce", id: 0, src: { change: 3, id: 1 }, tiebreak: Tiebreak.Left }], // ], // }, // qux: { @@ -2130,7 +2049,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 4, id: 0 }, +// src: { change: 4, id: 0 }, // tiebreak: Tiebreak.Left, // }], // ], @@ -2139,14 +2058,13 @@ export namespace ScenarioA { // }, // }; -// export const e6_r_e1: S.Transaction = { +// export const 
e6_r_e1: T.Transaction = { // ref: 0, -// seq: 6, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // [{ type: "Insert", id: 0, content: [{ id: "Y" }], tiebreak: Tiebreak.Right }], // ], @@ -2155,15 +2073,14 @@ export namespace ScenarioA { // }, // }; -// export const e6_r_e2: S.Transaction = { +// export const e6_r_e2: T.Transaction = { // ref: 0, -// seq: 6, // newRef: 2, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Right }], // ], @@ -2174,7 +2091,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Left, // }], // ], @@ -2183,22 +2100,21 @@ export namespace ScenarioA { // }, // }; -// export const e6_r_e3: S.Transaction = { +// export const e6_r_e3: T.Transaction = { // ref: 0, -// seq: 6, // newRef: 3, // moves: [{ id: 0, src: { foo: 0 }, hops: [{ bar: 0 }], dst: { baz: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Right }], // ], // }, // bar: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 2, id: 0 }, tiebreak: Tiebreak.Left }], +// [{ type: "Bounce", id: 0, src: { change: 2, id: 0 }, tiebreak: Tiebreak.Left }], // ], // }, // baz: { @@ -2207,7 +2123,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 3, id: 0 }, +// src: { change: 3, id: 0 }, // tiebreak: Tiebreak.Right, // }], // ], @@ -2216,27 +2132,26 @@ export namespace ScenarioA { // }, // }; -// export const e6_r_e4: S.Transaction = { +// export const e6_r_e4: T.Transaction = { // ref: 0, -// seq: 6, // newRef: 4, // moves: [{ id: 0, src: { foo: 0 }, hops: [{ bar: 0 }, { baz: 0 }], dst: { qux: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Right }], // ], // }, // bar: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 2, id: 0 }, tiebreak: Tiebreak.Left }], +// [{ type: "Bounce", id: 0, src: { change: 2, id: 0 }, tiebreak: Tiebreak.Left }], // ], // }, // baz: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 3, id: 0 }, tiebreak: Tiebreak.Right }], +// [{ type: "Bounce", id: 0, src: { change: 3, id: 0 }, tiebreak: Tiebreak.Right }], // ], // }, // qux: { @@ -2244,7 +2159,7 @@ export namespace ScenarioA { // [{ // type: "Insert", id: 0, // content: [{ id: "Y" }], -// src: { seq: 4, id: 0 }, +// src: { change: 4, id: 0 }, // tiebreak: Tiebreak.Left, // }], // ], @@ -2253,27 +2168,26 @@ export namespace ScenarioA { // }, // }; -// export const e6_r_e5: S.Transaction = { +// export const e6_r_e5: T.Transaction = { // ref: 0, -// seq: 6, // newRef: 5, // moves: [{ id: 0, src: { foo: 0 }, hops: [{ bar: 0 }, { baz: 0 }], dst: { qux: 0 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Right }], // ], // }, // bar: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 2, id: 0 }, tiebreak: Tiebreak.Left }], +// [{ type: "Bounce", id: 0, src: { change: 2, id: 0 }, tiebreak: Tiebreak.Left }], // 
], // }, // baz: { // attach: [ -// [{ type: "Bounce", id: 0, src: { seq: 3, id: 0 }, tiebreak: Tiebreak.Right }], +// [{ type: "Bounce", id: 0, src: { change: 3, id: 0 }, tiebreak: Tiebreak.Right }], // ], // }, // qux: { @@ -2283,7 +2197,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 4, id: 0 }, +// src: { change: 4, id: 0 }, // tiebreak: Tiebreak.Left, // }], // ], @@ -2310,9 +2224,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[X Y] // */ // export namespace ScenarioM { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // foo: { @@ -2324,9 +2237,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -2339,9 +2251,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // foo: { @@ -2354,9 +2265,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // foo: { @@ -2369,9 +2279,8 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // marks: { // modify: [{ @@ -2384,14 +2293,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 1, // [{ type: "Insert", id: 0, content: [{ id: "X" }] }], @@ -2401,14 +2309,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }, { count: 2, seq: 2 }], +// tombs: [{ count: 2, change: 1 }, { count: 2, change: 2 }], // attach: [ // 1, // [{ type: "Insert", id: 0, content: [{ id: "X" }] }], @@ -2418,14 +2325,13 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { +// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }], +// tombs: [{ count: 2, change: 1 }], // attach: [ // 3, // [{ type: "Insert", id: 0, content: [{ id: "Y" }] }], @@ -2435,14 +2341,13 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e2: S.Transaction = { +// export const e4_r_e2: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 2, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 1 }, { count: 2, seq: 2 }], +// tombs: [{ count: 2, change: 1 }, { count: 2, change: 2 }], // attach: [ // 3, // [{ type: "Insert", id: 0, content: [{ id: "Y" }] }], @@ -2452,18 +2357,17 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { +// export const e4_r_e3: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 3, // marks: { // modify: [{ // foo: { // tombs: [ -// { count: 1, seq: 1 }, +// { count: 1, change: 1 }, // 1, // X -// { count: 1, seq: 1 }, -// { count: 1, seq: 2 }, +// { count: 1, change: 1 }, +// { count: 1, change: 2 }, // ], // attach: [ // 4, // [-A-X-B-C @@ 
-2505,9 +2409,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[A B C] bar=[X Y] // */ // export namespace ScenarioN { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // moves: [ // { id: 0, src: { foo: 1 }, dst: { bar: 0 } }, // { id: 1, src: { foo: 2 }, dst: { bar: 0 } }, @@ -2533,9 +2436,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -2548,9 +2450,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -2563,9 +2464,8 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // moves: [ // { id: 0, src: { foo: 2 }, dst: { bar: 0 } }, @@ -2581,8 +2481,8 @@ export namespace ScenarioA { // bar: { // attach: [ // [ -// { type: "Intake", seq: 1, id: 0 }, -// { type: "Insert", id: 0, content: [{ id: "Y" }], src: { seq: 1, id: 1 } }, +// { type: "Intake", change: 1, id: 0 }, +// { type: "Insert", id: 0, content: [{ id: "Y" }], src: { change: 1, id: 1 } }, // ], // ], // }, @@ -2590,9 +2490,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // moves: [ // { id: 0, src: { foo: 1 }, dst: { bar: 0 } }, @@ -2608,8 +2507,8 @@ export namespace ScenarioA { // bar: { // attach: [ // [ -// { type: "Insert", id: 0, content: [{ id: "X" }], src: { seq: 1, id: 0 } }, -// { type: "Intake", seq: 1, id: 1 }, +// { type: "Insert", id: 0, content: [{ id: "X" }], src: { change: 1, id: 0 } }, +// { type: "Intake", change: 1, id: 1 }, // ], // ], // }, @@ -2617,9 +2516,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // moves: [ // { id: 0, src: { foo: 1 }, dst: { bar: 0 } }, @@ -2635,8 +2533,8 @@ export namespace ScenarioA { // bar: { // attach: [ // [ -// { type: "Insert", id: 0, content: [{ id: "X" }], src: { seq: 1, id: 0 } }, -// { type: "Intake", seq: 1, id: 1 }, +// { type: "Insert", id: 0, content: [{ id: "X" }], src: { change: 1, id: 0 } }, +// { type: "Intake", change: 1, id: 1 }, // ], // ], // }, @@ -2668,9 +2566,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[] bar=[U X Y] // */ // export namespace ScenarioO { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // marks: { // modify: [{ // bar: { @@ -2683,9 +2580,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // marks: { // modify: [{ // foo: { @@ -2697,9 +2593,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 1, // Known of 1 -// seq: 3, // marks: { // modify: [{ // foo: { @@ -2721,9 +2616,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // foo: { @@ -2736,9 +2630,8 @@ export namespace ScenarioA { // }, // }; -// export const e5: S.Transaction = { +// export const e5: T.Transaction = { // ref: 0, -// seq: 5, // 
marks: { // modify: [{ // bar: { @@ -2751,9 +2644,8 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // marks: { // modify: [{ @@ -2766,14 +2658,13 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 2 }], +// tombs: [{ count: 2, change: 2 }], // nodes: [ // { type: "Move", id: 0, count: 2 }, // ], @@ -2792,9 +2683,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { +// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // marks: { // modify: [{ @@ -2808,14 +2698,13 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e2: S.Transaction = { +// export const e4_r_e2: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 2, // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 2 }], +// tombs: [{ count: 2, change: 2 }], // attach: [ // 1, // [{ type: "Insert", id: 0, content: [{ id: "X" }], tiebreak: Tiebreak.Left }], @@ -2825,15 +2714,14 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { +// export const e4_r_e3: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 3, // moves: [{ id: 0, src: { foo: 0 }, dst: { bar: 2 } }], // marks: { // modify: [{ // foo: { -// tombs: [{ count: 2, seq: 2 }], +// tombs: [{ count: 2, change: 2 }], // attach: [ // 1, // [{ type: "Bounce", id: 0, tiebreak: Tiebreak.Left }], @@ -2849,14 +2737,13 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e1: S.Transaction = { +// export const e5_r_e1: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 1, // marks: { // modify: [{ // bar: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 2, // [{ type: "Insert", id: 0, content: [{ id: "Y" }], tiebreak: Tiebreak.Left }], @@ -2866,14 +2753,13 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e2: S.Transaction = { +// export const e5_r_e2: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 2, // marks: { // modify: [{ // bar: { -// tombs: [1, { count: 1, seq: 1 }], +// tombs: [1, { count: 1, change: 1 }], // attach: [ // 2, // [{ type: "Insert", id: 0, content: [{ id: "Y" }], tiebreak: Tiebreak.Left }], @@ -2883,16 +2769,15 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e3: S.Transaction = { +// export const e5_r_e3: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 3, // marks: { // modify: [{ // bar: { // tombs: [ // 1, -// { count: 1, seq: 1 }, // V +// { count: 1, change: 1 }, // V // ], // attach: [ // 2, // [-U-V @@ -2903,16 +2788,15 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e4: S.Transaction = { +// export const e5_r_e4: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 4, // marks: { // modify: [{ // bar: { // tombs: [ // 2, // U X -// { count: 1, seq: 1 }, // V +// { count: 1, change: 1 }, // V // ], // attach: [ // 3, // [-U-X-V @@ -2939,9 +2823,8 @@ export namespace ScenarioA { // * Expected outcome: foo=[X Y] // */ // export namespace ScenarioP { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ @@ -2959,9 +2842,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: 
S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ @@ -2979,9 +2861,8 @@ export namespace ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // marks: { // modify: [{ // bar: { @@ -2993,9 +2874,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // baz: { @@ -3007,9 +2887,8 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { @@ -3028,9 +2907,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 0 } }], // marks: { @@ -3046,7 +2924,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // tiebreak: Tiebreak.Left, // Move Tiebreak // }], // ], @@ -3055,9 +2933,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 0 } }], // marks: { @@ -3073,7 +2950,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // tiebreak: Tiebreak.Left, // Move Tiebreak // }], // ], @@ -3082,9 +2959,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { +// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // marks: { // modify: [{ @@ -3097,9 +2973,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e2: S.Transaction = { +// export const e4_r_e2: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 2, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { @@ -3115,7 +2990,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3124,9 +2999,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { +// export const e4_r_e3: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 3, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { @@ -3143,7 +3017,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3172,9 +3046,8 @@ export namespace ScenarioA { // * Expected outcome: qux=[X Y] // */ // export namespace ScenarioQ { -// export const e1: S.Transaction = { +// export const e1: T.Transaction = { // ref: 0, -// seq: 1, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ @@ -3192,9 +3065,8 @@ export namespace ScenarioA { // }, // }; -// export const e2: S.Transaction = { +// export const e2: T.Transaction = { // ref: 0, -// seq: 2, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { // modify: [{ @@ -3212,9 +3084,8 @@ export namespace 
ScenarioA { // }, // }; -// export const e3: S.Transaction = { +// export const e3: T.Transaction = { // ref: 0, -// seq: 3, // moves: [{ id: 0, src: { foo: 0 }, dst: { qux: 0 } }], // marks: { // modify: [{ @@ -3232,9 +3103,8 @@ export namespace ScenarioA { // }, // }; -// export const e4: S.Transaction = { +// export const e4: T.Transaction = { // ref: 0, -// seq: 4, // marks: { // modify: [{ // bar: { @@ -3246,9 +3116,8 @@ export namespace ScenarioA { // }, // }; -// export const e5: S.Transaction = { +// export const e5: T.Transaction = { // ref: 0, -// seq: 5, // marks: { // modify: [{ // baz: { @@ -3260,9 +3129,8 @@ export namespace ScenarioA { // }, // }; -// export const e2_r_e1: S.Transaction = { +// export const e2_r_e1: T.Transaction = { // ref: 0, -// seq: 2, // newRef: 1, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { @@ -3281,9 +3149,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e1: S.Transaction = { +// export const e3_r_e1: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 1, // moves: [{ id: 0, src: { foo: 0 }, dst: { qux: 0 } }], // marks: { @@ -3302,9 +3169,8 @@ export namespace ScenarioA { // }, // }; -// export const e3_r_e2: S.Transaction = { +// export const e3_r_e2: T.Transaction = { // ref: 0, -// seq: 3, // newRef: 2, // moves: [{ id: 0, src: { foo: 0 }, dst: { qux: 0 } }], // marks: { @@ -3323,9 +3189,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e1: S.Transaction = { +// export const e4_r_e1: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 1, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 0 } }], // marks: { @@ -3341,7 +3206,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // tiebreak: Tiebreak.Left, // Move Tiebreak // }], // ], @@ -3350,9 +3215,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e2: S.Transaction = { +// export const e4_r_e2: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 2, // moves: [{ id: 0, src: { bar: 0 }, dst: { foo: 0 } }], // marks: { @@ -3368,7 +3232,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // tiebreak: Tiebreak.Left, // Move Tiebreak // }], // ], @@ -3377,9 +3241,8 @@ export namespace ScenarioA { // }, // }; -// export const e4_r_e3: S.Transaction = { +// export const e4_r_e3: T.Transaction = { // ref: 0, -// seq: 4, // newRef: 3, // moves: [{ id: 0, src: { bar: 0 }, hops: [{ foo: 0 }], dst: { qux: 0 } }], // marks: { @@ -3394,7 +3257,7 @@ export namespace ScenarioA { // [{ // type: "Bounce", // id: 0, -// src: { seq: 1, id: 0 }, +// src: { change: 1, id: 0 }, // tiebreak: Tiebreak.Left, // Move Tiebreak // }], // ], @@ -3405,7 +3268,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "X" }], -// src: { seq: 3, id: 0 }, +// src: { change: 3, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3414,9 +3277,8 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e1: S.Transaction = { +// export const e5_r_e1: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 1, // marks: { // modify: [{ @@ -3429,9 +3291,8 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e2: S.Transaction = { +// export const e5_r_e2: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 2, // moves: [{ id: 0, src: { baz: 0 }, dst: { foo: 0 } }], // marks: { @@ -3447,7 +3308,7 @@ export 
namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3456,9 +3317,8 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e3: S.Transaction = { +// export const e5_r_e3: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 3, // moves: [{ id: 0, src: { baz: 0 }, hops: [{ foo: 0 }], dst: { qux: 0 } }], // marks: { @@ -3473,7 +3333,7 @@ export namespace ScenarioA { // [{ // type: "Bounce", // id: 0, -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3484,7 +3344,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 3, id: 0 }, +// src: { change: 3, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3493,9 +3353,8 @@ export namespace ScenarioA { // }, // }; -// export const e5_r_e4: S.Transaction = { +// export const e5_r_e4: T.Transaction = { // ref: 0, -// seq: 5, // newRef: 4, // moves: [{ id: 0, src: { baz: 0 }, hops: [{ foo: 0 }], dst: { qux: 0 } }], // marks: { @@ -3510,7 +3369,7 @@ export namespace ScenarioA { // [{ // type: "Bounce", // id: 0, -// src: { seq: 2, id: 0 }, +// src: { change: 2, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], @@ -3522,7 +3381,7 @@ export namespace ScenarioA { // type: "Insert", // id: 0, // content: [{ id: "Y" }], -// src: { seq: 3, id: 0 }, +// src: { change: 3, id: 0 }, // tiebreak: Tiebreak.Right, // Move Tiebreak // }], // ], diff --git a/packages/dds/tree/src/test/changeset/toDelta.spec.ts b/packages/dds/tree/src/test/changeset/toDelta.spec.ts index 684fa894905e..3b34584cc7ca 100644 --- a/packages/dds/tree/src/test/changeset/toDelta.spec.ts +++ b/packages/dds/tree/src/test/changeset/toDelta.spec.ts @@ -4,12 +4,12 @@ */ import { strict as assert } from "assert"; -import { TreeSchemaIdentifier } from "../.."; import { ProtoNode, toDelta as toDeltaImpl, Transposed as T, } from "../../changeset"; +import { TreeSchemaIdentifier } from "../../schema-stored"; import { FieldKey, Delta } from "../../tree"; import { brand, brandOpaque } from "../../util"; import { deepFreeze } from "../utils"; @@ -49,7 +49,7 @@ describe("toDelta", () => { it("set root value", () => { const changeset: T.MarkList = [{ type: "Modify", - value: { type: "Set", value: 1 }, + value: { id: 0, value: 1 }, }]; const mark: Delta.Modify = { type: Delta.MarkType.Modify, @@ -68,7 +68,7 @@ describe("toDelta", () => { 42, { type: "Modify", - value: { type: "Set", value: 1 }, + value: { id: 0, value: 1 }, }, ], }, @@ -321,7 +321,7 @@ describe("toDelta", () => { 1, { type: "Modify", - value: { type: "Set", value: 1 }, + value: { id: opId, value: 1 }, }, ], }, @@ -356,11 +356,11 @@ describe("toDelta", () => { type: "MInsert", id: opId, content: content[0], - value: { type: "Set", value: 4242 }, + value: { id: opId, value: 4242 }, fields: { foo: [{ type: "Modify", - value: { type: "Set", value: 4343 }, + value: { id: opId, value: 4343 }, }], }, }], @@ -435,7 +435,7 @@ describe("toDelta", () => { type: "MInsert", id: opId, content: { type, value: 45 }, - value: { type: "Set", value: 4545 }, + value: { id: opId, value: 4545 }, }], ], }, diff --git a/packages/dds/tree/src/test/edit-manager/editManager.spec.ts b/packages/dds/tree/src/test/edit-manager/editManager.spec.ts index 844224a2ca78..f3b1ef053343 100644 --- a/packages/dds/tree/src/test/edit-manager/editManager.spec.ts +++ 
b/packages/dds/tree/src/test/edit-manager/editManager.spec.ts @@ -5,7 +5,6 @@ import { fail, strict as assert } from "assert"; import { ChangeEncoder, ChangeFamily, JsonCompatible } from "../../change-family"; -import { SeqNumber } from "../../changeset"; import { Commit, EditManager, SessionId } from "../../edit-manager"; import { ChangeRebaser } from "../../rebase"; import { AnchorSet } from "../../tree"; @@ -15,11 +14,11 @@ interface NonEmptyTestChangeset { /** * Identifies the document state that the changeset should apply to. */ - inputContext: number; + inputContext: number[]; /** * Identifies the document state brought about by applying the changeset to the document. */ - outputContext: number; + outputContext: number[]; /** * Identifies the editing intentions included in the changeset. * Editing intentions can be thought of as user actions, where each user action is unique. @@ -40,41 +39,70 @@ export type TestChangeset = NonEmptyTestChangeset | EmptyTestChangeset; function isNonEmptyChange( change: RecursiveReadonly, ): change is RecursiveReadonly { - return "inputContext" in change && "outputContext" in change; + return "inputContext" in change; } interface AnchorRebaseData { rebases: RecursiveReadonly[]; - intentions: Set; + intentions: number[]; } class TestChangeRebaser implements ChangeRebaser { - private contextCounter: number = 0; - private intentionCounter: number = 0; public readonly anchorRebases: Map = new Map(); + public static mintChangeset(inputContext: readonly number[], intentionOpt: number): NonEmptyTestChangeset { + const intention = intentionOpt; + return { + inputContext: [...inputContext], + intentions: [intention], + outputContext: TestChangeRebaser.composeIntentions(inputContext, [intention]), + }; + } + + public static composeIntentions(base: readonly number[], extras: readonly number[]): number[] { + const composed = [...base]; + let last: number | undefined = composed[composed.length - 1]; + for (const extra of extras) { + // Check whether we are composing intentions that cancel each other out. + // This helps us ensure that we always represent sequences of intentions + // in the same canonical form. + if (last === -extra) { + composed.pop(); + last = composed[composed.length - 1]; + } else { + composed.push(extra); + last = extra; + } + } + return composed; + } + + public compose(changes: TestChangeset[]): TestChangeset { - let inputContext: number | undefined; - let outputContext: number | undefined; - const intentions: number[] = []; + let inputContext: number[] | undefined; + let outputContext: number[] | undefined; + let intentions: number[] = []; for (const change of changes) { if (isNonEmptyChange(change)) { + inputContext ??= change.inputContext; if (outputContext !== undefined) { - // One can only compose changes of the output context of each change N matches - // the input context of the change N+1. - assert.equal(outputContext, change.inputContext); + // The input context should match the output context of the previous change. + assert.deepEqual(change.inputContext, outputContext); } - inputContext ??= change.inputContext; - outputContext = change.outputContext; - intentions.push(...change.intentions); + outputContext = TestChangeRebaser.composeIntentions( + outputContext ?? 
inputContext, + change.intentions, + ); + intentions = TestChangeRebaser.composeIntentions( + intentions, + change.intentions, + ); } } if (inputContext !== undefined) { return { inputContext, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - outputContext: outputContext!, intentions, + outputContext: outputContext ?? fail(), }; } return emptyChange; @@ -94,16 +122,11 @@ class TestChangeRebaser implements ChangeRebaser { public rebase(change: TestChangeset, over: TestChangeset): TestChangeset { if (isNonEmptyChange(change)) { if (isNonEmptyChange(over)) { + // Rebasing should only occur between two changes with the same input context + assert.deepEqual(change.inputContext, over.inputContext); return { inputContext: over.outputContext, - // Note that we mint a new context ID for each rebased operation. - // This means that rebasing some change A over some change B will produce - // a change with a different output context every time. - // This lack of fidelity could make some of the tests fail when they - // should not, but will not make tests pass if they should not. - // If a rebaser implementation needed to leverage this missing fidelity then this gap could - // be addressed by using a more complex encoding to represent contexts. - outputContext: ++this.contextCounter, + outputContext: TestChangeRebaser.composeIntentions(over.outputContext, change.intentions), intentions: change.intentions, }; } @@ -116,11 +139,11 @@ class TestChangeRebaser implements ChangeRebaser { if (isNonEmptyChange(over)) { let data = this.anchorRebases.get(anchors); if (data === undefined) { - data = { rebases: [], intentions: new Set() }; + data = { rebases: [], intentions: [] }; this.anchorRebases.set(anchors, data); } let lastChange: RecursiveReadonly | undefined; - const { rebases, intentions } = data; + const { rebases } = data; for (let iChange = rebases.length - 1; iChange >= 0; --iChange) { const change = rebases[iChange]; if (isNonEmptyChange(change)) { @@ -130,40 +153,28 @@ class TestChangeRebaser implements ChangeRebaser { } if (lastChange !== undefined) { // The new change should apply to the context brought about by the previous change - assert.equal(over.inputContext, lastChange.outputContext); + assert.deepEqual(over.inputContext, lastChange.outputContext); } - updateIntentionSet(over.intentions, intentions); + data.intentions = TestChangeRebaser.composeIntentions(data.intentions, over.intentions); rebases.push(over); } } - public mintChangeset(inputContext: number): NonEmptyTestChangeset { - return { - inputContext, - outputContext: ++this.contextCounter, - intentions: [++this.intentionCounter], - }; - } - - public checkChangeList(changes: readonly RecursiveReadonly[], intentions?: Set): void { + public static checkChangeList(changes: readonly RecursiveReadonly[], intentions: number[]): void { const filtered = changes.filter(isNonEmptyChange); - const intentionsSeen = new Set(); - const intentionsExpected = new Set( - intentions ?? 
- makeArray(this.intentionCounter, (i: number) => i + 1), - ); + let intentionsSeen: number[] = []; let index = 0; for (const change of filtered) { - updateIntentionSet(change.intentions, intentionsSeen); + intentionsSeen = TestChangeRebaser.composeIntentions(intentionsSeen, change.intentions); if (index > 0) { const prev = filtered[index - 1]; // The current change should apply to the context brought about by the previous change - assert.equal(change.inputContext, prev.outputContext); + assert.deepEqual(change.inputContext, prev.outputContext); } ++index; } // All expected intentions were present - assert.deepEqual(intentionsSeen, intentionsExpected); + assert.deepEqual(intentionsSeen, intentions); } } @@ -179,25 +190,6 @@ class TestChangeEncoder extends ChangeEncoder { type TestChangeFamily = ChangeFamily; type TestEditManager = EditManager; -function updateIntentionSet( - intentions: readonly number[], - intentionsSeen: Set, -) { - for (const intention of intentions) { - if (intention > 0) { - // The same intention should never be applied multiple times - assert(!intentionsSeen.has(intention)); - intentionsSeen.add(intention); - // The intention should be part of the expected set for this client - } else if (intention < 0) { - // We are dealing with the inverse of an intention. - // In order for the inverse to apply, the non-inverse should have been applied already - assert(intentionsSeen.has(-intention)); - intentionsSeen.delete(-intention); - } - } -} - function changeFamilyFactory(): { family: ChangeFamily; rebaser: TestChangeRebaser; @@ -231,313 +223,293 @@ const peerSessionId2: SessionId = "2"; const NUM_STEPS = 5; const NUM_CLIENTS = 3; +type TestCommit = Commit; + describe("EditManager", () => { it("Can handle non-concurrent local changes being sequenced immediately", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); - manager.addLocalChange(cs1); - manager.addSequencedChange({ + const c1: TestCommit = { sessionId: localSessionId, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, - }); - manager.addLocalChange(cs2); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([], 1), + }; + const c2: TestCommit = { sessionId: localSessionId, seqNumber: brand(2), refNumber: brand(1), - changeset: cs2, - }); - manager.addLocalChange(cs3); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([1], 2), + }; + const c3: TestCommit = { sessionId: localSessionId, seqNumber: brand(3), refNumber: brand(2), - changeset: cs3, - }); - checkChangeList(manager, rebaser); + changeset: TestChangeRebaser.mintChangeset([1, 2], 3), + }; + manager.addLocalChange(c1.changeset); + manager.addSequencedChange(c1); + manager.addLocalChange(c2.changeset); + manager.addSequencedChange(c2); + manager.addLocalChange(c3.changeset); + manager.addSequencedChange(c3); + checkChangeList(manager, [1, 2, 3]); }); it("Can handle non-concurrent local changes being sequenced later", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); - manager.addLocalChange(cs1); - manager.addLocalChange(cs2); - manager.addLocalChange(cs3); - manager.addSequencedChange({ + const c1: TestCommit = { sessionId: localSessionId, seqNumber: brand(1), refNumber: brand(0), - 
changeset: cs1, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([], 1), + }; + const c2: TestCommit = { sessionId: localSessionId, seqNumber: brand(2), refNumber: brand(0), - changeset: cs2, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([1], 2), + }; + const c3: TestCommit = { sessionId: localSessionId, seqNumber: brand(3), refNumber: brand(0), - changeset: cs3, - }); - checkChangeList(manager, rebaser); + changeset: TestChangeRebaser.mintChangeset([1, 2], 3), + }; + manager.addLocalChange(c1.changeset); + manager.addLocalChange(c2.changeset); + manager.addLocalChange(c3.changeset); + manager.addSequencedChange(c1); + manager.addSequencedChange(c2); + manager.addSequencedChange(c3); + checkChangeList(manager, [1, 2, 3]); }); it("Can handle non-concurrent peer changes sequenced immediately", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, + changeset: TestChangeRebaser.mintChangeset([], 1), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(2), refNumber: brand(1), - changeset: cs2, + changeset: TestChangeRebaser.mintChangeset([1], 2), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(3), refNumber: brand(2), - changeset: cs3, + changeset: TestChangeRebaser.mintChangeset([1, 2], 3), }); - checkChangeList(manager, rebaser); + checkChangeList(manager, [1, 2, 3]); }); it("Can handle non-concurrent peer changes sequenced later", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, + changeset: TestChangeRebaser.mintChangeset([], 1), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(2), refNumber: brand(0), - changeset: cs2, + changeset: TestChangeRebaser.mintChangeset([1], 2), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(3), refNumber: brand(0), - changeset: cs3, + changeset: TestChangeRebaser.mintChangeset([1, 2], 3), }); - checkChangeList(manager, rebaser); + checkChangeList(manager, [1, 2, 3]); }); it("Can rebase a single peer change over multiple peer changes", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); - const cs4 = rebaser.mintChangeset(0); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, + changeset: TestChangeRebaser.mintChangeset([], 1), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(2), refNumber: brand(1), - changeset: cs2, + changeset: TestChangeRebaser.mintChangeset([1], 2), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(3), refNumber: brand(2), - changeset: cs3, + changeset: TestChangeRebaser.mintChangeset([1, 2], 3), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(4), refNumber: brand(0), - changeset: cs4, + changeset: 
TestChangeRebaser.mintChangeset([], 4), }); - checkChangeList(manager, rebaser); + checkChangeList(manager, [1, 2, 3, 4]); }); it("Can rebase multiple non-interleaved peer changes", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); - const cs4 = rebaser.mintChangeset(0); - const cs5 = rebaser.mintChangeset(cs4.outputContext); - const cs6 = rebaser.mintChangeset(cs5.outputContext); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, + changeset: TestChangeRebaser.mintChangeset([], 1), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(2), refNumber: brand(1), - changeset: cs2, + changeset: TestChangeRebaser.mintChangeset([1], 2), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(3), refNumber: brand(2), - changeset: cs3, + changeset: TestChangeRebaser.mintChangeset([1, 2], 3), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(4), refNumber: brand(0), - changeset: cs4, + changeset: TestChangeRebaser.mintChangeset([], 4), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(5), refNumber: brand(0), - changeset: cs5, + changeset: TestChangeRebaser.mintChangeset([4], 5), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(6), refNumber: brand(0), - changeset: cs6, + changeset: TestChangeRebaser.mintChangeset([4, 5], 6), }); - checkChangeList(manager, rebaser); + checkChangeList(manager, [1, 2, 3, 4, 5, 6]); }); it("Can rebase multiple interleaved peer changes", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); - const cs4 = rebaser.mintChangeset(0); - const cs5 = rebaser.mintChangeset(cs4.outputContext); - const cs6 = rebaser.mintChangeset(cs5.outputContext); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, + changeset: TestChangeRebaser.mintChangeset([], 1), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(2), refNumber: brand(0), - changeset: cs4, + changeset: TestChangeRebaser.mintChangeset([], 2), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(3), refNumber: brand(1), - changeset: cs2, + changeset: TestChangeRebaser.mintChangeset([1], 3), }); manager.addSequencedChange({ sessionId: peerSessionId1, seqNumber: brand(4), refNumber: brand(2), - changeset: cs3, + changeset: TestChangeRebaser.mintChangeset([1, 2, 3], 4), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(5), refNumber: brand(0), - changeset: cs5, + changeset: TestChangeRebaser.mintChangeset([2], 5), }); manager.addSequencedChange({ sessionId: peerSessionId2, seqNumber: brand(6), refNumber: brand(0), - changeset: cs6, + changeset: TestChangeRebaser.mintChangeset([2, 5], 6), }); - checkChangeList(manager, rebaser); + checkChangeList(manager, [1, 2, 3, 4, 5, 6]); }); it("Can rebase multiple interleaved peer and local changes", () => { const { rebaser, manager } = editManagerFactory(); - const cs1 = rebaser.mintChangeset(0); - const cs2 = rebaser.mintChangeset(cs1.outputContext); - const cs3 = rebaser.mintChangeset(cs2.outputContext); - const cs4 = rebaser.mintChangeset(0); - 
const cs5 = rebaser.mintChangeset(cs4.outputContext); - const cs6 = rebaser.mintChangeset(cs5.outputContext); - const cs7 = rebaser.mintChangeset(0); - manager.addLocalChange(cs7); - manager.addSequencedChange({ + const c1: TestCommit = { sessionId: peerSessionId1, seqNumber: brand(1), refNumber: brand(0), - changeset: cs1, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([], 1), + }; + const c2: TestCommit = { sessionId: peerSessionId2, seqNumber: brand(2), refNumber: brand(0), - changeset: cs4, - }); - const cs8 = rebaser.mintChangeset(getTipContext(manager)); - manager.addLocalChange(cs8); - const cs9 = rebaser.mintChangeset(getTipContext(manager)); - manager.addLocalChange(cs9); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([], 2), + }; + const c3: TestCommit = { sessionId: localSessionId, seqNumber: brand(3), refNumber: brand(0), - changeset: cs7, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([], 3), + }; + const c4: TestCommit = { sessionId: peerSessionId1, seqNumber: brand(4), refNumber: brand(1), - changeset: cs2, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([1], 4), + }; + const c5: TestCommit = { sessionId: peerSessionId1, seqNumber: brand(5), refNumber: brand(2), - changeset: cs3, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([1, 2, 4], 5), + }; + const c6: TestCommit = { sessionId: localSessionId, seqNumber: brand(6), refNumber: brand(2), - changeset: cs8, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([1, 2, 3], 6), + }; + const c7: TestCommit = { sessionId: peerSessionId2, seqNumber: brand(7), refNumber: brand(0), - changeset: cs5, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([2], 7), + }; + const c8: TestCommit = { sessionId: localSessionId, seqNumber: brand(8), refNumber: brand(2), - changeset: cs9, - }); - manager.addSequencedChange({ + changeset: TestChangeRebaser.mintChangeset([1, 2, 3, 6], 8), + }; + const c9: TestCommit = { sessionId: peerSessionId2, seqNumber: brand(9), refNumber: brand(0), - changeset: cs6, - }); - checkChangeList(manager, rebaser); + changeset: TestChangeRebaser.mintChangeset([2, 7], 9), + }; + manager.addLocalChange(c3.changeset); + manager.addSequencedChange(c1); + manager.addSequencedChange(c2); + manager.addLocalChange(c6.changeset); + manager.addLocalChange(c8.changeset); + manager.addSequencedChange(c3); + manager.addSequencedChange(c4); + manager.addSequencedChange(c5); + manager.addSequencedChange(c6); + manager.addSequencedChange(c7); + manager.addSequencedChange(c8); + manager.addSequencedChange(c9); + checkChangeList(manager, [1, 2, 3, 4, 5, 6, 7, 8, 9]); }); /** @@ -555,6 +527,9 @@ describe("EditManager", () => { seq: 0, }; for (const scenario of buildScenario([], meta)) { + // Uncomment the lines below to see which scenario fails first. + // const name = scenario.map((step) => `${step.type}${step.client}`).join("-"); + // console.debug(name); runScenario(scenario); } }); @@ -573,8 +548,8 @@ type ScenarioStep = * State needed by the scenario builder. 
*/ interface ScenarioBuilderState { - clientData: { pulled: SeqNumber; numLocal: number; }[]; - seq: SeqNumber; + clientData: { pulled: number; numLocal: number; }[]; + seq: number; } function* buildScenario( @@ -630,29 +605,29 @@ function* buildScenario( interface ClientData { manager: TestEditManager; /** The local changes in their original form */ - localChanges: { change: TestChangeset; ref: SeqNumber; }[]; + localChanges: { change: TestChangeset; ref: number; }[]; /** The last sequence number received by the client */ - ref: SeqNumber; + ref: number; /** Intentions that the client should be aware of */ - intentions: Set; + intentions: number[]; } function runScenario(scenario: readonly ScenarioStep[]): void { - const name = scenario.map((step) => `${step.type}${step.client}`).join("-"); const { rebaser, family } = changeFamilyFactory(); const trunk: Commit[] = []; const clientData: ClientData[] = makeArray(NUM_CLIENTS, (iClient) => newClientData(family, iClient)); + let changeCounter = 0; for (const step of scenario) { // Perform the step { const client = clientData[step.client]; if (step.type === "Mint") { - const cs = rebaser.mintChangeset(getTipContext(client.manager)); + const cs = TestChangeRebaser.mintChangeset(getTipContext(client.manager), ++changeCounter); client.manager.addLocalChange(cs); client.localChanges.push({ change: cs, ref: client.ref }); - cs.intentions.forEach((intention) => client.intentions.add(intention)); + cs.intentions.forEach((intention) => client.intentions.push(intention)); } else if (step.type === "Sequence") { - const local = client.localChanges.shift() ?? fail("No local changes to sequence"); + const local = client.localChanges[0] ?? fail("No local changes to sequence"); trunk.push({ changeset: local.change, refNumber: brand(local.ref), @@ -662,18 +637,30 @@ function runScenario(scenario: readonly ScenarioStep[]): void { } else { // step.type === "Receive" const commit = trunk[client.ref]; client.manager.addSequencedChange(commit); - commit.changeset.intentions.forEach((intention) => client.intentions.add(intention)); + // If the change came from this client + if (commit.sessionId === step.client.toString()) { + // Discard the local change + client.localChanges.shift(); + // Do not update the intentions + } else { + // Update the intentions known to this client + client.intentions.splice( + client.intentions.length - client.localChanges.length, + 0, + ...commit.changeset.intentions, + ); + } client.ref += 1; } } // Check the validity of the managers for (const client of clientData) { - checkChangeList(client.manager, rebaser, client.intentions); + checkChangeList(client.manager, client.intentions); const intentionsThatAnchorsWereRebasedOver = // eslint-disable-next-line @typescript-eslint/no-non-null-assertion rebaser.anchorRebases.get(client.manager.anchors!)?.intentions; // Check the anchors have been updated if applicable - assert.deepEqual(intentionsThatAnchorsWereRebasedOver ?? new Set(), client.intentions); + assert.deepEqual(intentionsThatAnchorsWereRebasedOver ?? 
[], client.intentions); } } } @@ -685,25 +672,25 @@ function newClientData(family: TestChangeFamily, iClient: number): ClientData { manager, localChanges: [], ref: 0, - intentions: new Set(), + intentions: [], }; } -function checkChangeList(manager: TestEditManager, rebaser: TestChangeRebaser, intentions?: Set<number>): void { - rebaser.checkChangeList(getAllChanges(manager), intentions); +function checkChangeList(manager: TestEditManager, intentions: number[]): void { + TestChangeRebaser.checkChangeList(getAllChanges(manager), intentions); } function getAllChanges(manager: TestEditManager): RecursiveReadonly<TestChangeset>[] { return manager.getTrunk().map((c) => c.changeset).concat(manager.getLocalChanges()); } -function getTipContext(manager: TestEditManager): number { +function getTipContext(manager: TestEditManager): number[] { const changes = getAllChanges(manager); for (let i = changes.length - 1; i >= 0; --i) { const change = changes[i]; if (isNonEmptyChange(change)) { - return change.outputContext; + return [...change.outputContext]; } } - return 0; + return []; } diff --git a/packages/dds/tree/src/test/sequence-change-family/cases.ts b/packages/dds/tree/src/test/sequence-change-family/cases.ts new file mode 100644 index 000000000000..0924200eeaf9 --- /dev/null +++ b/packages/dds/tree/src/test/sequence-change-family/cases.ts @@ -0,0 +1,112 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { SequenceChangeset } from "../../feature-libraries"; +import { brand } from "../../util"; +import { Transposed as T, Value } from "../../changeset"; +import { TreeSchemaIdentifier } from "../../schema-stored"; + +export function setRootValueTo(value: Value): SequenceChangeset { + return { + marks: { + root: [{ + type: "Modify", + value: { id: 0, value }, + }], + }, + }; +} + +export function setChildValueTo(value: Value): SequenceChangeset { + return { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + 42, + { + type: "Modify", + value: { id: 0, value }, + }, + ], + }, + }], + }, + }; +} + +const type: TreeSchemaIdentifier = brand("Node"); +const tomb = "Dummy Changeset Tag"; + +export const cases: { + no_change: SequenceChangeset; + set_root_value: SequenceChangeset; + set_child_value: SequenceChangeset; + insert: SequenceChangeset; + modify: SequenceChangeset; + modify_insert: SequenceChangeset; + delete: SequenceChangeset; + revive: SequenceChangeset; +} = { + no_change: { + marks: {}, + }, + set_root_value: setRootValueTo(42), + set_child_value: setChildValueTo(42), + insert: { + marks: { + root: [ + 1, + [{ type: "Insert", id: 1, content: [{ type, value: 1 }, { type, value: 2 }] }], + ], + }, + }, + modify: { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }], + }, + }, + modify_insert: { + marks: { + root: [ + 1, + [{ + type: "MInsert", + id: 1, + content: { type, value: 1 }, + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }], + ], + }, + }, + delete: { + marks: { + root: [ + 1, + { type: "Delete", id: 1, count: 3 }, + ], + }, + }, + revive: { + marks: { + root: [ + 2, + { type: "Revive", id: 1, count: 2, tomb }, + ], + }, + }, +}; diff --git a/packages/dds/tree/src/test/sequence-change-family/compose.spec.ts b/packages/dds/tree/src/test/sequence-change-family/compose.spec.ts new file mode 100644 index 000000000000..0e3e826542c1 --- /dev/null +++ 
b/packages/dds/tree/src/test/sequence-change-family/compose.spec.ts @@ -0,0 +1,848 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { strict as assert } from "assert"; +import { sequenceChangeRebaser, SequenceChangeset } from "../../feature-libraries"; +import { TreeSchemaIdentifier } from "../../schema-stored"; +import { brand } from "../../util"; +import { deepFreeze } from "../utils"; +import { cases, setChildValueTo, setRootValueTo } from "./cases"; + +const type: TreeSchemaIdentifier = brand("Node"); +const tomb = "Dummy Changeset Tag"; + +function compose(changes: SequenceChangeset[]): SequenceChangeset { + changes.forEach(deepFreeze); + return sequenceChangeRebaser.compose(changes); +} + +describe("SequenceChangeFamily - Compose", () => { + describe("associativity of triplets", () => { + const changes = Object.entries(cases); + for (const a of changes) { + for (const b of changes) { + for (const c of changes) { + it(`((${a[0]}, ${b[0]}), ${c[0]}) === (${a[0]}, (${b[0]}, ${c[0]}))`, () => { + const ab = compose([a[1], b[1]]); + const left = compose([ab, c[1]]); + const bc = compose([b[1], c[1]]); + const right = compose([a[1], bc]); + assert.deepEqual(left, right); + }); + } + } + } + }); + + it("no changes", () => { + const actual = compose([]); + assert.deepEqual(actual, cases.no_change); + }); + + it("Does not leave empty mark lists and fields", () => { + const insertion: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 2, count: 1 }, + ], + }, + }; + const actual = compose([insertion, deletion]); + assert.deepEqual(actual, cases.no_change); + }); + + it("Does not leave empty modify marks", () => { + const insertion: SequenceChangeset = { + marks: { + root: [ + { + type: "Modify", + fields: { + foo: [[{ type: "Insert", id: 1, content: [{ type, value: 1 }] }]], + }, + }, + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + { + type: "Modify", + fields: { + foo: [{ type: "Delete", id: 2, count: 1 }], + }, + }, + ], + }, + }; + const actual = compose([insertion, deletion]); + assert.deepEqual(actual, cases.no_change); + }); + + it("set root ○ set root", () => { + const set1 = setRootValueTo(1); + const set2 = setRootValueTo(2); + const actual = compose([set1, set2]); + assert.deepEqual(actual, set2); + }); + + it("set root ○ set child", () => { + const set1 = setRootValueTo(1); + const set2 = setChildValueTo(2); + const actual = compose([set1, set2]); + const expected: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + value: { id: 0, value: 1 }, + fields: { + foo: [ + 42, + { + type: "Modify", + value: { id: 0, value: 2 }, + }, + ], + }, + }], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("set child ○ set root", () => { + const set1 = setChildValueTo(1); + const set2 = setRootValueTo(2); + const actual = compose([set1, set2]); + const expected: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + value: { id: 0, value: 2 }, + fields: { + foo: [ + 42, + { + type: "Modify", + value: { id: 0, value: 1 }, + }, + ], + }, + }], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("set child ○ set child", () => { + const set1 = setChildValueTo(1); + const set2 = setChildValueTo(2); + const actual = compose([set1, set2]); + assert.deepEqual(actual, set2); + }); + + it("insert ○ 
modify", () => { + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }, { type, value: 2 }] }], + ], + }, + }; + const modify: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 42 }] }], + ], + }, + }], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [[ + { + type: "MInsert", + id: 1, + content: { type, value: 1 }, + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 42 }] }], + ], + }, + }, + { type: "Insert", id: 1, content: [{ type, value: 2 }] }, + ]], + }, + }; + const actual = compose([insert, modify]); + assert.deepEqual(actual, expected); + }); + + it("modify insert ○ modify", () => { + const insert: SequenceChangeset = { + marks: { + root: [ + [{ + type: "MInsert", + id: 1, + content: { type, value: 1 }, + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }], + ], + }, + }; + const modify: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + bar: [ + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + }, + }], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + [{ + type: "MInsert", + id: 1, + content: { type, value: 1 }, + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + bar: [ + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + }, + }], + ], + }, + }; + const actual = compose([insert, modify]); + assert.deepEqual(actual, expected); + }); + + it("delete ○ modify", () => { + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const modify: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 3 }, + { + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }, + ], + }, + }; + const actual = compose([deletion, modify]); + assert.deepEqual(actual, expected); + }); + + it("revive ○ modify", () => { + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 3, tomb }, + ], + }, + }; + const modify: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + { + type: "MRevive", + id: 1, + tomb, + fields: { + foo: [ + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }, + { type: "Revive", id: 1, count: 2, tomb }, + ], + }, + }; + const actual = compose([revive, modify]); + assert.deepEqual(actual, expected); + }); + + it("modify ○ modify", () => { + const modifyA: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + ], + bar: [ + { type: "Delete", id: 2, count: 1 }, + ], + }, + }], + }, + }; + const modifyB: SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + bar: [ + 1, + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + baz: [ + { type: "Delete", id: 4, count: 1 }, + ], + }, + }], + }, + }; + const actual = compose([modifyA, modifyB]); + const expected: 
SequenceChangeset = { + marks: { + root: [{ + type: "Modify", + fields: { + foo: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + ], + bar: [ + { type: "Delete", id: 2, count: 1 }, + 1, + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + baz: [ + { type: "Delete", id: 4, count: 1 }, + ], + }, + }], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("set ○ delete", () => { + const set = setRootValueTo(1); + // Deletes ABCD--GHIJK + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 3, count: 1 }, + ], + }, + }; + const actual = compose([set, deletion]); + assert.deepEqual(actual, deletion); + }); + + it("insert ○ delete (within insert)", () => { + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [ + { type, value: 1 }, + { type, value: 2 }, + { type, value: 3 }, + ] }], + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 2, count: 1 }, + ], + }, + }; + const actual = compose([insert, deletion]); + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [ + { type, value: 1 }, + { type, value: 3 }, + ] }], + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("insert ○ delete (across inserts)", () => { + const insert: SequenceChangeset = { + marks: { + root: [[ + { type: "Insert", id: 1, content: [ + { type, value: 1 }, + { type, value: 2 }, + ] }, + { type: "Insert", id: 2, content: [ + { type, value: 3 }, + { type, value: 4 }, + ] }, + { type: "Insert", id: 3, content: [ + { type, value: 5 }, + { type, value: 6 }, + ] }, + ]], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 2, count: 4 }, + ], + }, + }; + const actual = compose([insert, deletion]); + const expected: SequenceChangeset = { + marks: { + root: [[ + { type: "Insert", id: 1, content: [ + { type, value: 1 }, + ] }, + { type: "Insert", id: 3, content: [ + { type, value: 6 }, + ] }, + ]], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("modify ○ delete", () => { + const modify: SequenceChangeset = setChildValueTo(1); + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 2, count: 1 }, + ], + }, + }; + const actual = compose([modify, deletion]); + assert.deepEqual(actual, deletion); + }); + + it("delete ○ delete", () => { + // Deletes ABC-----IJKLM + const deleteA: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 3 }, + 5, + { type: "Delete", id: 2, count: 5 }, + ], + }, + }; + // Deletes DEFG--OP + const deleteB: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 3, count: 4 }, + 2, + { type: "Delete", id: 4, count: 2 }, + ], + }, + }; + const actual = compose([deleteA, deleteB]); + // Deletes ABCDEFG-IJKLMNOP + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 3 }, + { type: "Delete", id: 3, count: 4 }, + 1, + { type: "Delete", id: 2, count: 5 }, + 1, + { type: "Delete", id: 4, count: 2 }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("revive ○ delete", () => { + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 5, tomb }, + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 3, count: 1 }, + 1, + { type: "Delete", id: 4, count: 3 }, + ], + }, + }; + const actual = compose([revive, deletion]); + const expected: 
SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + { type: "Delete", id: 4, count: 1 }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("set ○ insert", () => { + const set = setRootValueTo(1); + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + { type: "Modify", value: { id: 0, value: 1 } }, + ], + }, + }; + const actual = compose([set, insert]); + assert.deepEqual(actual, expected); + }); + + it("modify ○ insert", () => { + const modify: SequenceChangeset = setChildValueTo(1); + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + { + type: "Modify", + fields: { + foo: [ + 42, + { + type: "Modify", + value: { id: 0, value: 1 }, + }, + ], + }, + }, + ], + }, + }; + const actual = compose([modify, insert]); + assert.deepEqual(actual, expected); + }); + + it("delete ○ insert", () => { + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + // TODO: test with merge-right policy as well + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const actual = compose([deletion, insert]); + assert.deepEqual(actual, expected); + }); + + it("revive ○ insert", () => { + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 5, tomb }, + ], + }, + }; + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + // TODO: test with merge-right policy as well + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + { type: "Revive", id: 1, count: 5, tomb }, + ], + }, + }; + const actual = compose([deletion, insert]); + assert.deepEqual(actual, expected); + }); + + it("insert ○ insert", () => { + const insertA: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 2, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }, { type, value: 3 }] }], + ], + }, + }; + const insertB: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + 4, + [{ type: "Insert", id: 4, content: [{ type, value: 4 }] }], + ], + }, + }; + const actual = compose([insertA, insertB]); + const expected: SequenceChangeset = { + marks: { + root: [ + [ + { type: "Insert", id: 3, content: [{ type, value: 3 }] }, + { type: "Insert", id: 1, content: [{ type, value: 1 }] }, + ], + 2, + [ + { type: "Insert", id: 2, content: [{ type, value: 2 }] }, + { type: "Insert", id: 4, content: [{ type, value: 4 }] }, + { type: "Insert", id: 2, content: [{ type, value: 3 }] }, + ], + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("set ○ revive", () => { + const set = setRootValueTo(1); + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, 
tomb }, + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // TODO: test Tiebreak policy + { type: "Revive", id: 1, count: 2, tomb }, + { type: "Modify", value: { id: 0, value: 1 } }, + ], + }, + }; + const actual = compose([set, revive]); + assert.deepEqual(actual, expected); + }); + + it("modify ○ revive", () => { + const modify: SequenceChangeset = setChildValueTo(1); + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + { + type: "Modify", + fields: { + foo: [ + 42, + { + type: "Modify", + value: { id: 0, value: 1 }, + }, + ], + }, + }, + ], + }, + }; + const actual = compose([modify, revive]); + assert.deepEqual(actual, expected); + }); + + it("delete ○ revive", () => { + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + ], + }, + }; + // TODO: test with merge-right policy as well + // TODO: test revive of deleted content + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const actual = compose([deletion, revive]); + assert.deepEqual(actual, expected); + }); + + it("revive ○ revive", () => { + const reviveA: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + ], + }, + }; + const reviveB: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 2, count: 3, tomb }, + ], + }, + }; + // TODO: test with merge-right policy as well + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 2, count: 3, tomb }, + { type: "Revive", id: 1, count: 2, tomb }, + ], + }, + }; + const actual = compose([reviveA, reviveB]); + assert.deepEqual(actual, expected); + }); + + it("insert ○ revive", () => { + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 2, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }, { type, value: 3 }] }], + ], + }, + }; + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 3, count: 1, tomb }, + 4, + { type: "Revive", id: 4, count: 1, tomb }, + ], + }, + }; + const actual = compose([insert, revive]); + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 3, count: 1, tomb }, + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 2, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + { type: "Revive", id: 4, count: 1, tomb }, + [{ type: "Insert", id: 2, content: [{ type, value: 3 }] }], + ], + }, + }; + assert.deepEqual(actual, expected); + }); +}); diff --git a/packages/dds/tree/src/test/sequence-change-family/invert.spec.ts b/packages/dds/tree/src/test/sequence-change-family/invert.spec.ts new file mode 100644 index 000000000000..85e08059eeb2 --- /dev/null +++ b/packages/dds/tree/src/test/sequence-change-family/invert.spec.ts @@ -0,0 +1,132 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. 
+ */ + +import { strict as assert } from "assert"; +import { Transposed as T } from "../../changeset"; +import { + DUMMY_INVERSE_VALUE, + DUMMY_INVERT_TAG, + sequenceChangeRebaser, + SequenceChangeset, +} from "../../feature-libraries"; +import { TreeSchemaIdentifier } from "../../schema-stored"; +import { brand } from "../../util"; +import { deepFreeze } from "../utils"; + +const type: TreeSchemaIdentifier = brand("Node"); + +function invert(change: SequenceChangeset): SequenceChangeset { + deepFreeze(change); + return sequenceChangeRebaser.invert(change); +} + +describe("SequenceChangeFamily - Invert", () => { + for (const nest of [false, true]) { + describe(nest ? "Nested" : "Root", () => { + function asForest(markList: T.MarkList): SequenceChangeset { + return { + marks: { root: nest ? [{ type: "Modify", fields: { foo: markList } }] : markList }, + }; + } + + it("no changes", () => { + const input = asForest([]); + const expected = asForest([]); + const actual = invert(input); + assert.deepEqual(actual, expected); + }); + + it("set value => set value", () => { + const input = asForest([ + { type: "Modify", value: { id: 1, value: 42 } }, + ]); + const expected = asForest([ + { type: "Modify", value: { id: 1, value: DUMMY_INVERSE_VALUE } }, + ]); + const actual = invert(input); + assert.deepEqual(actual, expected); + }); + + it("insert => delete", () => { + const input = asForest([ + [{ + type: "Insert", + id: 1, + content: [{ type, value: 42 }, { type, value: 43 }], + }], + ]); + const expected = asForest([ + { + type: "Delete", + id: 1, + count: 2, + }, + ]); + const actual = invert(input); + assert.deepEqual(actual, expected); + }); + + it("modified insert => delete", () => { + const input = asForest([ + [{ + type: "MInsert", + id: 1, + content: { type, value: 42 }, + fields: { foo: [{ type: "Modify", value: { id: 1, value: 42 } }] }, + }], + ]); + const expected = asForest([ + { + type: "Delete", + id: 1, + count: 1, + }, + ]); + const actual = invert(input); + assert.deepEqual(actual, expected); + }); + + it("delete => revive", () => { + const input = asForest([ + { + type: "Delete", + id: 1, + count: 2, + }, + ]); + const expected = asForest([ + { + type: "Revive", + id: 1, + count: 2, + tomb: DUMMY_INVERT_TAG, + }, + ]); + const actual = invert(input); + assert.deepEqual(actual, expected); + }); + + it("revive => delete", () => { + const input = asForest([ + { + type: "Revive", + id: 1, + count: 2, + tomb: DUMMY_INVERT_TAG, + }, + ]); + const expected = asForest([ + { + type: "Delete", + id: 1, + count: 2, + }, + ]); + const actual = invert(input); + assert.deepEqual(actual, expected); + }); + }); + } +}); diff --git a/packages/dds/tree/src/test/sequence-change-family/markListFactory.spec.ts b/packages/dds/tree/src/test/sequence-change-family/markListFactory.spec.ts new file mode 100644 index 000000000000..6d3e62b31502 --- /dev/null +++ b/packages/dds/tree/src/test/sequence-change-family/markListFactory.spec.ts @@ -0,0 +1,143 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. 
+ */ + +import { strict as assert } from "assert"; +import { MarkListFactory, Transposed as T } from "../../changeset"; +import { TreeSchemaIdentifier } from "../../schema-stored"; +import { brand } from "../../util"; + +const dummyMark: T.AttachGroup = []; +const bounce1: T.Bounce = { type: "Bounce", id: 1 }; +const bounce2: T.Bounce = { type: "Bounce", id: 1 }; +const type: TreeSchemaIdentifier = brand("Node"); + +describe("MarkListFactory", () => { + it("Inserts an offset when there is content after the offset", () => { + const factory = new MarkListFactory(); + factory.pushOffset(42); + factory.pushContent(dummyMark); + assert.deepStrictEqual(factory.list, [42, dummyMark]); + }); + + it("Does not insert 0-length offsets", () => { + const factory = new MarkListFactory(); + factory.pushOffset(0); + factory.pushContent(dummyMark); + assert.deepStrictEqual(factory.list, [dummyMark]); + }); + + it("Merges runs of offsets into a single offset", () => { + const factory = new MarkListFactory(); + factory.pushOffset(42); + factory.pushOffset(42); + factory.pushContent(dummyMark); + assert.deepStrictEqual(factory.list, [84, dummyMark]); + }); + + it("Does not insert an offset when there is no content after the offset", () => { + const factory = new MarkListFactory(); + factory.pushContent(dummyMark); + factory.pushOffset(42); + factory.pushOffset(42); + assert.deepStrictEqual(factory.list, [dummyMark]); + }); + + it("Can merge consecutive attach groups", () => { + const factory = new MarkListFactory(); + const insert: T.Insert = { type: "Insert", id: 0, content: [] }; + const bounce: T.Bounce = { type: "Bounce", id: 1 }; + factory.pushContent([insert]); + factory.pushContent([bounce]); + assert.deepStrictEqual(factory.list, [[insert, bounce]]); + }); + + it("Can merge consecutive inserts", () => { + const factory = new MarkListFactory(); + const insert1: T.Insert = { type: "Insert", id: 0, content: [{ type, value: 1 }] }; + const insert2: T.Insert = { type: "Insert", id: 0, content: [{ type, value: 2 }] }; + factory.pushContent([bounce1, insert1]); + factory.pushContent([insert2, bounce2]); + assert.deepStrictEqual(factory.list, [[ + bounce1, + { + type: "Insert", + id: 0, + content: [{ type, value: 1 }, { type, value: 2 }], + }, + bounce2, + ]]); + }); + + it("Can merge consecutive move-ins", () => { + const factory = new MarkListFactory(); + const move1: T.MoveIn = { type: "MoveIn", id: 0, count: 1 }; + const move2: T.MoveIn = { type: "MoveIn", id: 0, count: 1 }; + factory.pushContent([bounce1, move1]); + factory.pushContent([move2, bounce2]); + assert.deepStrictEqual(factory.list, [[ + bounce1, + { + type: "MoveIn", + id: 0, + count: 2, + }, + bounce2, + ]]); + }); + + it("Can merge consecutive deletes", () => { + const factory = new MarkListFactory(); + const delete1: T.Detach = { type: "Delete", id: 0, count: 1 }; + const delete2: T.Detach = { type: "Delete", id: 0, count: 1 }; + factory.pushContent(delete1); + factory.pushContent(delete2); + assert.deepStrictEqual(factory.list, [{ type: "Delete", id: 0, count: 2 }]); + }); + + it("Can merge consecutive move-outs", () => { + const factory = new MarkListFactory(); + const move1: T.Detach = { type: "MoveOut", id: 0, count: 1 }; + const move2: T.Detach = { type: "MoveOut", id: 0, count: 1 }; + factory.pushContent(move1); + factory.pushContent(move2); + assert.deepStrictEqual(factory.list, [{ type: "MoveOut", id: 0, count: 2 }]); + }); + + it("Can merge consecutive revives", () => { + const factory = new MarkListFactory(); + const revive1: 
T.Reattach = { type: "Revive", id: 0, tomb: 42, count: 1 }; + const revive2: T.Reattach = { type: "Revive", id: 0, tomb: 42, count: 1 }; + factory.pushContent(revive1); + factory.pushContent(revive2); + assert.deepStrictEqual(factory.list, [{ type: "Revive", id: 0, tomb: 42, count: 2 }]); + }); + + it("Can merge consecutive returns", () => { + const factory = new MarkListFactory(); + const return1: T.Reattach = { type: "Return", id: 0, tomb: 42, count: 1 }; + const return2: T.Reattach = { type: "Return", id: 0, tomb: 42, count: 1 }; + factory.pushContent(return1); + factory.pushContent(return2); + assert.deepStrictEqual(factory.list, [{ type: "Return", id: 0, tomb: 42, count: 2 }]); + }); + + it("Can merge consecutive gaps", () => { + const factory = new MarkListFactory(); + const gap1: T.GapEffectSegment = { type: "Gap", count: 1, stack: [] }; + const gap2: T.GapEffectSegment = { type: "Gap", count: 1, stack: [] }; + factory.pushContent(gap1); + factory.pushContent(gap2); + assert.deepStrictEqual(factory.list, [{ type: "Gap", count: 2, stack: [] }]); + }); + + it("Can merge consecutive tombs", () => { + const factory = new MarkListFactory(); + const tomb1: T.Tomb = { type: "Tomb", change: 42, count: 1 }; + const tomb2: T.Tomb = { type: "Tomb", change: 42, count: 1 }; + factory.pushContent(tomb1); + factory.pushContent(tomb2); + assert.deepStrictEqual(factory.list, [{ type: "Tomb", change: 42, count: 2 }]); + }); +}); diff --git a/packages/dds/tree/src/test/sequence-change-family/rebase.spec.ts b/packages/dds/tree/src/test/sequence-change-family/rebase.spec.ts new file mode 100644 index 000000000000..3dff7244adb7 --- /dev/null +++ b/packages/dds/tree/src/test/sequence-change-family/rebase.spec.ts @@ -0,0 +1,690 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. 
+ */ + +import { strict as assert } from "assert"; +import { sequenceChangeRebaser, SequenceChangeset } from "../../feature-libraries"; +import { TreeSchemaIdentifier } from "../../schema-stored"; +import { brand } from "../../util"; +import { deepFreeze } from "../utils"; +import { cases, setChildValueTo, setRootValueTo } from "./cases"; + +const type: TreeSchemaIdentifier = brand("Node"); +const tomb = "Dummy Changeset Tag"; + +function rebase(change: SequenceChangeset, base: SequenceChangeset): SequenceChangeset { + deepFreeze(change); + deepFreeze(base); + return sequenceChangeRebaser.rebase(change, base); +} + +describe("SequenceChangeFamily - Rebase", () => { + describe("no changes ↷ *", () => { + for (const [name, testCase] of Object.entries(cases)) { + it(`no changes ↷ ${name}`, () => { + const actual = rebase(cases.no_change, testCase); + assert.deepEqual(actual, cases.no_change); + }); + } + }); + + describe("* ↷ no changes", () => { + for (const [name, testCase] of Object.entries(cases)) { + it(`${name} ↷ no changes`, () => { + const actual = rebase(testCase, cases.no_change); + assert.deepEqual(actual, testCase); + }); + } + }); + + it("set root ↷ set root", () => { + const set1 = setRootValueTo(1); + const set2 = setRootValueTo(2); + const actual = rebase(set1, set2); + assert.deepEqual(actual, set1); + }); + + it("set root ↷ set child", () => { + const set1 = setRootValueTo(1); + const set2 = setChildValueTo(2); + const actual = rebase(set1, set2); + assert.deepEqual(actual, set1); + }); + + it("set child ↷ set root", () => { + const set1 = setChildValueTo(1); + const set2 = setRootValueTo(2); + const actual = rebase(set1, set2); + assert.deepEqual(actual, set1); + }); + + it("set child ↷ set child", () => { + const set1 = setChildValueTo(1); + const set2 = setChildValueTo(2); + const actual = rebase(set1, set2); + assert.deepEqual(actual, set1); + }); + + it("insert ↷ modify", () => { + const actual = rebase(cases.insert, cases.modify); + assert.deepEqual(actual, cases.insert); + }); + + it("modify insert ↷ modify", () => { + const actual = rebase(cases.modify_insert, cases.modify); + assert.deepEqual(actual, cases.modify_insert); + }); + + it("delete ↷ modify", () => { + const actual = rebase(cases.delete, cases.modify); + assert.deepEqual(actual, cases.delete); + }); + + it("revive ↷ modify", () => { + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 2, tomb }, + 2, + { type: "Revive", id: 2, count: 2, tomb }, + 4, + { type: "Revive", id: 3, count: 2, tomb }, + ], + }, + }; + const mods: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + 2, + { type: "Modify", fields: { foo: [{ type: "Delete", id: 2, count: 1 }] } }, + 4, + { type: "Modify", fields: { foo: [{ type: "Delete", id: 3, count: 1 }] } }, + ], + }, + }; + const actual = rebase(revive, mods); + assert.deepEqual(actual, revive); + }); + + it("set ↷ delete", () => { + const sets: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", value: { id: 0, value: 42 } }, + 2, + { type: "Modify", value: { id: 0, value: 42 } }, + 4, + { type: "Modify", value: { id: 0, value: 42 } }, + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const actual = rebase(sets, deletion); + const expected: SequenceChangeset = { + marks: { + root: [ + // Set at an earlier index is unaffected by a delete at a later index + { type: "Modify", 
value: { id: 0, value: 42 } }, + // Set at the same index as a delete is muted by the delete + 4, + // Set at a later index moves to an earlier index due to a delete at an earlier index + { type: "Modify", value: { id: 0, value: 42 } }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("modify ↷ delete", () => { + const mods: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + 2, + { type: "Modify", fields: { foo: [{ type: "Delete", id: 2, count: 1 }] } }, + 4, + { type: "Modify", fields: { foo: [{ type: "Delete", id: 3, count: 1 }] } }, + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const actual = rebase(mods, deletion); + const expected: SequenceChangeset = { + marks: { + root: [ + // Modify at an earlier index is unaffected by a delete at a later index + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + // Modify at the same index as a delete is muted by the delete + 4, + // Modify at a later index moves to an earlier index due to a delete at an earlier index + { type: "Modify", fields: { foo: [{ type: "Delete", id: 3, count: 1 }] } }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("insert ↷ delete", () => { + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 2, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + 4, + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const actual = rebase(insert, deletion); + const expected: SequenceChangeset = { + marks: { + root: [ + // Earlier insert is unaffected + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 1, // Overlapping insert has its index reduced + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + 2, // Later insert has its index reduced + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("revive ↷ delete", () => { + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 1, tomb }, + 2, + { type: "Revive", id: 2, count: 1, tomb }, + 4, + { type: "Revive", id: 3, count: 1, tomb }, + ], + }, + }; + const deletion: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 1, count: 3 }, + ], + }, + }; + const actual = rebase(revive, deletion); + const expected: SequenceChangeset = { + marks: { + root: [ + // Earlier revive is unaffected + { type: "Revive", id: 1, count: 1, tomb }, + 1, // Overlapping revive has its index reduced + { type: "Revive", id: 2, count: 1, tomb }, + 2, // Later revive has its index reduced + { type: "Revive", id: 3, count: 1, tomb }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("delete ↷ overlapping delete", () => { + // Deletes ---DEFGH-- + const deleteA: SequenceChangeset = { + marks: { + root: [ + 3, + { type: "Delete", id: 2, count: 5 }, + ], + }, + }; + // Deletes --CD-F-HI + const deleteB: SequenceChangeset = { + marks: { + root: [ + 2, + { type: "Delete", id: 1, count: 2 }, + 1, + { type: "Delete", id: 2, count: 1 }, + 1, + { type: "Delete", id: 2, count: 2 }, + ], + }, + }; + const actual = rebase(deleteA, deleteB); + // Deletes --E-G + const expected: SequenceChangeset = { + marks: { + root: [ + 2, + 
{ type: "Delete", id: 2, count: 2 }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("delete ↷ earlier delete", () => { + // Deletes ---DE + const deleteA: SequenceChangeset = { + marks: { + root: [ + 3, + { type: "Delete", id: 2, count: 2 }, + ], + }, + }; + // Deletes AB-- + const deleteB: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 2 }, + ], + }, + }; + const actual = rebase(deleteA, deleteB); + // Deletes -DE + const expected: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Delete", id: 2, count: 2 }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("delete ↷ later delete", () => { + // Deletes AB-- + const deleteA: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 2 }, + ], + }, + }; + // Deletes ---DE + const deleteB: SequenceChangeset = { + marks: { + root: [ + 2, + { type: "Delete", id: 2, count: 2 }, + ], + }, + }; + const actual = rebase(deleteA, deleteB); + assert.deepEqual(actual, deleteA); + }); + + it("set ↷ insert", () => { + const sets: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", value: { id: 0, value: 42 } }, + 2, + { type: "Modify", value: { id: 0, value: 42 } }, + ], + }, + }; + const insert: SequenceChangeset = { + marks: { + root: [ + 2, + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // Set at earlier index is unaffected + { type: "Modify", value: { id: 0, value: 42 } }, + 3, + // Set at later index has its index increased + { type: "Modify", value: { id: 0, value: 42 } }, + ], + }, + }; + const actual = rebase(sets, insert); + assert.deepEqual(actual, expected); + }); + + it("modify ↷ insert", () => { + const mods: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + 2, + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + ], + }, + }; + const insert: SequenceChangeset = { + marks: { + root: [ + 2, + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // Modify at earlier index is unaffected + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + 3, + // Modify at later index has its index increased + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + ], + }, + }; + const actual = rebase(mods, insert); + assert.deepEqual(actual, expected); + }); + + it("delete ↷ insert", () => { + // Deletes A-CD-E + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 1 }, + 1, + { type: "Delete", id: 1, count: 2 }, + 1, + { type: "Delete", id: 1, count: 1 }, + ], + }, + }; + // Inserts between C and D + const insert: SequenceChangeset = { + marks: { + root: [ + 3, + [{ type: "Insert", id: 1, content: [{ type, value: 2 }] }], + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // Delete with earlier index is unaffected + { type: "Delete", id: 1, count: 1 }, + 1, + { type: "Delete", id: 1, count: 1 }, + 1, // Delete at overlapping index is split + { type: "Delete", id: 1, count: 1 }, + 1, + // Delete at later index has its index increased + { type: "Delete", id: 1, count: 1 }, + ], + }, + }; + const actual = rebase(deletion, insert); + assert.deepEqual(actual, expected); + }); + + it("insert ↷ insert", () => { + const insertA: SequenceChangeset = { + marks: { + root: [ + [{ 
type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 2, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }; + const insertB: SequenceChangeset = { + marks: { + root: [ + 1, + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + }, + }; + const actual = rebase(insertA, insertB); + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 3, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("revive ↷ insert", () => { + const revive: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 1, tomb }, + 2, + { type: "Revive", id: 2, count: 2, tomb }, + 2, + { type: "Revive", id: 3, count: 1, tomb }, + ], + }, + }; + const insert: SequenceChangeset = { + marks: { + root: [ + 2, + // TODO: test both tiebreak policies + [{ type: "Insert", id: 3, content: [{ type, value: 3 }] }], + ], + }, + }; + const actual = rebase(revive, insert); + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 1, tomb }, + 2, + { type: "Revive", id: 2, count: 2, tomb }, + 3, + { type: "Revive", id: 3, count: 1, tomb }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("set ↷ revive", () => { + const sets: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", value: { id: 0, value: 42 } }, + 2, + { type: "Modify", value: { id: 0, value: 42 } }, + ], + }, + }; + const revive: SequenceChangeset = { + marks: { + root: [ + 2, + { type: "Revive", id: 1, count: 1, tomb }, + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // Set at earlier index is unaffected + { type: "Modify", value: { id: 0, value: 42 } }, + 3, + // Set at later index has its index increased + { type: "Modify", value: { id: 0, value: 42 } }, + ], + }, + }; + const actual = rebase(sets, revive); + assert.deepEqual(actual, expected); + }); + + it("modify ↷ revive", () => { + const mods: SequenceChangeset = { + marks: { + root: [ + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + 2, + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + ], + }, + }; + const revive: SequenceChangeset = { + marks: { + root: [ + 2, + { type: "Revive", id: 1, count: 1, tomb }, + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // Modify at earlier index is unaffected + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + 3, + // Modify at later index has its index increased + { type: "Modify", fields: { foo: [{ type: "Delete", id: 1, count: 1 }] } }, + ], + }, + }; + const actual = rebase(mods, revive); + assert.deepEqual(actual, expected); + }); + + it("delete ↷ revive", () => { + // Deletes A-CD-E + const deletion: SequenceChangeset = { + marks: { + root: [ + { type: "Delete", id: 1, count: 1 }, + 1, + { type: "Delete", id: 1, count: 2 }, + 1, + { type: "Delete", id: 1, count: 1 }, + ], + }, + }; + // Revives content between C and D + const revive: SequenceChangeset = { + marks: { + root: [ + 3, + { type: "Revive", id: 1, count: 1, tomb }, + ], + }, + }; + const expected: SequenceChangeset = { + marks: { + root: [ + // Delete with earlier index is unaffected + { type: "Delete", id: 1, count: 1 }, + 1, + { type: "Delete", id: 1, count: 1 }, + 1, // Delete at overlapping index is split + { type: "Delete", id: 1, count: 1 }, + 1, + // Delete at later index has its index increased + 
{ type: "Delete", id: 1, count: 1 }, + ], + }, + }; + const actual = rebase(deletion, revive); + assert.deepEqual(actual, expected); + }); + + it("insert ↷ revive", () => { + const insert: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 2, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }; + const revive: SequenceChangeset = { + marks: { + root: [ + 1, + { type: "Revive", id: 1, count: 1, tomb }, + ], + }, + }; + const actual = rebase(insert, revive); + const expected: SequenceChangeset = { + marks: { + root: [ + [{ type: "Insert", id: 1, content: [{ type, value: 1 }] }], + 3, + [{ type: "Insert", id: 2, content: [{ type, value: 2 }] }], + ], + }, + }; + assert.deepEqual(actual, expected); + }); + + it("revive ↷ revive", () => { + const reviveA: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 1, tomb }, + 2, + { type: "Revive", id: 2, count: 2, tomb }, + 2, + { type: "Revive", id: 3, count: 1, tomb }, + ], + }, + }; + const reviveB: SequenceChangeset = { + marks: { + root: [ + 2, + { type: "Revive", id: 1, count: 1, tomb }, + ], + }, + }; + const actual = rebase(reviveA, reviveB); + const expected: SequenceChangeset = { + marks: { + root: [ + { type: "Revive", id: 1, count: 1, tomb }, + 2, + { type: "Revive", id: 2, count: 2, tomb }, + 3, + { type: "Revive", id: 3, count: 1, tomb }, + ], + }, + }; + assert.deepEqual(actual, expected); + }); +}); diff --git a/packages/dds/tree/src/test/sequence-change-family/sequenceChangeRebaser.spec.ts b/packages/dds/tree/src/test/sequence-change-family/sequenceChangeRebaser.spec.ts new file mode 100644 index 000000000000..3a192e83bace --- /dev/null +++ b/packages/dds/tree/src/test/sequence-change-family/sequenceChangeRebaser.spec.ts @@ -0,0 +1,125 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { strict as assert } from "assert"; +import { Transposed as T } from "../../changeset"; +import { + sequenceChangeFamily, + sequenceChangeRebaser, + SequenceChangeset, +} from "../../feature-libraries"; +import { TreeSchemaIdentifier } from "../../schema-stored"; +import { Delta } from "../../tree"; +import { brand } from "../../util"; +import { deepFreeze } from "../utils"; + +const type: TreeSchemaIdentifier = brand("Node"); +const tomb = "Dummy Changeset Tag"; + +const testMarks: [string, T.Mark][] = [ + ["SetValue", { type: "Modify", value: { id: 0, value: 42 } }], + ["MInsert", [{ type: "MInsert", id: 0, content: { type, value: 42 }, value: { id: 0, value: 43 } }]], + ["Insert-1x2", [{ type: "Insert", id: 0, content: [{ type, value: 42 }, { type, value: 43 }] }]], + ["Insert-2x1", [ + { type: "Insert", id: 0, content: [{ type, value: 42 }] }, + { type: "Insert", id: 1, content: [{ type, value: 43 }] }, + ]], + ["Delete", { type: "Delete", id: 0, count: 2 }], + ["Revive", { type: "Revive", id: 0, count: 2, tomb }], +]; +deepFreeze(testMarks); + +function asForest(markList: T.MarkList): SequenceChangeset { + return { + marks: { root: markList }, + }; +} + +describe("SequenceChangeFamily", () => { + /** + * This test simulates rebasing over an do-undo pair. 
+ */ + describe("A ↷ [B, B⁻¹] === A", () => { + for (const [name1, mark1] of testMarks) { + for (const [name2, mark2] of testMarks) { + if (name2 === "Delete") { + it.skip(`${name1} ↷ [${name2}, ${name2}⁻¹] => ${name1}`, () => { + /** + * These cases are currently disabled because: + * - Marks that affect existing content are removed instead of muted + * when rebased over the deletion of that content. This prevents us + * from then reinstating the mark when rebasing over the revive. + * - Tombs are not added when rebasing an insert over a gap that is + * immediately left of deleted content. This prevents us from being able to + * accurately track the position of the insert. + */ + }); + } else { + it(`${name1} ↷ [${name2}, ${name2}⁻¹] => ${name1}`, () => { + for (let offset1 = 1; offset1 <= 4; ++offset1) { + for (let offset2 = 1; offset2 <= 4; ++offset2) { + const change1 = asForest([offset1, mark1]); + const change2 = asForest([offset2, mark2]); + const inv = sequenceChangeRebaser.invert(change2); + const r1 = sequenceChangeRebaser.rebase(change1, change2); + const r2 = sequenceChangeRebaser.rebase(r1, inv); + assert.deepEqual(r2, change1); + } + } + }); + } + } + } + }); + + /** + * This test simulates sandwich rebasing: + * a change is first rebased over the inverse of a change it took for granted + * then rebased over the updated version of that change (the same as the original in our case). + * + * The first rebase (A ↷ B) is purely for the purpose of manufacturing a change to which we can + * apply the inverse of some change. + */ + describe("(A ↷ B) ↷ [B⁻¹, B] === A ↷ B", () => { + for (const [name1, mark1] of testMarks) { + for (const [name2, mark2] of testMarks) { + it(`${name1} ↷ [${name2}, ${name2}⁻¹, ${name2}] => ${name1} ↷ ${name2}`, () => { + for (let offset1 = 1; offset1 <= 4; ++offset1) { + for (let offset2 = 1; offset2 <= 4; ++offset2) { + const change1 = asForest([offset1, mark1]); + const change2 = asForest([offset2, mark2]); + const inverse2 = sequenceChangeRebaser.invert(change2); + const r1 = sequenceChangeRebaser.rebase(change1, change2); + const r2 = sequenceChangeRebaser.rebase(r1, inverse2); + const r3 = sequenceChangeRebaser.rebase(r2, change2); + assert.deepEqual(r3, r1); + } + } + }); + } + } + }); + + describe("A ○ A⁻¹ === ε", () => { + for (const [name, mark] of testMarks) { + if (name === "SetValue" || name === "Delete") { + it.skip(`${name} ○ ${name}⁻¹ === ε`, () => { + /** + * These cases are currently disabled because the inverses of SetValue and Delete + * do not capture which prior change they are reverting. 
+ */ + }); + } else { + it(`${name} ○ ${name}⁻¹ === ε`, () => { + const change = asForest([mark]); + const inv = sequenceChangeRebaser.invert(change); + const actual = sequenceChangeRebaser.compose([change, inv]); + const delta = sequenceChangeFamily.intoDelta(actual); + assert.deepEqual(delta, Delta.empty); + }); + } + } + }); +}); diff --git a/packages/dds/tree/src/test/util/offsetList.spec.ts b/packages/dds/tree/src/test/util/offsetList.spec.ts index b51e7e173030..aee3a3cf7c69 100644 --- a/packages/dds/tree/src/test/util/offsetList.spec.ts +++ b/packages/dds/tree/src/test/util/offsetList.spec.ts @@ -14,6 +14,13 @@ describe("OffsetListFactory", () => { assert.deepStrictEqual(factory.list, [42, "foo"]); }); + it("Does not insert 0-length offsets", () => { + const factory = new OffsetListFactory(); + factory.pushOffset(0); + factory.pushContent("foo"); + assert.deepStrictEqual(factory.list, ["foo"]); + }); + it("Merges runs of offsets into a single offset", () => { const factory = new OffsetListFactory(); factory.pushOffset(42); diff --git a/packages/dds/tree/src/util/index.ts b/packages/dds/tree/src/util/index.ts index 0e511b78ca6b..04c9c209978d 100644 --- a/packages/dds/tree/src/util/index.ts +++ b/packages/dds/tree/src/util/index.ts @@ -16,3 +16,4 @@ export * from "./utils"; export * from "./typeCheck"; export * from "./brand"; export * from "./offsetList"; +export * from "./stackyIterator"; diff --git a/packages/dds/tree/src/util/offsetList.ts b/packages/dds/tree/src/util/offsetList.ts index 7fcbd284d5c9..e162d6465d83 100644 --- a/packages/dds/tree/src/util/offsetList.ts +++ b/packages/dds/tree/src/util/offsetList.ts @@ -8,21 +8,32 @@ export type OffsetList, TOffset = number> = /** * Helper class for constructing an offset list that... * - Does not insert offsets if there is no content after them - * - Merges runs offsets together + * - Does not insert 0-sized offsets + * - Merges runs of offsets together */ export class OffsetListFactory { - private offset = 0; - public readonly list: OffsetList = []; + private offset = 0; + public readonly list: OffsetList = []; - public pushOffset(offset: number): void { - this.offset += offset; - } + public push(...offsetOrContent: (number | TContent)[]): void { + for (const item of offsetOrContent) { + if (typeof item === "number") { + this.pushOffset(item); + } else { + this.pushContent(item); + } + } + } - public pushContent(content: TContent): void { - if (this.offset > 0) { - this.list.push(this.offset); - this.offset = 0; - } - this.list.push(content); - } + public pushOffset(offset: number): void { + this.offset += offset; + } + + public pushContent(content: TContent): void { + if (this.offset > 0) { + this.list.push(this.offset); + this.offset = 0; + } + this.list.push(content); + } } diff --git a/packages/dds/tree/src/util/stackyIterator.ts b/packages/dds/tree/src/util/stackyIterator.ts new file mode 100644 index 000000000000..632e7eb58cb7 --- /dev/null +++ b/packages/dds/tree/src/util/stackyIterator.ts @@ -0,0 +1,46 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +/** + * An iterator that supports having items pushed onto it for later iteration. 
+ */
+export class StackyIterator<T> implements Iterator<T>, Iterable<T> {
+    private readonly list: readonly T[];
+    private readonly stack: T[] = [];
+    private index = 0;
+
+    public constructor(list: readonly T[]) {
+        this.list = list;
+    }
+
+    [Symbol.iterator](): Iterator<T> {
+        return this;
+    }
+
+    next(): IteratorResult<T> {
+        if (this.done) {
+            return { value: undefined, done: true };
+        }
+        return { value: this.pop() as T };
+    }
+
+    public get done(): boolean {
+        return this.index >= this.list.length && this.stack.length === 0;
+    }
+
+    public push(item: T): void {
+        this.stack.push(item);
+    }
+
+    public pop(): T | undefined {
+        if (this.stack.length > 0) {
+            return this.stack.pop();
+        }
+        if (this.index >= this.list.length) {
+            return undefined;
+        }
+        return this.list[this.index++];
+    }
+}
diff --git a/packages/test/test-end-to-end-tests/src/test/summaryDataStoreStats.spec.ts b/packages/test/test-end-to-end-tests/src/test/summaryDataStoreStats.spec.ts
index 7e905cd13a6e..64a317d2f82b 100644
--- a/packages/test/test-end-to-end-tests/src/test/summaryDataStoreStats.spec.ts
+++ b/packages/test/test-end-to-end-tests/src/test/summaryDataStoreStats.spec.ts
@@ -21,7 +21,7 @@ import { IRequest } from "@fluidframework/core-interfaces";
 import { IContainerRuntimeBase } from "@fluidframework/runtime-definitions";
 import { requestFluidObject } from "@fluidframework/runtime-utils";
 import { MockLogger, TelemetryNullLogger } from "@fluidframework/telemetry-utils";
-import { ITestObjectProvider } from "@fluidframework/test-utils";
+import { ITestObjectProvider, timeoutAwait } from "@fluidframework/test-utils";
 import { describeNoCompat } from "@fluidframework/test-version-utils";
 
 class TestDataObject extends DataObject {
@@ -86,10 +86,17 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
      * synchronizes all containers and waits for a summary that contains the last processed sequence number.
      * @returns the sequence number of the summary
      */
-    async function waitForSummary(): Promise<number> {
-        await provider.ensureSynchronized();
+    async function waitForSummary(timeout: number): Promise<number> {
+        // Create the timeout error message since the timeout reason in local test is still not clear
+        await timeoutAwait(provider.ensureSynchronized(), {
+            durationMs: timeout,
+            errorMsg: "Timeout happened on provider synchronization",
+        });
         const sequenceNumber = mainContainer.deltaManager.lastSequenceNumber;
-        await summaryCollection.waitSummaryAck(sequenceNumber);
+        await timeoutAwait(summaryCollection.waitSummaryAck(sequenceNumber), {
+            durationMs: timeout,
+            errorMsg: "Timeout happened on waitSummaryAck",
+        });
         return sequenceNumber;
     }
 
@@ -121,7 +128,7 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
         summaryCollection = new SummaryCollection(mainContainer.deltaManager, new TelemetryNullLogger());
     });
 
-    it("should generate correct summary stats with summarizing once", async () => {
+    it("should generate correct summary stats with summarizing once", async function() {
         const directoryKey = "dataStore2";
 
         // Create a second data store (dataStore2) and add its handle to mark it as referenced.
@@ -129,7 +136,7 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
         mainDataStore._root.set(directoryKey, dataStore2.handle);
 
         // Wait for summary that contains the above set.
-        const sequenceNumber = await waitForSummary();
+        const sequenceNumber = await waitForSummary(this.timeout() / 2);
 
         const summaryEvent = getGenerateSummaryEvent(sequenceNumber);
         assert(summaryEvent !== undefined, "generate summary event is undefined");
@@ -137,7 +144,7 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
         assert.strictEqual(summaryEvent.summarizedDataStoreCount, 2, "summarized data store count is wrong");
     });
 
-    it("should generate correct summary stats with changed and unchanged data stores", async () => {
+    it("should generate correct summary stats with changed and unchanged data stores", async function() {
         // Create 5 data stores and add their handles to mark it as referenced.
         const dataStore2 = await dataObjectFactory.createInstance(mainDataStore._context.containerRuntime);
         const dataStore3 = await dataObjectFactory.createInstance(mainDataStore._context.containerRuntime);
@@ -152,7 +159,7 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
         mainDataStore._root.set("dataStore6", dataStore6.handle);
 
         // Wait for summary that contains the above set.
-        let sequenceNumber = await waitForSummary();
+        let sequenceNumber = await waitForSummary(this.timeout() / 4);
         let summaryEvent = getGenerateSummaryEvent(sequenceNumber);
         assert(summaryEvent !== undefined, "generate summary event is undefined");
         assert.strictEqual(summaryEvent.dataStoreCount, 6, "wrong data store count");
@@ -160,7 +167,7 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
 
         mainDataStore._root.delete("dataStore2");
 
-        sequenceNumber = await waitForSummary();
+        sequenceNumber = await waitForSummary(this.timeout() / 4);
         summaryEvent = getGenerateSummaryEvent(sequenceNumber);
         assert(summaryEvent !== undefined, "generate summary event is undefined");
         // all dataStores
@@ -172,7 +179,7 @@ describeNoCompat("Generate Summary Stats", (getTestObjectProvider) => {
         mainDataStore._root.delete("dataStore4");
         mainDataStore._root.delete("dataStore5");
 
-        sequenceNumber = await waitForSummary();
+        sequenceNumber = await waitForSummary(this.timeout() / 4);
         summaryEvent = getGenerateSummaryEvent(sequenceNumber);
         assert(summaryEvent !== undefined, "generate summary event is undefined");
         // all dataStores
diff --git a/server/routerlicious/kubernetes/README.md b/server/routerlicious/kubernetes/README.md
index c15aa9c50969..50c213a4d8b9 100644
--- a/server/routerlicious/kubernetes/README.md
+++ b/server/routerlicious/kubernetes/README.md
@@ -50,8 +50,10 @@ You'll also need to have a Redis, MongoDB, Rabbitmq, and Historian instances run
 
 We install MongoDB and Rabbitmq from the helm stable repository. We also configure MongoDB to use the managed-premium storage class in AKS.
 
-`helm install --set persistence.storageClass=managed-premium,persistence.size=4094Gi,usePassword=false stable/mongodb`
-`helm install --set rbacEnabled=false,rabbitmq.username=prague,rabbitmq.password=[rabbitmq password],persistence.enabled=true,persistence.size=16Gi stable/rabbitmq`
+In the following commands you can omit the optional key+value pairs to use the defaults defined in the Helm Chart. Also, replace the `<...>` with the appropriate value.
+
+`helm install --set persistence.storageClass=managed-premium,persistence.size=4094Gi,usePassword=false,image.registry=<registry>,image.repository=<repository>,image.tag=<tag> bitnami/mongodb`
+`helm install --set rbac.create=false,auth.username=prague,auth.password=[password],persistence.enabled=true,persistence.size=16Gi,image.registry=<registry>,image.repository=<repository>,image.tag=<tag> bitnami/rabbitmq`
 
 Redis, Kafka and Historian come from the `/server/charts` directory. You'll want to install each of them.
diff --git a/server/routerlicious/kubernetes/nginx/README.md b/server/routerlicious/kubernetes/nginx/README.md
index 382843c31aab..471a39c4bbb5 100644
--- a/server/routerlicious/kubernetes/nginx/README.md
+++ b/server/routerlicious/kubernetes/nginx/README.md
@@ -50,15 +50,19 @@
 HELM_RELEASE_NAME=ingress-controller-prod
 VALUES_FILE=values-prod.yaml
 ```
 
-Then define some common variables and deploy the Helm chart:
+Then define some common variables and deploy the Helm chart. In the following commands you can omit the optional key+value pairs to use the defaults defined in the Helm Chart.
 
 ```bash
 HELM_CHART_NAME=ingress-nginx
 HELM_CHART_REPO=https://kubernetes.github.io/ingress-nginx
-HELM_CHART_VERSION=4.1.4
+HELM_CHART_VERSION=4.2.1
+
+helm upgrade --install --set controller.image.registry=<registry> \
+    --set controller.image.image=<image> \
+    --set controller.image.tag=<tag> \
+    --set controller.image.digest=<digest> \
+    $HELM_RELEASE_NAME $HELM_CHART_NAME --version $HELM_CHART_VERSION --repo $HELM_CHART_REPO -f $VALUES_FILE --namespace $K8S_NAMESPACE --create-namespace
 
-helm upgrade --install $HELM_RELEASE_NAME $HELM_CHART_NAME --version $HELM_CHART_VERSION --repo $HELM_CHART_REPO -f $VALUES_FILE --namespace $K8S_NAMESPACE --create-namespace
-```
 
 The output will include a command that you can use to check the status of the `Service` object, something similar to this:
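For reference, a minimal sketch of such a status check, assuming the `$HELM_RELEASE_NAME` and `$K8S_NAMESPACE` variables defined above and the ingress-nginx chart's default Service naming (the exact command and Service name come from the Helm output and may differ):

```bash
# Watch the ingress controller's Service until the cloud provider assigns it an external IP.
# The Service name below assumes the chart's default "<release>-ingress-nginx-controller" pattern.
kubectl get service --namespace $K8S_NAMESPACE -o wide -w "${HELM_RELEASE_NAME}-ingress-nginx-controller"
```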