From 46abe8260b3e990d7a8dfbbbd1aaa3d98c906c40 Mon Sep 17 00:00:00 2001 From: Tyler Butler Date: Tue, 7 Jan 2025 16:30:42 -0800 Subject: [PATCH 01/27] refactor(check:policy): Remove the assert tagging policy (#23492) --- .../repoPolicyCheck/assertShortCode.ts | 260 ------------------ .../src/library/repoPolicyCheck/index.ts | 2 - 2 files changed, 262 deletions(-) delete mode 100644 build-tools/packages/build-cli/src/library/repoPolicyCheck/assertShortCode.ts diff --git a/build-tools/packages/build-cli/src/library/repoPolicyCheck/assertShortCode.ts b/build-tools/packages/build-cli/src/library/repoPolicyCheck/assertShortCode.ts deleted file mode 100644 index d3da607a7652..000000000000 --- a/build-tools/packages/build-cli/src/library/repoPolicyCheck/assertShortCode.ts +++ /dev/null @@ -1,260 +0,0 @@ -/*! - * Copyright (c) Microsoft Corporation and contributors. All rights reserved. - * Licensed under the MIT License. - */ - -import fs from "node:fs"; -import path from "node:path"; -import { - NoSubstitutionTemplateLiteral, - Node, - NumericLiteral, - Project, - SourceFile, - StringLiteral, - SyntaxKind, -} from "ts-morph"; - -import { Handler } from "./common.js"; - -const shortCodes = new Map(); -const newAssetFiles = new Set(); -const codeToMsgMap = new Map(); -let maxShortCode = -1; - -function getCallsiteString(msg: Node): string { - // Use filepath:line number so that the error message can be navigated to by clicking on it in vscode. - return `${msg.getSourceFile().getFilePath()}:${msg.getStartLineNumber()}`; -} - -/** - * Map from assertion function name to the index of its message argument. - * - * TODO: - * This should be moved into a configuration file. - */ -const assertionFunctions: ReadonlyMap = new Map([["assert", 1]]); - -/** - * Given a source file, this function will look for all assert functions contained in it and return the message parameters. 
- * This includes both functions named "assert" and ones named "fail" - * all the functions which is the message parameter - * @param sourceFile - The file to get the assert message parameters for. - * @returns An array of all the assert message parameters - */ -function getAssertMessageParams(sourceFile: SourceFile): Node[] { - const calls = sourceFile.getDescendantsOfKind(SyntaxKind.CallExpression); - const messageArgs: Node[] = []; - for (const call of calls) { - const messageIndex = assertionFunctions.get(call.getExpression().getText()); - if (messageIndex !== undefined) { - const args = call.getArguments(); - if (args.length >= messageIndex && args[messageIndex] !== undefined) { - const messageArg = args[messageIndex]; - messageArgs.push(messageArg); - } - } - } - return messageArgs; -} - -export const handler: Handler = { - name: "assert-short-codes", - match: - /^(packages|experimental|(common\/lib\/common-utils)|(server\/routerlicious\/packages\/protocol-base)).*\/tsconfig\.json/i, - handler: async (tsconfigPath) => { - if (tsconfigPath.includes("test")) { - return; - } - // load the project based on the tsconfig - const project = new Project({ - skipFileDependencyResolution: true, - tsConfigFilePath: tsconfigPath, - }); - - const templateErrors: Node[] = []; - const otherErrors: Node[] = []; - - // walk all the files in the project - for (const sourceFile of project.getSourceFiles()) { - // walk the assert message params in the file - for (const msg of getAssertMessageParams(sourceFile)) { - const nodeKind = msg.getKind(); - switch (nodeKind) { - // If it's a number, validate it's a shortcode - case SyntaxKind.NumericLiteral: { - const numLit = msg as NumericLiteral; - if (!numLit.getText().startsWith("0x")) { - return `Shortcodes must be provided by automation and be in hex format: ${numLit.getText()}\n\t${getCallsiteString( - numLit, - )}`; - } - const numLitValue = numLit.getLiteralValue(); - if (shortCodes.has(numLitValue)) { - // if we find two usages 
of the same short code then fail - return `Duplicate shortcode 0x${numLitValue.toString( - 16, - )} detected\n\t${getCallsiteString( - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - shortCodes.get(numLitValue)!, - )}\n\t${getCallsiteString(numLit)}`; - } - shortCodes.set(numLitValue, numLit); - // calculate the maximun short code to ensure we don't duplicate - maxShortCode = Math.max(numLitValue, maxShortCode); - - // If comment already exists, extract it for the mapping file - const comments = msg.getTrailingCommentRanges(); - if (comments.length > 0) { - let originalErrorText = comments[0] - .getText() - .replace(/\/\*/g, "") - .replace(/\*\//g, "") - .trim(); - - // Replace leading+trailing double quotes and backticks. - // Only do it if the initial and final characters in the string are the only occurrences of - // the double quotes / backticks, to avoid messing up comments that use them in a different - // way. If we clean up the assert comments that have them, this code could go away. 
- const shouldRemoveSurroundingQuotes = (input: string): boolean => { - return ( - (input.startsWith('"') && input.indexOf('"', 1) === input.length - 1) || - (input.startsWith("`") && input.indexOf("`", 1) === input.length - 1) - ); - }; - - // eslint-disable-next-line max-depth - if (shouldRemoveSurroundingQuotes(originalErrorText)) { - // eslint-disable-next-line unicorn/prefer-string-slice - originalErrorText = originalErrorText.substring( - 1, - originalErrorText.length - 1, - ); - } - codeToMsgMap.set(numLit.getText(), originalErrorText); - } - break; - } - // If it's a simple string literal, track the file for replacements later - case SyntaxKind.StringLiteral: - case SyntaxKind.NoSubstitutionTemplateLiteral: { - newAssetFiles.add(sourceFile); - break; - } - // Anything else isn't supported - case SyntaxKind.TemplateExpression: { - templateErrors.push(msg); - break; - } - case SyntaxKind.BinaryExpression: - case SyntaxKind.CallExpression: { - // TODO: why are CallExpression and BinaryExpression silently allowed? - break; - } - default: { - otherErrors.push(msg); - break; - } - } - } - } - - const errorMessages: string[] = []; - if (templateErrors.length > 0) { - errorMessages.push( - `Template expressions are not supported in assertions (they'll be replaced by a short code anyway). 
` + - // eslint-disable-next-line unicorn/no-array-callback-reference - `Use a string literal instead.\n${templateErrors.map(getCallsiteString).join("\n")}`, - ); - } - if (otherErrors.length > 0) { - errorMessages.push( - `Unsupported argument kind:\n${otherErrors - .map((msg) => `${SyntaxKind[msg.getKind()]}: ${getCallsiteString(msg)}`) - .join("\n")}`, - ); - } - if (errorMessages.length > 0) { - // eslint-disable-next-line unicorn/error-message - throw new Error(errorMessages.join("\n\n")); - } - }, - final: (root, resolve) => { - const errors: string[] = []; - - // eslint-disable-next-line unicorn/consistent-function-scoping - function isStringLiteral(msg: Node): msg is StringLiteral | NoSubstitutionTemplateLiteral { - const kind = msg.getKind(); - return ( - kind === SyntaxKind.StringLiteral || kind === SyntaxKind.NoSubstitutionTemplateLiteral - ); - } - - // go through all the newly collected asserts and add short codes - for (const s of newAssetFiles) { - // another policy may have changed the file, so reload it - s.refreshFromFileSystemSync(); - for (const msg of getAssertMessageParams(s)) { - // here we only want to look at those messages that are strings, - // as we validated existing short codes above - if (isStringLiteral(msg)) { - // resolve === fix - if (resolve) { - // for now we don't care about filling gaps, but possible - const shortCode = ++maxShortCode; - shortCodes.set(shortCode, msg); - const text = msg.getLiteralText(); - const shortCodeStr = `0x${shortCode.toString(16).padStart(3, "0")}`; - // replace the message with shortcode, and put the message in a comment - msg.replaceWithText(`${shortCodeStr} /* ${text} */`); - codeToMsgMap.set(shortCodeStr, text); - } else { - // TODO: if we are not in resolve mode we - // allow messages that are not short code. this seems like the right - // behavior for main. 
we may want to enforce shortcodes in release branches in the future - // errors.push(`no assert shortcode: ${getCallsiteString(msg)}`); - break; - } - } - } - if (resolve) { - s.saveSync(); - } - } - const result = errors.length > 0 ? { error: errors.join("\n") } : undefined; - if (resolve) { - writeShortCodeMappingFile(); - } - return result; - }, -}; - -function writeShortCodeMappingFile(): void { - const mapContents = [...codeToMsgMap.entries()] - .sort() - // eslint-disable-next-line unicorn/no-array-reduce - .reduce>((accum, current) => { - accum[current[0]] = current[1]; - return accum; - }, {}); - const targetFolder = "packages/runtime/test-runtime-utils/src"; - - if (!fs.existsSync(targetFolder)) { - fs.mkdirSync(targetFolder, { recursive: true }); - } - - const fileContents = `/*! - * Copyright (c) Microsoft Corporation and contributors. All rights reserved. - * Licensed under the MIT License. - * - * THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY - */ - -// Auto-generated by policy-check in @fluidframework/build-tools. - -export const shortCodeMap = ${JSON.stringify(mapContents, undefined, "\t")}; -`; - fs.writeFileSync(path.join(targetFolder, "assertionShortCodesMap.ts"), fileContents, { - encoding: "utf8", - }); -} diff --git a/build-tools/packages/build-cli/src/library/repoPolicyCheck/index.ts b/build-tools/packages/build-cli/src/library/repoPolicyCheck/index.ts index 7183761a5c08..f338935c481d 100644 --- a/build-tools/packages/build-cli/src/library/repoPolicyCheck/index.ts +++ b/build-tools/packages/build-cli/src/library/repoPolicyCheck/index.ts @@ -3,7 +3,6 @@ * Licensed under the MIT License. 
*/ -import { handler as assertShortCodeHandler } from "./assertShortCode.js"; import { type Handler } from "./common.js"; import { handlers as copyrightFileHeaderHandlers } from "./copyrightFileHeader.js"; import { handler as dockerfilePackageHandler } from "./dockerfilePackages.js"; @@ -24,7 +23,6 @@ export const policyHandlers: Handler[] = [ dockerfilePackageHandler, fluidCaseHandler, ...lockfileHandlers, - assertShortCodeHandler, ...pnpmHandlers, ...fluidBuildTasks, noJsFileHandler, From dababa595e6b6b535d41edd92449d8e1d8874f6c Mon Sep 17 00:00:00 2001 From: "Craig Macomber (Microsoft)" <42876482+CraigMacomber@users.noreply.github.com> Date: Tue, 7 Jan 2025 18:14:21 -0800 Subject: [PATCH 02/27] Make forest config tree-shake compatible (#23488) ## Description Establish a new pattern for configuration to replace enums with something compatible with tree shaking. This has the annoyance that the different options are no longer grouped under a scope. It has the benefits that different values for the same option can now have different release tags, and that the code supporting the unused options can be removed with tree shaking (including for users of the public API, which can't see the alternative options but used to include them in the bundle anyway). Apply this pattern to ForestType, giving a >7KB reduction in the tree bundle, as measure by our bundle size tests. ## Breaking Changes This is an incompatible change to the forest configuration alpha APIs: users will need to point to the new opaque objects instead of enum members. All impacted code will fail to build rather than having runtime errors. 
--- .../dds/tree/api-report/tree.alpha.api.md | 16 ++- packages/dds/tree/src/index.ts | 5 +- packages/dds/tree/src/shared-tree/index.ts | 5 +- .../dds/tree/src/shared-tree/sharedTree.ts | 100 +++++++++++------- .../chunkEncodingEndToEnd.spec.ts | 6 +- .../dds/tree/src/test/memory/tree.spec.ts | 10 +- .../src/test/shared-tree/sharedTree.spec.ts | 12 ++- .../api-report/fluid-framework.alpha.api.md | 16 ++- 8 files changed, 107 insertions(+), 63 deletions(-) diff --git a/packages/dds/tree/api-report/tree.alpha.api.md b/packages/dds/tree/api-report/tree.alpha.api.md index 472b792b94ef..36bb82bbb513 100644 --- a/packages/dds/tree/api-report/tree.alpha.api.md +++ b/packages/dds/tree/api-report/tree.alpha.api.md @@ -186,13 +186,19 @@ export interface ForestOptions { readonly forest?: ForestType; } -// @alpha -export enum ForestType { - Expensive = 2, - Optimized = 1, - Reference = 0 +// @alpha @sealed +export interface ForestType extends ErasedType_2<"ForestType"> { } +// @alpha +export const ForestTypeExpensiveDebug: ForestType; + +// @alpha +export const ForestTypeOptimized: ForestType; + +// @alpha +export const ForestTypeReference: ForestType; + // @alpha @deprecated export function getBranch(tree: ITree): BranchableTree; diff --git a/packages/dds/tree/src/index.ts b/packages/dds/tree/src/index.ts index a182906ce6b9..4469a8eddf24 100644 --- a/packages/dds/tree/src/index.ts +++ b/packages/dds/tree/src/index.ts @@ -32,7 +32,7 @@ export { export { type ITreeInternal, type SharedTreeOptions, - ForestType, + type ForestType, type SharedTreeFormatOptions, SharedTreeFormatVersion, Tree, @@ -55,6 +55,9 @@ export { type TransactionResultExt, type TransactionResultSuccess, type TransactionResultFailed, + ForestTypeOptimized, + ForestTypeExpensiveDebug, + ForestTypeReference, } from "./shared-tree/index.js"; export { diff --git a/packages/dds/tree/src/shared-tree/index.ts b/packages/dds/tree/src/shared-tree/index.ts index f585e2c4cdda..88edd442695e 100644 --- 
a/packages/dds/tree/src/shared-tree/index.ts +++ b/packages/dds/tree/src/shared-tree/index.ts @@ -9,7 +9,7 @@ export { type SharedTreeOptions, SharedTree, getBranch, - ForestType, + type ForestType, type SharedTreeContentSnapshot, type SharedTreeFormatOptions, SharedTreeFormatVersion, @@ -17,6 +17,9 @@ export { defaultSharedTreeOptions, type ForestOptions, type ITreeInternal, + ForestTypeOptimized, + ForestTypeExpensiveDebug, + ForestTypeReference, } from "./sharedTree.js"; export { diff --git a/packages/dds/tree/src/shared-tree/sharedTree.ts b/packages/dds/tree/src/shared-tree/sharedTree.ts index 7d70ec7f7b8b..841c3da1c2d4 100644 --- a/packages/dds/tree/src/shared-tree/sharedTree.ts +++ b/packages/dds/tree/src/shared-tree/sharedTree.ts @@ -3,8 +3,8 @@ * Licensed under the MIT License. */ -import { assert, unreachableCase } from "@fluidframework/core-utils/internal"; -import type { IFluidHandle } from "@fluidframework/core-interfaces/internal"; +import { assert } from "@fluidframework/core-utils/internal"; +import type { ErasedType, IFluidHandle } from "@fluidframework/core-interfaces/internal"; import type { IChannelAttributes, IChannelFactory, @@ -186,30 +186,6 @@ function getCodecVersions(formatVersion: number): ExplicitCodecVersions { return versions; } -/** - * Build and return a forest of the requested type. - */ -export function buildConfiguredForest( - type: ForestType, - schema: TreeStoredSchemaSubscription, - idCompressor: IIdCompressor, -): IEditableForest { - switch (type) { - case ForestType.Optimized: - return buildChunkedForest( - makeTreeChunker(schema, defaultSchemaPolicy), - undefined, - idCompressor, - ); - case ForestType.Reference: - return buildForest(); - case ForestType.Expensive: - return buildForest(undefined, true); - default: - unreachableCase(type); - } -} - /** * Shared tree, configured with a good set of indexes and field kinds which will maintain compatibility over time. 
* @@ -596,26 +572,70 @@ export interface SharedTreeFormatOptions { /** * Used to distinguish between different forest types. + * @remarks + * Current options are {@link ForestTypeReference}, {@link ForestTypeOptimized} and {@link ForestTypeExpensiveDebug}. + * @sealed @alpha + */ +export interface ForestType extends ErasedType<"ForestType"> {} + +/** + * Reference implementation of forest. + * @remarks + * A simple implementation with minimal complexity and moderate debuggability, validation and performance. + * @privateRemarks + * The "ObjectForest" forest type. * @alpha */ -export enum ForestType { - /** - * The "ObjectForest" forest type. - */ - Reference = 0, - /** - * The "ChunkedForest" forest type. - */ - Optimized = 1, - /** - * The "ObjectForest" forest type with expensive asserts for debugging. - */ - Expensive = 2, +export const ForestTypeReference = toForestType(() => buildForest()); + +/** + * Optimized implementation of forest. + * @remarks + * A complex optimized forest implementation, which has minimal validation and debuggability to optimize for performance. + * Uses an internal representation optimized for size designed to scale to larger datasets with reduced overhead. + * @privateRemarks + * The "ChunkedForest" forest type. + * @alpha + */ +export const ForestTypeOptimized = toForestType( + (schema: TreeStoredSchemaSubscription, idCompressor: IIdCompressor) => + buildChunkedForest(makeTreeChunker(schema, defaultSchemaPolicy), undefined, idCompressor), +); + +/** + * Slow implementation of forest intended only for debugging. + * @remarks + * Includes validation with scales poorly. + * May be asymptotically slower than {@link ForestTypeReference}, and may perform very badly with larger data sizes. + * @privateRemarks + * The "ObjectForest" forest type with expensive asserts for debugging. 
+ * @alpha + */ +export const ForestTypeExpensiveDebug = toForestType(() => buildForest(undefined, true)); + +type ForestFactory = ( + schema: TreeStoredSchemaSubscription, + idCompressor: IIdCompressor, +) => IEditableForest; + +function toForestType(factory: ForestFactory): ForestType { + return factory as unknown as ForestType; +} + +/** + * Build and return a forest of the requested type. + */ +export function buildConfiguredForest( + factory: ForestType, + schema: TreeStoredSchemaSubscription, + idCompressor: IIdCompressor, +): IEditableForest { + return (factory as unknown as ForestFactory)(schema, idCompressor); } export const defaultSharedTreeOptions: Required = { jsonValidator: noopValidator, - forest: ForestType.Reference, + forest: ForestTypeReference, treeEncodeType: TreeCompressionStrategy.Compressed, formatVersion: SharedTreeFormatVersion.v3, disposeForksAfterTransaction: true, diff --git a/packages/dds/tree/src/test/feature-libraries/chunked-forest/chunkEncodingEndToEnd.spec.ts b/packages/dds/tree/src/test/feature-libraries/chunked-forest/chunkEncodingEndToEnd.spec.ts index cf559afe6acf..c1691af4e33e 100644 --- a/packages/dds/tree/src/test/feature-libraries/chunked-forest/chunkEncodingEndToEnd.spec.ts +++ b/packages/dds/tree/src/test/feature-libraries/chunked-forest/chunkEncodingEndToEnd.spec.ts @@ -48,11 +48,11 @@ import { cursorForJsonableTreeNode, } from "../../../feature-libraries/index.js"; import { - ForestType, type TreeStoredContent, type ISharedTreeEditor, SharedTreeFactory, Tree, + ForestTypeOptimized, } from "../../../shared-tree/index.js"; import { MockTreeCheckout, @@ -86,7 +86,7 @@ import { MockFluidDataStoreRuntime } from "@fluidframework/test-runtime-utils/in const options = { jsonValidator: typeboxValidator, - forest: ForestType.Optimized, + forest: ForestTypeOptimized, summaryEncodeType: TreeCompressionStrategy.Compressed, }; @@ -397,7 +397,7 @@ describe("End to end chunked encoding", () => { it("Initializing tree creates uniform 
chunks with encoded identifiers", async () => { const factory = new SharedTreeFactory({ jsonValidator: typeboxValidator, - forest: ForestType.Optimized, + forest: ForestTypeOptimized, }); const runtime = new MockFluidDataStoreRuntime({ diff --git a/packages/dds/tree/src/test/memory/tree.spec.ts b/packages/dds/tree/src/test/memory/tree.spec.ts index eca106396eb6..6294aabc0cf2 100644 --- a/packages/dds/tree/src/test/memory/tree.spec.ts +++ b/packages/dds/tree/src/test/memory/tree.spec.ts @@ -14,7 +14,8 @@ import { testIdCompressor } from "../utils.js"; import { configuredSharedTree, - ForestType, + ForestTypeOptimized, + ForestTypeReference, SchemaFactory, TreeCompressionStrategy, TreeViewConfiguration, @@ -233,11 +234,14 @@ describe("SharedTree memory usage", () => { ) { describe(title, () => { for (const numberOfNodes of testNodeCounts) { - for (const forestType of [ForestType.Reference, ForestType.Optimized]) { + for (const [forestName, forestType] of [ + ["ObjectForest", ForestTypeReference], + ["ChunkedForest", ForestTypeOptimized], + ] as const) { benchmarkMemory( new (class implements IMemoryTestObject { public readonly title = - `initialize ${numberOfNodes} nodes into tree using ${forestType === 0 ? 
"ObjectForest" : "ChunkedForest"}`; + `initialize ${numberOfNodes} nodes into tree using ${forestName}`; private sharedTree: TreeView | undefined; diff --git a/packages/dds/tree/src/test/shared-tree/sharedTree.spec.ts b/packages/dds/tree/src/test/shared-tree/sharedTree.spec.ts index 49c4e7db7257..23cb0d244e3c 100644 --- a/packages/dds/tree/src/test/shared-tree/sharedTree.spec.ts +++ b/packages/dds/tree/src/test/shared-tree/sharedTree.spec.ts @@ -47,7 +47,9 @@ import { // eslint-disable-next-line import/no-internal-modules } from "../../feature-libraries/object-forest/objectForest.js"; import { - ForestType, + ForestTypeExpensiveDebug, + ForestTypeOptimized, + ForestTypeReference, getBranch, type ISharedTree, type SharedTree, @@ -100,7 +102,7 @@ const enableSchemaValidation = true; const DebugSharedTree = configuredSharedTree({ jsonValidator: typeboxValidator, - forest: ForestType.Reference, + forest: ForestTypeReference, }) as ISharedObjectKind as ISharedObjectKind; class MockSharedTreeRuntime extends MockFluidDataStoreRuntime { @@ -1886,7 +1888,7 @@ describe("SharedTree", () => { 1, new SharedTreeFactory({ jsonValidator: typeboxValidator, - forest: ForestType.Reference, + forest: ForestTypeReference, }), ); const forest = trees[0].checkout.forest; @@ -1899,7 +1901,7 @@ describe("SharedTree", () => { 1, new SharedTreeFactory({ jsonValidator: typeboxValidator, - forest: ForestType.Optimized, + forest: ForestTypeOptimized, }), ); assert.equal(trees[0].checkout.forest instanceof ChunkedForest, true); @@ -1910,7 +1912,7 @@ describe("SharedTree", () => { 1, new SharedTreeFactory({ jsonValidator: typeboxValidator, - forest: ForestType.Expensive, + forest: ForestTypeExpensiveDebug, }), ); const forest = trees[0].checkout.forest; diff --git a/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md b/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md index 79aa71ca9c8c..59ce78a13310 100644 --- 
a/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md +++ b/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md @@ -238,13 +238,19 @@ export interface ForestOptions { readonly forest?: ForestType; } -// @alpha -export enum ForestType { - Expensive = 2, - Optimized = 1, - Reference = 0 +// @alpha @sealed +export interface ForestType extends ErasedType<"ForestType"> { } +// @alpha +export const ForestTypeExpensiveDebug: ForestType; + +// @alpha +export const ForestTypeOptimized: ForestType; + +// @alpha +export const ForestTypeReference: ForestType; + // @alpha @deprecated export function getBranch(tree: ITree): BranchableTree; From fe8279c774fcc3c4805b49ce4f64d0e03a64c39b Mon Sep 17 00:00:00 2001 From: Mark Fields Date: Tue, 7 Jan 2025 18:54:32 -0800 Subject: [PATCH 03/27] [For 2.20] Remove IContainerRuntimeOptions.flushmode (#23337) `IContainerRuntimeOptions.flushMode` is removed. See https://github.com/microsoft/FluidFramework/releases/tag/client_v2.12.0#user-content-icontainerruntimeoptionsflushmode-is-now-deprecated-23288 --- .changeset/cruel-horses-cheat.md | 10 ++++++++++ .../container-runtime.legacy.alpha.api.md | 4 +--- .../container-runtime/src/containerRuntime.ts | 15 ++------------- 3 files changed, 13 insertions(+), 16 deletions(-) create mode 100644 .changeset/cruel-horses-cheat.md diff --git a/.changeset/cruel-horses-cheat.md b/.changeset/cruel-horses-cheat.md new file mode 100644 index 000000000000..06db91308f3c --- /dev/null +++ b/.changeset/cruel-horses-cheat.md @@ -0,0 +1,10 @@ +--- +"@fluidframework/container-runtime": minor +--- +--- +"section": legacy +--- + +IContainerRuntimeOptions.flushMode has been removed + +See [2.12.0 release note](https://github.com/microsoft/FluidFramework/releases/tag/client_v2.12.0#user-content-icontainerruntimeoptionsflushmode-is-now-deprecated-23288) diff --git a/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md 
b/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md index b7faa6065487..90264330f335 100644 --- a/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md +++ b/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md @@ -34,7 +34,7 @@ export enum ContainerMessageType { // @alpha @deprecated export class ContainerRuntime extends TypedEventEmitter implements IContainerRuntime, IRuntime, ISummarizerRuntime, ISummarizerInternalsProvider, IProvideFluidHandleContext { - protected constructor(context: IContainerContext, registry: IFluidDataStoreRegistry, metadata: IContainerRuntimeMetadata | undefined, electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], runtimeOptions: Readonly> & IContainerRuntimeOptions>, containerScope: FluidObject, baseLogger: ITelemetryBaseLogger, existing: boolean, blobManagerSnapshot: IBlobManagerLoadInfo, _storage: IDocumentStorageService, createIdCompressor: () => Promise, documentsSchemaController: DocumentsSchemaController, featureGatesForTelemetry: Record, provideEntryPoint: (containerRuntime: IContainerRuntime) => Promise, requestHandler?: ((request: IRequest, runtime: IContainerRuntime) => Promise) | undefined, summaryConfiguration?: ISummaryConfiguration, recentBatchInfo?: [number, string][]); + protected constructor(context: IContainerContext, registry: IFluidDataStoreRegistry, metadata: IContainerRuntimeMetadata | undefined, electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], runtimeOptions: Readonly>, containerScope: FluidObject, baseLogger: ITelemetryBaseLogger, existing: boolean, blobManagerSnapshot: IBlobManagerLoadInfo, _storage: IDocumentStorageService, createIdCompressor: () => Promise, documentsSchemaController: DocumentsSchemaController, featureGatesForTelemetry: Record, provideEntryPoint: 
(containerRuntime: IContainerRuntime) => Promise, requestHandler?: ((request: IRequest, runtime: IContainerRuntime) => Promise) | undefined, summaryConfiguration?: ISummaryConfiguration, recentBatchInfo?: [number, string][]); // (undocumented) protected addContainerStateToSummary(summaryTree: ISummaryTreeWithStats, fullTree: boolean, trackState: boolean, telemetryContext?: ITelemetryContext): void; addedGCOutboundRoute(fromPath: string, toPath: string, messageTimestampMs?: number): void; @@ -342,8 +342,6 @@ export interface IContainerRuntimeOptions { readonly compressionOptions?: ICompressionRuntimeOptions; readonly enableRuntimeIdCompressor?: IdCompressorMode; readonly explicitSchemaControl?: boolean; - // @deprecated - readonly flushMode?: FlushMode; // (undocumented) readonly gcOptions?: IGCRuntimeOptions; readonly loadSequenceNumberVerification?: "close" | "log" | "bypass"; diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index d9b3a9822b97..f19e966fbb18 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -470,15 +470,7 @@ export interface IContainerRuntimeOptions { * 3. "bypass" will skip the check entirely. This is not recommended. */ readonly loadSequenceNumberVerification?: "close" | "log" | "bypass"; - /** - * Sets the flush mode for the runtime. In Immediate flush mode the runtime will immediately - * send all operations to the driver layer, while in TurnBased the operations will be buffered - * and then sent them as a single batch at the end of the turn. - * By default, flush mode is TurnBased. - * - * @deprecated Only the default value TurnBased is supported. This option will be removed in the future. - */ - readonly flushMode?: FlushMode; + /** * Enables the runtime to compress ops. See {@link ICompressionRuntimeOptions}. 
*/ @@ -1512,10 +1504,7 @@ export class ContainerRuntime electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], - runtimeOptions: Readonly< - Required> & - IContainerRuntimeOptions // Let flushMode and enabledGroupedBatching be optional now since they're soon to be removed - >, + runtimeOptions: Readonly>, private readonly containerScope: FluidObject, // Create a custom ITelemetryBaseLogger to output telemetry events. public readonly baseLogger: ITelemetryBaseLogger, From c15a136e9ead82033bc4e8f053f7e8af1159b32f Mon Sep 17 00:00:00 2001 From: MarioJGMsoft Date: Wed, 8 Jan 2025 09:28:21 -0800 Subject: [PATCH 04/27] Remove reentrantBatchGroupingEnabled (#23493) ## Description For the work in [#8124](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/8124) to be completed, we need to remove the ability for reentrant batches to configure whether grouping is on or off, it will now be on by default. Acceptance Criteria: Fluid users can no longer configure reentrantBatchGroupingEnabled Execution Plan: Remove reentrantBatchGroupingEnabled configuration from containerRuntime.ts and opGroupingManager.ts. ## Reviewer Guidance Let me know if there's a better way to accomplish the task. 
Fixes: [AB#27892](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/27892) --- .../container-runtime/src/containerRuntime.ts | 3 --- .../src/opLifecycle/opGroupingManager.ts | 6 ++---- .../opLifecycle/OpGroupingManager.spec.ts | 21 ++----------------- .../src/test/opLifecycle/outbox.spec.ts | 8 ------- .../remoteMessageProcessor.spec.ts | 2 -- 5 files changed, 4 insertions(+), 36 deletions(-) diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index f19e966fbb18..e4b0b16a2707 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -1654,9 +1654,6 @@ export class ContainerRuntime groupedBatchingEnabled: this.groupedBatchingEnabled, opCountThreshold: this.mc.config.getNumber("Fluid.ContainerRuntime.GroupedBatchingOpCount") ?? 2, - reentrantBatchGroupingEnabled: - this.mc.config.getBoolean("Fluid.ContainerRuntime.GroupedBatchingReentrancy") ?? 
- true, }, this.mc.logger, ); diff --git a/packages/runtime/container-runtime/src/opLifecycle/opGroupingManager.ts b/packages/runtime/container-runtime/src/opLifecycle/opGroupingManager.ts index 918a82f516af..622bb470db8a 100644 --- a/packages/runtime/container-runtime/src/opLifecycle/opGroupingManager.ts +++ b/packages/runtime/container-runtime/src/opLifecycle/opGroupingManager.ts @@ -35,7 +35,6 @@ export function isGroupedBatch(op: ISequencedDocumentMessage): boolean { export interface OpGroupingManagerConfig { readonly groupedBatchingEnabled: boolean; readonly opCountThreshold: number; - readonly reentrantBatchGroupingEnabled: boolean; } export class OpGroupingManager { @@ -156,9 +155,8 @@ export class OpGroupingManager { this.config.groupedBatchingEnabled && // The number of ops in the batch must surpass the configured threshold // or be empty (to allow for empty batches to be grouped) - (batch.messages.length === 0 || batch.messages.length >= this.config.opCountThreshold) && - // Support for reentrant batches must be explicitly enabled - (this.config.reentrantBatchGroupingEnabled || batch.hasReentrantOps !== true) + (batch.messages.length === 0 || batch.messages.length >= this.config.opCountThreshold) + // Support for reentrant batches will be on by default ); } } diff --git a/packages/runtime/container-runtime/src/test/opLifecycle/OpGroupingManager.spec.ts b/packages/runtime/container-runtime/src/test/opLifecycle/OpGroupingManager.spec.ts index 8cf41ab8754a..fabf6938f0f9 100644 --- a/packages/runtime/container-runtime/src/test/opLifecycle/OpGroupingManager.spec.ts +++ b/packages/runtime/container-runtime/src/test/opLifecycle/OpGroupingManager.spec.ts @@ -57,23 +57,19 @@ describe("OpGroupingManager", () => { const options: ConfigOption[] = [ { enabled: false, expectedResult: false }, { enabled: true, tooSmall: true, expectedResult: false }, - { enabled: true, reentrant: true, expectedResult: false }, - { enabled: true, reentrant: true, reentryEnabled: true, 
expectedResult: true }, + { enabled: true, reentrant: true, expectedResult: true }, { enabled: true, expectedResult: true }, ]; options.forEach((option) => { it(`shouldGroup: groupedBatchingEnabled [${option.enabled}] tooSmall [${ option.tooSmall === true - }] reentrant [${option.reentrant === true}] reentryEnabled [${ - option.reentryEnabled === true - }]`, () => { + }] reentrant [${option.reentrant === true}]`, () => { assert.strictEqual( new OpGroupingManager( { groupedBatchingEnabled: option.enabled, opCountThreshold: option.tooSmall === true ? 10 : 2, - reentrantBatchGroupingEnabled: option.reentryEnabled ?? false, }, mockLogger, ).shouldGroup(createBatch(5, option.reentrant)), @@ -90,7 +86,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: false, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).groupBatch(createBatch(5)); @@ -102,7 +97,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).groupBatch(createBatch(5)); @@ -123,7 +117,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).groupBatch(createBatch(5, false, false, batchId)); @@ -137,7 +130,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: false, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).createEmptyGroupedBatch("resubmittingBatchId", 0); @@ -150,7 +142,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).createEmptyGroupedBatch(batchId, 0); @@ -169,7 +160,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).shouldGroup({ @@ -187,7 +177,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, 
opCountThreshold: 10, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).groupBatch(createBatch(5)); @@ -200,7 +189,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).groupBatch(createBatch(5, false, true)); @@ -213,7 +201,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ).groupBatch(createBatch(5, false, true, "batchId")); @@ -227,7 +214,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, mockLogger, ); @@ -280,7 +266,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, mockLogger, ); @@ -299,7 +284,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: false, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, mockLogger, ); @@ -344,7 +328,6 @@ describe("OpGroupingManager", () => { { groupedBatchingEnabled: false, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, mockLogger, ); diff --git a/packages/runtime/container-runtime/src/test/opLifecycle/outbox.spec.ts b/packages/runtime/container-runtime/src/test/opLifecycle/outbox.spec.ts index 0038e66472b0..204f2293af66 100644 --- a/packages/runtime/container-runtime/src/test/opLifecycle/outbox.spec.ts +++ b/packages/runtime/container-runtime/src/test/opLifecycle/outbox.spec.ts @@ -232,7 +232,6 @@ describe("Outbox", () => { params.opGroupingConfig ?? 
{ groupedBatchingEnabled: false, opCountThreshold: Infinity, - reentrantBatchGroupingEnabled: false, }, mockLogger, ), @@ -326,7 +325,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, }); currentSeqNumbers.referenceSequenceNumber = 0; @@ -358,7 +356,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: true, opCountThreshold: 3, - reentrantBatchGroupingEnabled: true, }, }); // Flush 1 - resubmit multi-message batch including ID Allocation @@ -1009,7 +1006,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: false, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, }); @@ -1032,7 +1028,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, }); @@ -1057,7 +1052,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, }); @@ -1080,7 +1074,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: false, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, }); @@ -1103,7 +1096,6 @@ describe("Outbox", () => { opGroupingConfig: { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: true, }, }); diff --git a/packages/runtime/container-runtime/src/test/opLifecycle/remoteMessageProcessor.spec.ts b/packages/runtime/container-runtime/src/test/opLifecycle/remoteMessageProcessor.spec.ts index 5a3e5f88d7c2..f2c965b51430 100644 --- a/packages/runtime/container-runtime/src/test/opLifecycle/remoteMessageProcessor.spec.ts +++ b/packages/runtime/container-runtime/src/test/opLifecycle/remoteMessageProcessor.spec.ts @@ -39,7 +39,6 @@ describe("RemoteMessageProcessor", () => { { groupedBatchingEnabled: true, opCountThreshold: Infinity, - reentrantBatchGroupingEnabled: false, }, logger, ), @@ -130,7 +129,6 
@@ describe("RemoteMessageProcessor", () => { { groupedBatchingEnabled: true, opCountThreshold: 2, - reentrantBatchGroupingEnabled: false, }, mockLogger, ); From e3d6ba05baa54da527f65ab78384e6912c7b968d Mon Sep 17 00:00:00 2001 From: "Craig Macomber (Microsoft)" <42876482+CraigMacomber@users.noreply.github.com> Date: Wed, 8 Jan 2025 09:37:57 -0800 Subject: [PATCH 05/27] Update typebox (#23502) ## Description Update typebox --- examples/apps/tree-cli-app/package.json | 2 +- packages/dds/tree/package.json | 2 +- pnpm-lock.yaml | 13 +++++++++---- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/examples/apps/tree-cli-app/package.json b/examples/apps/tree-cli-app/package.json index 7442cd78ec54..66554e603655 100644 --- a/examples/apps/tree-cli-app/package.json +++ b/examples/apps/tree-cli-app/package.json @@ -38,7 +38,7 @@ "@fluidframework/id-compressor": "workspace:~", "@fluidframework/runtime-utils": "workspace:~", "@fluidframework/tree": "workspace:~", - "@sinclair/typebox": "^0.32.29" + "@sinclair/typebox": "^0.34.13" }, "devDependencies": { "@biomejs/biome": "~1.9.3", diff --git a/packages/dds/tree/package.json b/packages/dds/tree/package.json index e4063e6fcf96..455befd11e50 100644 --- a/packages/dds/tree/package.json +++ b/packages/dds/tree/package.json @@ -164,7 +164,7 @@ "@fluidframework/runtime-utils": "workspace:~", "@fluidframework/shared-object-base": "workspace:~", "@fluidframework/telemetry-utils": "workspace:~", - "@sinclair/typebox": "^0.32.29", + "@sinclair/typebox": "^0.34.13", "@tylerbu/sorted-btree-es6": "^1.8.0", "@types/ungap__structured-clone": "^1.2.0", "@ungap/structured-clone": "^1.2.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a1b94a4dbafd..0944bff6ce5f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1131,8 +1131,8 @@ importers: specifier: workspace:~ version: link:../../../packages/dds/tree '@sinclair/typebox': - specifier: ^0.32.29 - version: 0.32.35 + specifier: ^0.34.13 + version: 0.34.13 devDependencies: 
'@biomejs/biome': specifier: ~1.9.3 @@ -9146,8 +9146,8 @@ importers: specifier: workspace:~ version: link:../../utils/telemetry-utils '@sinclair/typebox': - specifier: ^0.32.29 - version: 0.32.35 + specifier: ^0.34.13 + version: 0.34.13 '@tylerbu/sorted-btree-es6': specifier: ^1.8.0 version: 1.8.0 @@ -23938,6 +23938,11 @@ packages: /@sinclair/typebox@0.32.35: resolution: {integrity: sha512-Ul3YyOTU++to8cgNkttakC0dWvpERr6RYoHO2W47DLbFvrwBDJUY31B1sImH6JZSYc4Kt4PyHtoPNu+vL2r2dA==} + dev: true + + /@sinclair/typebox@0.34.13: + resolution: {integrity: sha512-ceVKqyCEgC355Kw0s/0tyfY9MzMQINSykJ/pG2w6YnaZyrcjV48svZpr8lVZrYgWjzOmrIPBhQRAtr/7eJpA5g==} + dev: false /@sindresorhus/is@4.6.0: resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} From c55eaac80d2a931ce5cfd49b29eea0376bf40301 Mon Sep 17 00:00:00 2001 From: "Craig Macomber (Microsoft)" <42876482+CraigMacomber@users.noreply.github.com> Date: Wed, 8 Jan 2025 10:01:10 -0800 Subject: [PATCH 06/27] fix(build-cli): Update promptToGenerateReleaseNotes to use non-deprecated flag (#23482) ## Description Update promptToGenerateReleaseNotes to use non-deprecated flag --- build-tools/packages/build-cli/src/handlers/promptFunctions.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools/packages/build-cli/src/handlers/promptFunctions.ts b/build-tools/packages/build-cli/src/handlers/promptFunctions.ts index 933d7b66ce64..fe392e122a51 100644 --- a/build-tools/packages/build-cli/src/handlers/promptFunctions.ts +++ b/build-tools/packages/build-cli/src/handlers/promptFunctions.ts @@ -506,7 +506,7 @@ export const promptToGenerateReleaseNotes: StateHandlerFunction = async ( { title: "FIRST: generate:releaseNotes", message: `To generate the release notes, run the following command from the root of your release repo:`, - cmd: `flub generate releaseNotes -g ${releaseGroup} -t ${bumpType} --out RELEASE_NOTES/${releaseVersion}.md`, + cmd: `flub 
generate releaseNotes -g ${releaseGroup} -t ${bumpType} --outFile RELEASE_NOTES/${releaseVersion}.md`, }, { title: "FINALLY: merge the resulting changes", From e21e970fe7a19273b2c5d71091dbdc71afd1ea6c Mon Sep 17 00:00:00 2001 From: yann-achard-MS <97201204+yann-achard-MS@users.noreply.github.com> Date: Wed, 8 Jan 2025 10:17:50 -0800 Subject: [PATCH 07/27] Build: more retries on npm install failures (round 2) (#23503) ## Description This is a follow up to https://github.com/microsoft/FluidFramework/pull/23487. NPM installs are prone to failing with ECONNRESET when fetching from pkgs.dev.azure.com. Increasing the number of retries should reduce the number of definitive failures (and the number of pipeline alerts). See conversation [here](https://teams.microsoft.com/l/message/19:07c78dc203f74d24a204f097ffa0fd6b@thread.skype/1736278532976?tenantId=72f988bf-86f1-41af-91ab-2d7cd011db47&groupId=9ce27575-2f82-4689-abdb-bcff07e8063b&parentMessageId=1736278532976&teamName=Fluid%20Framework&channelName=FF%20Hot&createdTime=1736278532976) for more details. 
## Breaking Changes None --- .../templates/include-test-perf-benchmarks-install-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/pipelines/templates/include-test-perf-benchmarks-install-package.yml b/tools/pipelines/templates/include-test-perf-benchmarks-install-package.yml index c5f588a85362..59e8b03def21 100644 --- a/tools/pipelines/templates/include-test-perf-benchmarks-install-package.yml +++ b/tools/pipelines/templates/include-test-perf-benchmarks-install-package.yml @@ -93,7 +93,7 @@ steps: # Install package that has performance tests - task: Bash@3 displayName: Install package with perf tests - ${{ parameters.testPackageName }} - retryCountOnTaskFailure: 1 + retryCountOnTaskFailure: 4 inputs: targetType: 'inline' workingDirectory: ${{ parameters.installPath }} From 00577d3b038935192a6445b11f39dbe963798702 Mon Sep 17 00:00:00 2001 From: Matt Rakow Date: Wed, 8 Jan 2025 10:51:18 -0800 Subject: [PATCH 08/27] test: Reenable bogus load test (#23494) --- .../src/test/containerCreate.spec.ts | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/packages/service-clients/end-to-end-tests/azure-client/src/test/containerCreate.spec.ts b/packages/service-clients/end-to-end-tests/azure-client/src/test/containerCreate.spec.ts index dd9e3efb9bf8..fb3e5aa4fd0e 100644 --- a/packages/service-clients/end-to-end-tests/azure-client/src/test/containerCreate.spec.ts +++ b/packages/service-clients/end-to-end-tests/azure-client/src/test/containerCreate.spec.ts @@ -201,17 +201,25 @@ for (const testOpts of testMatrix) { * * Note: This test is currently skipped because it is failing when ran against tinylicious (azure-local-service). 
*/ - it.skip("cannot load improperly created container (cannot load a non-existent container)", async () => { + it("cannot load improperly created container (cannot load a non-existent container)", async () => { const consoleErrorFn = console.error; console.error = (): void => {}; const containerAndServicesP = client.getContainer("containerConfig", schema, "2"); const errorFn = (error: Error): boolean => { assert.notStrictEqual(error.message, undefined, "Azure Client error is undefined"); - assert.strict( - error.message.startsWith("R11s fetch error"), - `Unexpected error: ${error.message}`, - ); + // AFR gives R11s fetch error, T9s gives 0x8e4 + if (process.env.FLUID_CLIENT === "azure") { + assert.strict( + "errorType" in error && + error.errorType === "fileNotFoundOrAccessDeniedError" && + "statusCode" in error && + error.statusCode === 404, + `Unexpected error: ${error.message}`, + ); + } else { + assert.strict(error.message === "0x8e4", `Unexpected error: ${error.message}`); + } return true; }; From 61ba06aa9881c30ffeeedcaaede9c5a1a0c81abd Mon Sep 17 00:00:00 2001 From: Mark Fields Date: Wed, 8 Jan 2025 11:58:37 -0800 Subject: [PATCH 09/27] Move ContainerRuntime class to internal scope (#23341) Stop exporting ContainerRuntime class under the `/legacy` path. 
See [deprecation release note](https://github.com/microsoft/FluidFramework/releases/tag/client_v2.12.0#user-content-the-containerruntime-class-is-now-deprecated-23331) --------- Co-authored-by: jzaffiro <110866475+jzaffiro@users.noreply.github.com> --- .changeset/better-mails-judge.md | 15 ++ .../api-report/aqueduct.legacy.alpha.api.md | 3 +- .../baseContainerRuntimeFactory.ts | 16 +- .../attributor/src/mixinAttributor.ts | 7 - .../container-runtime.legacy.alpha.api.md | 147 ------------------ .../container-runtime/src/containerRuntime.ts | 6 +- .../src/loadTestDataStore.ts | 2 - .../src/testContainerRuntimeFactory.ts | 7 - 8 files changed, 24 insertions(+), 179 deletions(-) create mode 100644 .changeset/better-mails-judge.md diff --git a/.changeset/better-mails-judge.md b/.changeset/better-mails-judge.md new file mode 100644 index 000000000000..ebc134a405cb --- /dev/null +++ b/.changeset/better-mails-judge.md @@ -0,0 +1,15 @@ +--- +"@fluidframework/aqueduct": minor +"@fluid-experimental/attributor": minor +"@fluidframework/container-runtime": minor +"@fluidframework/test-utils": minor +--- +--- +"section": legacy +--- + +ContainerRuntime class is no longer exported + +Use `IContainerRuntime` to replace type usages and use the free function `loadContainerRuntime` to replace usages of the static method `ContainerRuntime.loadRuntime`. + +See the [deprecation release note](https://github.com/microsoft/FluidFramework/releases/tag/client_v2.12.0#user-content-the-containerruntime-class-is-now-deprecated-23331) for more details. 
diff --git a/packages/framework/aqueduct/api-report/aqueduct.legacy.alpha.api.md b/packages/framework/aqueduct/api-report/aqueduct.legacy.alpha.api.md index ab51b0de266b..71f0b34270a4 100644 --- a/packages/framework/aqueduct/api-report/aqueduct.legacy.alpha.api.md +++ b/packages/framework/aqueduct/api-report/aqueduct.legacy.alpha.api.md @@ -12,8 +12,7 @@ export class BaseContainerRuntimeFactory extends RuntimeFactoryHelper implements get IFluidDataStoreRegistry(): IFluidDataStoreRegistry; instantiateFirstTime(runtime: IContainerRuntime): Promise; instantiateFromExisting(runtime: IContainerRuntime): Promise; - // @deprecated - preInitialize(context: IContainerContext, existing: boolean): Promise; + preInitialize(context: IContainerContext, existing: boolean): Promise; } // @alpha diff --git a/packages/framework/aqueduct/src/container-runtime-factories/baseContainerRuntimeFactory.ts b/packages/framework/aqueduct/src/container-runtime-factories/baseContainerRuntimeFactory.ts index 7bbe7e050a2e..dd3b02138674 100644 --- a/packages/framework/aqueduct/src/container-runtime-factories/baseContainerRuntimeFactory.ts +++ b/packages/framework/aqueduct/src/container-runtime-factories/baseContainerRuntimeFactory.ts @@ -3,11 +3,13 @@ * Licensed under the MIT License. */ -import type { IContainerContext } from "@fluidframework/container-definitions/internal"; +import type { + IContainerContext, + IRuntime, +} from "@fluidframework/container-definitions/internal"; import { - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope - ContainerRuntime, FluidDataStoreRegistry, + loadContainerRuntime, type IContainerRuntimeOptions, } from "@fluidframework/container-runtime/internal"; import type { IContainerRuntime } from "@fluidframework/container-runtime-definitions/internal"; @@ -121,14 +123,11 @@ export class BaseContainerRuntimeFactory * Called at the start of initializing a container, to create the container runtime instance. 
* @param context - The context for the container being initialized * @param existing - Whether the container already exists and is being loaded (else it's being created new just now) - * - * @deprecated This function should not be called directly, use instantiateRuntime instead. */ public async preInitialize( context: IContainerContext, existing: boolean, - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope - ): Promise { + ): Promise { const scope: Partial = context.scope; if (this.dependencyContainer) { const dc = new DependencyContainer( @@ -138,8 +137,7 @@ export class BaseContainerRuntimeFactory scope.IFluidDependencySynthesizer = dc; } - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope - return ContainerRuntime.loadRuntime({ + return loadContainerRuntime({ context, existing, runtimeOptions: this.runtimeOptions, diff --git a/packages/framework/attributor/src/mixinAttributor.ts b/packages/framework/attributor/src/mixinAttributor.ts index ec421f21a01e..69e7583e63c0 100644 --- a/packages/framework/attributor/src/mixinAttributor.ts +++ b/packages/framework/attributor/src/mixinAttributor.ts @@ -4,7 +4,6 @@ */ import { type IContainerContext } from "@fluidframework/container-definitions/internal"; -// eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope import { ContainerRuntime } from "@fluidframework/container-runtime/internal"; import type { IContainerRuntimeOptions } from "@fluidframework/container-runtime/internal"; import { type IContainerRuntime } from "@fluidframework/container-runtime-definitions/internal"; @@ -53,9 +52,7 @@ export async function getRuntimeAttributor( * @internal */ export const mixinAttributor = ( - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope Base: typeof ContainerRuntime = ContainerRuntime, - // eslint-disable-next-line 
import/no-deprecated -- ContainerRuntime class to be moved to internal scope ): typeof ContainerRuntime => class ContainerRuntimeWithAttributor extends Base { public static async loadRuntime(params: { @@ -64,14 +61,12 @@ export const mixinAttributor = ( existing: boolean; runtimeOptions?: IContainerRuntimeOptions; containerScope?: FluidObject; - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope containerRuntimeCtor?: typeof ContainerRuntime; /** * @deprecated Will be removed once Loader LTS version is "2.0.0-internal.7.0.0". Migrate all usage of IFluidRouter to the "entryPoint" pattern. Refer to Removing-IFluidRouter.md */ requestHandler?: (request: IRequest, runtime: IContainerRuntime) => Promise; provideEntryPoint: (containerRuntime: IContainerRuntime) => Promise; - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope }): Promise { const { context, @@ -81,7 +76,6 @@ export const mixinAttributor = ( provideEntryPoint, runtimeOptions, containerScope, - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope containerRuntimeCtor = ContainerRuntimeWithAttributor as unknown as typeof ContainerRuntime, } = params; @@ -129,5 +123,4 @@ export const mixinAttributor = ( return runtime; } - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope } as unknown as typeof ContainerRuntime; diff --git a/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md b/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md index 90264330f335..8dd1ec9a80f9 100644 --- a/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md +++ b/packages/runtime/container-runtime/api-report/container-runtime.legacy.alpha.api.md @@ -32,153 +32,6 @@ export enum ContainerMessageType { Rejoin = "rejoin" } -// @alpha 
@deprecated -export class ContainerRuntime extends TypedEventEmitter implements IContainerRuntime, IRuntime, ISummarizerRuntime, ISummarizerInternalsProvider, IProvideFluidHandleContext { - protected constructor(context: IContainerContext, registry: IFluidDataStoreRegistry, metadata: IContainerRuntimeMetadata | undefined, electedSummarizerData: ISerializedElection | undefined, chunks: [string, string[]][], dataStoreAliasMap: [string, string][], runtimeOptions: Readonly>, containerScope: FluidObject, baseLogger: ITelemetryBaseLogger, existing: boolean, blobManagerSnapshot: IBlobManagerLoadInfo, _storage: IDocumentStorageService, createIdCompressor: () => Promise, documentsSchemaController: DocumentsSchemaController, featureGatesForTelemetry: Record, provideEntryPoint: (containerRuntime: IContainerRuntime) => Promise, requestHandler?: ((request: IRequest, runtime: IContainerRuntime) => Promise) | undefined, summaryConfiguration?: ISummaryConfiguration, recentBatchInfo?: [number, string][]); - // (undocumented) - protected addContainerStateToSummary(summaryTree: ISummaryTreeWithStats, fullTree: boolean, trackState: boolean, telemetryContext?: ITelemetryContext): void; - addedGCOutboundRoute(fromPath: string, toPath: string, messageTimestampMs?: number): void; - // (undocumented) - get attachState(): AttachState; - // (undocumented) - readonly baseLogger: ITelemetryBaseLogger; - // (undocumented) - readonly clientDetails: IClientDetails; - // (undocumented) - get clientId(): string | undefined; - // (undocumented) - readonly closeFn: (error?: ICriticalContainerError) => void; - collectGarbage(options: { - logger?: ITelemetryLoggerExt; - runSweep?: boolean; - fullGC?: boolean; - }, telemetryContext?: ITelemetryContext): Promise; - // (undocumented) - get connected(): boolean; - // (undocumented) - get containerRuntime(): this; - // (undocumented) - createDataStore(pkg: Readonly, loadingGroupId?: string): Promise; - // @deprecated (undocumented) - 
_createDataStoreWithProps(pkg: Readonly, props?: any): Promise; - // (undocumented) - createDetachedDataStore(pkg: Readonly, loadingGroupId?: string): IFluidDataStoreContextDetached; - createSummary(blobRedirectTable?: Map, telemetryContext?: ITelemetryContext): ISummaryTree; - // (undocumented) - deleteChildSummarizerNode(id: string): void; - deleteSweepReadyNodes(sweepReadyRoutes: readonly string[]): readonly string[]; - get deltaManager(): IDeltaManager; - // (undocumented) - dispose(error?: Error): void; - // (undocumented) - get disposed(): boolean; - // (undocumented) - readonly disposeFn: (error?: ICriticalContainerError) => void; - // (undocumented) - enqueueSummarize(options: IEnqueueSummarizeOptions): EnqueueSummarizeResult; - ensureNoDataModelChanges(callback: () => T): T; - // (undocumented) - get flushMode(): FlushMode; - // @deprecated - get gcThrowOnTombstoneUsage(): boolean; - // @deprecated - get gcTombstoneEnforcementAllowed(): boolean; - generateDocumentUniqueId(): string | (number & { - readonly SessionUnique: "cea55054-6b82-4cbf-ad19-1fa645ea3b3e"; - } & { - readonly OpNormalized: "9209432d-a959-4df7-b2ad-767ead4dbcae"; - }); - // (undocumented) - readonly getAbsoluteUrl: (relativeUrl: string) => Promise; - getAliasedDataStoreEntryPoint(alias: string): Promise | undefined>; - // (undocumented) - getAudience(): IAudience; - // (undocumented) - getCreateChildSummarizerNodeFn(id: string, createParam: CreateChildSummarizerNodeParam): (summarizeInternal: SummarizeInternalFn, getGCDataFn: (fullGC?: boolean) => Promise) => ISummarizerNodeWithGC; - getCurrentReferenceTimestampMs(): number | undefined; - getEntryPoint(): Promise; - getGCData(fullGC?: boolean): Promise; - getGCNodePackagePath(nodePath: string): Promise; - getNodeType(nodePath: string): GCNodeType; - // (undocumented) - getPendingLocalState(props?: IGetPendingLocalStateProps): unknown; - // (undocumented) - getQuorum(): IQuorumClients; - getSnapshotForLoadingGroupId(loadingGroupIds: 
string[], pathParts: string[]): Promise<{ - snapshotTree: ISnapshotTree; - sequenceNumber: number; - }>; - get idCompressor(): (IIdCompressor & IIdCompressorCore) | undefined; - // (undocumented) - get idCompressorMode(): IdCompressorMode; - // (undocumented) - get IFluidDataStoreRegistry(): IFluidDataStoreRegistry; - // (undocumented) - get IFluidHandleContext(): IFluidHandleContext; - get isDirty(): boolean; - protected _loadIdCompressor: Promise | undefined; - static loadRuntime(params: { - context: IContainerContext; - registryEntries: NamedFluidDataStoreRegistryEntries; - existing: boolean; - runtimeOptions?: IContainerRuntimeOptions; - containerScope?: FluidObject; - containerRuntimeCtor?: typeof ContainerRuntime; - requestHandler?: (request: IRequest, runtime: IContainerRuntime) => Promise; - provideEntryPoint: (containerRuntime: IContainerRuntime) => Promise; - }): Promise; - // (undocumented) - makeLocallyVisible(): void; - // (undocumented) - notifyOpReplay(message: ISequencedDocumentMessage): Promise; - // (undocumented) - onSchemaChange(schema: IDocumentSchemaCurrent): void; - // (undocumented) - readonly options: Record; - orderSequentially(callback: () => T): T; - process({ ...messageCopy }: ISequencedDocumentMessage, local: boolean): void; - // (undocumented) - processSignal(message: ISignalMessage, local: boolean): void; - refreshLatestSummaryAck(options: IRefreshSummaryAckOptions): Promise; - resolveHandle(request: IRequest): Promise; - // (undocumented) - get scope(): FluidObject; - get sessionSchema(): { - explicitSchemaControl?: true | undefined; - compressionLz4?: true | undefined; - idCompressorMode?: IdCompressorMode; - opGroupingEnabled?: true | undefined; - disallowedVersions?: string[] | undefined; - }; - // (undocumented) - setAttachState(attachState: AttachState.Attaching | AttachState.Attached): void; - // (undocumented) - setChannelDirty(address: string): void; - // (undocumented) - setConnectionState(connected: boolean, clientId?: 
string): void; - // (undocumented) - get storage(): IDocumentStorageService; - // (undocumented) - submitMessage(type: ContainerMessageType.FluidDataStoreOp | ContainerMessageType.Alias | ContainerMessageType.Attach, contents: any, localOpMetadata?: unknown): void; - submitSignal(type: string, content: unknown, targetClientId?: string): void; - submitSummary(options: ISubmitSummaryOptions): Promise; - summarize(options: { - fullTree?: boolean; - trackState?: boolean; - summaryLogger?: ITelemetryLoggerExt; - runGC?: boolean; - fullGC?: boolean; - runSweep?: boolean; - }): Promise; - // (undocumented) - summarizeOnDemand(options: IOnDemandSummarizeOptions): ISummarizeResults; - get summarizerClientId(): string | undefined; - updateTombstonedRoutes(tombstonedRoutes: readonly string[]): void; - updateUsedRoutes(usedRoutes: readonly string[]): void; - // (undocumented) - uploadBlob(blob: ArrayBufferLike, signal?: AbortSignal): Promise>; -} - // @alpha export const currentDocumentVersionSchema = 1; diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index e4b0b16a2707..aa66add01236 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -847,11 +847,7 @@ export async function loadContainerRuntime( * Represents the runtime of the container. Contains helper functions/state of the container. * It will define the store level mappings. * - * @deprecated To be removed from the Legacy-Alpha API in version 2.20.0. - * Use the loadContainerRuntime function and interfaces IContainerRuntime / IRuntime instead. 
- * - * @legacy - * @alpha + * @internal */ export class ContainerRuntime extends TypedEventEmitter diff --git a/packages/test/test-service-load/src/loadTestDataStore.ts b/packages/test/test-service-load/src/loadTestDataStore.ts index 291a16f3b2b3..6efc5ce107ef 100644 --- a/packages/test/test-service-load/src/loadTestDataStore.ts +++ b/packages/test/test-service-load/src/loadTestDataStore.ts @@ -13,7 +13,6 @@ import { } from "@fluidframework/aqueduct/internal"; import { ILoaderOptions } from "@fluidframework/container-definitions/internal"; import { - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope ContainerRuntime, IContainerRuntimeOptions, } from "@fluidframework/container-runtime/internal"; @@ -135,7 +134,6 @@ class LoadTestDataStoreModel { // If we did not create the data store above, load it by getting its url. if (gcDataStore === undefined) { const gcDataStoreId = root.get(gcDataStoreIdKey); - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope const response = await (containerRuntime as ContainerRuntime).resolveHandle({ url: `/${gcDataStoreId}`, }); diff --git a/packages/test/test-utils/src/testContainerRuntimeFactory.ts b/packages/test/test-utils/src/testContainerRuntimeFactory.ts index 6dd6d818f839..138ef3e2c7a7 100644 --- a/packages/test/test-utils/src/testContainerRuntimeFactory.ts +++ b/packages/test/test-utils/src/testContainerRuntimeFactory.ts @@ -5,7 +5,6 @@ import { IContainerContext, IRuntime } from "@fluidframework/container-definitions/internal"; import { - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope ContainerRuntime, DefaultSummaryConfiguration, type IContainerRuntimeOptionsInternal, @@ -58,9 +57,7 @@ interface backCompat_ContainerRuntime { runtimeOptions?: IContainerRuntimeOptionsInternal, containerScope?: FluidObject, existing?: boolean, - // eslint-disable-next-line 
import/no-deprecated -- ContainerRuntime class to be moved to internal scope containerRuntimeCtor?: typeof ContainerRuntime, - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope ): Promise; } @@ -69,7 +66,6 @@ interface backCompat_ContainerRuntime { * @internal */ export const createTestContainerRuntimeFactory = ( - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope containerRuntimeCtor: typeof ContainerRuntime, ) => { return class extends RuntimeFactoryHelper { @@ -92,7 +88,6 @@ export const createTestContainerRuntimeFactory = ( super(); } - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope public async instantiateFirstTime(runtime: ContainerRuntime): Promise { // Back-compat - old code does not return IDataStore for rootContext.attachRuntime() call! // Thus need to leverage old API createDetachedRootDataStore() that is gone in latest releases. @@ -111,7 +106,6 @@ export const createTestContainerRuntimeFactory = ( assert(result === "Success" || result === undefined, "success"); } - // eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope public async instantiateFromExisting(runtime: ContainerRuntime): Promise { // Validate we can load root data stores. // We should be able to load any data store that was created in initializeFirstTime! 
@@ -190,5 +184,4 @@ export const createTestContainerRuntimeFactory = ( * A container runtime factory that allows you to set runtime options * @internal */ -// eslint-disable-next-line import/no-deprecated -- ContainerRuntime class to be moved to internal scope export const TestContainerRuntimeFactory = createTestContainerRuntimeFactory(ContainerRuntime); From 61f22640f91fd81144d4555d84f036555783e70f Mon Sep 17 00:00:00 2001 From: Navin Agarwal <45832642+agarwal-navin@users.noreply.github.com> Date: Wed, 8 Jan 2025 11:59:10 -0800 Subject: [PATCH 10/27] Simplified ScheduleManager and renamed it to InboundBatchAggregator (#23446) Renamed `ScheduleManagerCore` to `InboundBatchAggregator` because its primary objective is to ensure that an incoming batch is fully available in the delta queue before it starts processing. Also, simplfied it as following: - Removed `ScheduleManager` because all it was doing was calling `DeltaScheduler` and emitting `batchBegin` and `batchEnd` events. Moved the event emission directly into container runtime which was recently refactored to consolidate this logic in one place. `DeltaScheduler` now processes batches on these events from container runtime. - Removed the logic to modify `batch` metadata in delta manager's `prepareSend` event handler. This is an artifact of very old logic where adding the batch metadata was distributed between container runtime and delta manager. For a long time now, container runtime is in full control of adding (or not adding) batch metadata to messages in a batch. This was done by https://github.com/microsoft/FluidFramework/pull/11832. - Removed the `scheduleManager.spec.ts` unit tests. All it was doing was validating `batchBegin` and `batchEnd` events which is the same as the ones done in `batching.spec.ts` unit tests. Moreover, it was using custom logic to identify batch beginning and end from the inbound messages so it wasn't really testing any real code. 
[AB#1981](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/1981) --- .../runtime/container-runtime/package.json | 12 +- .../container-runtime/src/containerRuntime.ts | 23 +- .../container-runtime/src/deltaScheduler.ts | 32 +- ...leManager.ts => inboundBatchAggregator.ts} | 173 +++---- .../src/pendingStateManager.ts | 2 +- .../src/test/batching.spec.ts | 38 +- .../src/test/scheduleManager.spec.ts | 424 ------------------ packages/test/functional-tests/package.json | 4 + .../src/test/containerRuntime.spec.ts | 145 +++--- pnpm-lock.yaml | 12 + 10 files changed, 194 insertions(+), 671 deletions(-) rename packages/runtime/container-runtime/src/{scheduleManager.ts => inboundBatchAggregator.ts} (71%) delete mode 100644 packages/runtime/container-runtime/src/test/scheduleManager.spec.ts diff --git a/packages/runtime/container-runtime/package.json b/packages/runtime/container-runtime/package.json index bd5a46090580..4fcc023f5a59 100644 --- a/packages/runtime/container-runtime/package.json +++ b/packages/runtime/container-runtime/package.json @@ -63,16 +63,6 @@ "default": "./dist/deltaScheduler.js" } }, - "./internal/test/scheduleManager": { - "import": { - "types": "./lib/scheduleManager.d.ts", - "default": "./lib/scheduleManager.js" - }, - "require": { - "types": "./dist/scheduleManager.d.ts", - "default": "./dist/scheduleManager.js" - } - }, "./internal/test/blobManager": { "import": { "types": "./lib/blobManager/index.d.ts", @@ -122,7 +112,7 @@ "build:test": "npm run build:test:esm && npm run build:test:cjs", "build:test:cjs": "fluid-tsc commonjs --project ./src/test/tsconfig.cjs.json", "build:test:esm": "tsc --project ./src/test/tsconfig.json", - "check:are-the-types-wrong": "attw --pack . --exclude-entrypoints ./internal/test/containerRuntime ./internal/test/deltaScheduler ./internal/test/scheduleManager ./internal/test/blobManager ./internal/test/summary ./internal/test/gc", + "check:are-the-types-wrong": "attw --pack . 
--exclude-entrypoints ./internal/test/containerRuntime ./internal/test/deltaScheduler ./internal/test/blobManager ./internal/test/summary ./internal/test/gc", "check:biome": "biome check .", "check:exports": "concurrently \"npm:check:exports:*\"", "check:exports:bundle-release-tags": "api-extractor run --config api-extractor/api-extractor-lint-bundle.json", diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index aa66add01236..8760bfe0abd3 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -157,6 +157,7 @@ import { DeltaManagerPendingOpsProxy, DeltaManagerSummarizerProxy, } from "./deltaManagerProxies.js"; +import { DeltaScheduler } from "./deltaScheduler.js"; import { GCNodeType, GarbageCollector, @@ -166,6 +167,7 @@ import { gcGenerationOptionName, type GarbageCollectionMessage, } from "./gc/index.js"; +import { InboundBatchAggregator } from "./inboundBatchAggregator.js"; import { ContainerMessageType, type ContainerRuntimeDocumentSchemaMessage, @@ -199,7 +201,6 @@ import { IPendingLocalState, PendingStateManager, } from "./pendingStateManager.js"; -import { ScheduleManager } from "./scheduleManager.js"; import { DocumentsSchemaController, EnqueueSummarizeResult, @@ -1389,7 +1390,8 @@ export class ContainerRuntime * It is created only by summarizing container (i.e. 
one with clientType === "summarizer") */ private readonly _summarizer?: Summarizer; - private readonly scheduleManager: ScheduleManager; + private readonly deltaScheduler: DeltaScheduler; + private readonly inboundBatchAggregator: InboundBatchAggregator; private readonly blobManager: BlobManager; private readonly pendingStateManager: PendingStateManager; private readonly duplicateBatchDetector: DuplicateBatchDetector | undefined; @@ -1874,11 +1876,16 @@ export class ContainerRuntime closeContainer: (error?: ICriticalContainerError) => this.closeFn(error), }); - this.scheduleManager = new ScheduleManager( + this.deltaScheduler = new DeltaScheduler( this.innerDeltaManager, this, + createChildLogger({ logger: this.logger, namespace: "DeltaScheduler" }), + ); + + this.inboundBatchAggregator = new InboundBatchAggregator( + this.innerDeltaManager, () => this.clientId, - createChildLogger({ logger: this.logger, namespace: "ScheduleManager" }), + createChildLogger({ logger: this.logger, namespace: "InboundBatchAggregator" }), ); const disablePartialFlush = this.mc.config.getBoolean( @@ -2191,6 +2198,8 @@ export class ContainerRuntime this._summarizer?.dispose(); this.channelCollection.dispose(); this.pendingStateManager.dispose(); + this.inboundBatchAggregator.dispose(); + this.deltaScheduler.dispose(); this.emit("dispose"); this.removeAllListeners(); } @@ -2896,7 +2905,7 @@ export class ContainerRuntime private _processedClientSequenceNumber: number | undefined; /** - * Processes inbound message(s). It calls schedule manager according to the messages' location in the batch. + * Processes inbound message(s). It calls delta scheduler according to the messages' location in the batch. * @param messagesWithMetadata - messages to process along with their metadata. * @param locationInBatch - Are we processing the start and/or end of a batch? * @param local - true if the messages were originally generated by the client receiving it. 
@@ -2918,7 +2927,7 @@ export class ContainerRuntime if (locationInBatch.batchStart) { const firstMessage = messagesWithMetadata[0]?.message; assert(firstMessage !== undefined, 0xa31 /* Batch must have at least one message */); - this.scheduleManager.batchBegin(firstMessage); + this.emit("batchBegin", firstMessage); } let error: unknown; @@ -3018,7 +3027,7 @@ export class ContainerRuntime if (locationInBatch.batchEnd) { const lastMessage = messagesWithMetadata[messagesWithMetadata.length - 1]?.message; assert(lastMessage !== undefined, 0xa32 /* Batch must have at least one message */); - this.scheduleManager.batchEnd(error, lastMessage); + this.emit("batchEnd", error, lastMessage); } } } diff --git a/packages/runtime/container-runtime/src/deltaScheduler.ts b/packages/runtime/container-runtime/src/deltaScheduler.ts index 0bdf853d846b..782b328fc515 100644 --- a/packages/runtime/container-runtime/src/deltaScheduler.ts +++ b/packages/runtime/container-runtime/src/deltaScheduler.ts @@ -3,9 +3,10 @@ * Licensed under the MIT License. */ -import { performance } from "@fluid-internal/client-utils"; +import { performance, type TypedEventEmitter } from "@fluid-internal/client-utils"; import { IDeltaManagerFull } from "@fluidframework/container-definitions/internal"; import { ISequencedDocumentMessage } from "@fluidframework/driver-definitions/internal"; +import type { IContainerRuntimeBaseEvents } from "@fluidframework/runtime-definitions/internal"; import { ITelemetryLoggerExt, formatTick } from "@fluidframework/telemetry-utils/internal"; /** @@ -22,7 +23,6 @@ import { ITelemetryLoggerExt, formatTick } from "@fluidframework/telemetry-utils * processed, the time and number of turns it took to process the ops. */ export class DeltaScheduler { - private readonly deltaManager: IDeltaManagerFull; // The time for processing ops in a single turn. 
public static readonly processingTime = 50; @@ -50,16 +50,22 @@ export class DeltaScheduler { | undefined; constructor( - deltaManager: IDeltaManagerFull, + private readonly deltaManager: IDeltaManagerFull, + private readonly runtimeEventsEmitter: TypedEventEmitter, private readonly logger: ITelemetryLoggerExt, ) { - this.deltaManager = deltaManager; - this.deltaManager.inbound.on("idle", () => { - this.inboundQueueIdle(); - }); + this.deltaManager.inbound.on("idle", this.inboundQueueIdle); + runtimeEventsEmitter.on("batchBegin", this.batchBegin); + runtimeEventsEmitter.on("batchEnd", this.batchEnd); } - public batchBegin(message: ISequencedDocumentMessage) { + public dispose() { + this.deltaManager.inbound.off("idle", this.inboundQueueIdle); + this.runtimeEventsEmitter.off("batchBegin", this.batchBegin); + this.runtimeEventsEmitter.off("batchEnd", this.batchEnd); + } + + private readonly batchBegin = (message: ISequencedDocumentMessage) => { if (!this.processingStartTime) { this.processingStartTime = performance.now(); } @@ -76,9 +82,9 @@ export class DeltaScheduler { startTime: performance.now(), }; } - } + }; - public batchEnd(message: ISequencedDocumentMessage) { + private readonly batchEnd = (error: any, message: ISequencedDocumentMessage) => { if (this.schedulingLog) { this.schedulingLog.numberOfBatchesProcessed++; this.schedulingLog.lastSequenceNumber = message.sequenceNumber; @@ -129,9 +135,9 @@ export class DeltaScheduler { this.processingStartTime = undefined; } } - } + }; - private inboundQueueIdle() { + private readonly inboundQueueIdle = () => { if (this.schedulingLog) { // Add the time taken for processing the final ops to the total processing time in the // telemetry log object. @@ -161,7 +167,7 @@ export class DeltaScheduler { // Reset the processing times. this.processingStartTime = undefined; this.currentAllowedProcessingTimeForTurn = DeltaScheduler.processingTime; - } + }; /** * This function tells whether we should run the scheduler. 
diff --git a/packages/runtime/container-runtime/src/scheduleManager.ts b/packages/runtime/container-runtime/src/inboundBatchAggregator.ts similarity index 71% rename from packages/runtime/container-runtime/src/scheduleManager.ts rename to packages/runtime/container-runtime/src/inboundBatchAggregator.ts index 2a11537a303a..5545dac3e80f 100644 --- a/packages/runtime/container-runtime/src/scheduleManager.ts +++ b/packages/runtime/container-runtime/src/inboundBatchAggregator.ts @@ -3,24 +3,18 @@ * Licensed under the MIT License. */ -import type { EventEmitter } from "@fluid-internal/client-utils"; import { performance } from "@fluid-internal/client-utils"; import { IDeltaManagerFull } from "@fluidframework/container-definitions/internal"; import { assert } from "@fluidframework/core-utils/internal"; -import { - IDocumentMessage, - ISequencedDocumentMessage, -} from "@fluidframework/driver-definitions/internal"; +import { ISequencedDocumentMessage } from "@fluidframework/driver-definitions/internal"; import { isRuntimeMessage } from "@fluidframework/driver-utils/internal"; import { ITelemetryLoggerExt, DataCorruptionError, DataProcessingError, - createChildLogger, extractSafePropertiesFromMessage, } from "@fluidframework/telemetry-utils/internal"; -import { DeltaScheduler } from "./deltaScheduler.js"; import { IBatchMetadata } from "./metadata.js"; import { pkgVersion } from "./packageVersion.js"; @@ -31,46 +25,10 @@ type IRuntimeMessageMetadata = }; /** - * This class has the following responsibilities: - * - * 1. It tracks batches as we process ops and raises "batchBegin" and "batchEnd" events. - * As part of it, it validates batch correctness (i.e. no system ops in the middle of batch) - * - * 2. It creates instance of ScheduleManagerCore that ensures we never start processing ops from batch - * unless all ops of the batch are in. + * This class ensures that we aggregate a complete batch of incoming ops before processing them. 
It basically ensures + * that we never start processing ops in a batch IF we do not have all ops in the batch. */ -export class ScheduleManager { - private readonly deltaScheduler: DeltaScheduler; - - constructor( - private readonly deltaManager: IDeltaManagerFull, - private readonly emitter: EventEmitter, - readonly getClientId: () => string | undefined, - private readonly logger: ITelemetryLoggerExt, - ) { - this.deltaScheduler = new DeltaScheduler( - this.deltaManager, - createChildLogger({ logger: this.logger, namespace: "DeltaScheduler" }), - ); - void new ScheduleManagerCore(deltaManager, getClientId, logger); - } - - public batchBegin(message: ISequencedDocumentMessage) { - this.emitter.emit("batchBegin", message); - this.deltaScheduler.batchBegin(message); - } - - public batchEnd(error: any | undefined, message: ISequencedDocumentMessage) { - this.emitter.emit("batchEnd", error, message); - this.deltaScheduler.batchEnd(message); - } -} - -/** - * This class controls pausing and resuming of inbound queue to ensure that we never - * start processing ops in a batch IF we do not have all ops in the batch. - */ -class ScheduleManagerCore { +export class InboundBatchAggregator { private pauseSequenceNumber: number | undefined; private currentBatchClientId: string | undefined; private localPaused = false; @@ -82,38 +40,8 @@ class ScheduleManagerCore { private readonly getClientId: () => string | undefined, private readonly logger: ITelemetryLoggerExt, ) { - // Listen for delta manager sends and add batch metadata to messages - this.deltaManager.on("prepareSend", (messages: IDocumentMessage[]) => { - if (messages.length === 0) { - return; - } - - // First message will have the batch flag set to true if doing a batched send - const firstMessageMetadata = messages[0].metadata as IRuntimeMessageMetadata; - if (!firstMessageMetadata?.batch) { - return; - } - - // If the batch contains only a single op, clear the batch flag. 
- if (messages.length === 1) { - delete firstMessageMetadata.batch; - return; - } - - // Set the batch flag to false on the last message to indicate the end of the send batch - const lastMessage = messages[messages.length - 1]; - // TODO: It's not clear if this shallow clone is required, as opposed to just setting "batch" to false. - - lastMessage.metadata = { ...(lastMessage.metadata as any), batch: false }; - }); - // Listen for updates and peek at the inbound - this.deltaManager.inbound.on("push", (message: ISequencedDocumentMessage) => { - this.trackPending(message); - }); - - // Start with baseline - empty inbound queue. - assert(!this.localPaused, 0x293 /* "initial state" */); + this.deltaManager.inbound.on("push", this.trackPending); const allPending = this.deltaManager.inbound.toArray(); for (const pending of allPending) { @@ -121,16 +49,21 @@ class ScheduleManagerCore { } // We are intentionally directly listening to the "op" to inspect system ops as well. - // If we do not observe system ops, we are likely to hit 0x296 assert when system ops + // If we do not observe system ops, we are likely to hit an error when system ops // precedes start of incomplete batch. - this.deltaManager.on("op", (message) => this.afterOpProcessing(message)); + this.deltaManager.on("op", this.afterOpProcessing); + } + + public dispose() { + this.deltaManager.off("op", this.afterOpProcessing); + this.deltaManager.inbound.off("push", this.trackPending); } /** - * The only public function in this class - called when we processed an op, - * to make decision if op processing should be paused or not after that. + * Callback for DeltaManager's "op" event, for us to make decision if op processing should + * be paused or not after that. 
*/ - public afterOpProcessing(message: ISequencedDocumentMessage) { + private readonly afterOpProcessing = (message: ISequencedDocumentMessage) => { assert( !this.localPaused, 0x294 /* "can't have op processing paused if we are processing an op" */, @@ -157,7 +90,7 @@ class ScheduleManagerCore { throw DataProcessingError.create( // Former assert 0x296 "Incomplete batch", - "ScheduleManager", + "InboundBatchAggregator", message, { type: message.type, @@ -174,47 +107,12 @@ class ScheduleManagerCore { this.pauseQueue(); } } - } - - private pauseQueue() { - assert(!this.localPaused, 0x297 /* "always called from resumed state" */); - this.localPaused = true; - this.timePaused = performance.now(); - // eslint-disable-next-line @typescript-eslint/no-floating-promises - this.deltaManager.inbound.pause(); - } - - private resumeQueue(startBatch: number, messageEndBatch: ISequencedDocumentMessage) { - const endBatch = messageEndBatch.sequenceNumber; - const duration = this.localPaused ? performance.now() - this.timePaused : undefined; - - this.batchCount++; - if (this.batchCount % 1000 === 1) { - this.logger.sendTelemetryEvent({ - eventName: "BatchStats", - sequenceNumber: endBatch, - length: endBatch - startBatch + 1, - msnDistance: endBatch - messageEndBatch.minimumSequenceNumber, - duration, - batchCount: this.batchCount, - interrupted: this.localPaused, - }); - } - - // Return early if no change in value - if (!this.localPaused) { - return; - } - - this.localPaused = false; - - this.deltaManager.inbound.resume(); - } + }; /** * Called for each incoming op (i.e. inbound "push" notification) */ - private trackPending(message: ISequencedDocumentMessage) { + private readonly trackPending = (message: ISequencedDocumentMessage) => { assert( this.deltaManager.inbound.length !== 0, 0x298 /* "we have something in the queue that generates this event" */, @@ -324,5 +222,40 @@ class ScheduleManagerCore { // Continuation of current batch. 
Do nothing assert(this.currentBatchClientId !== undefined, 0x2a1 /* "logic error" */); } + }; + + private pauseQueue() { + assert(!this.localPaused, 0x297 /* "always called from resumed state" */); + this.localPaused = true; + this.timePaused = performance.now(); + // eslint-disable-next-line @typescript-eslint/no-floating-promises + this.deltaManager.inbound.pause(); + } + + private resumeQueue(startBatch: number, messageEndBatch: ISequencedDocumentMessage) { + const endBatch = messageEndBatch.sequenceNumber; + const duration = this.localPaused ? performance.now() - this.timePaused : undefined; + + this.batchCount++; + if (this.batchCount % 1000 === 1) { + this.logger.sendTelemetryEvent({ + eventName: "BatchStats", + sequenceNumber: endBatch, + length: endBatch - startBatch + 1, + msnDistance: endBatch - messageEndBatch.minimumSequenceNumber, + duration, + batchCount: this.batchCount, + interrupted: this.localPaused, + }); + } + + // Return early if no change in value + if (!this.localPaused) { + return; + } + + this.localPaused = false; + + this.deltaManager.inbound.resume(); } } diff --git a/packages/runtime/container-runtime/src/pendingStateManager.ts b/packages/runtime/container-runtime/src/pendingStateManager.ts index 69b9ece6c000..2b6e72ebfb51 100644 --- a/packages/runtime/container-runtime/src/pendingStateManager.ts +++ b/packages/runtime/container-runtime/src/pendingStateManager.ts @@ -638,7 +638,7 @@ export class PendingStateManager implements IDisposable { /** * We must preserve the distinct batches on resubmit. * Note: It is not possible for the PendingStateManager to receive a partially acked batch. It will - * either receive the whole batch ack or nothing at all. @see ScheduleManager for how this works. + * either receive the whole batch ack or nothing at all. See {@link InboundBatchAggregator} for how this works. 
*/ if (batchMetadataFlag === undefined) { // Single-message batch diff --git a/packages/runtime/container-runtime/src/test/batching.spec.ts b/packages/runtime/container-runtime/src/test/batching.spec.ts index 639f6511f810..d2c573b4db2a 100644 --- a/packages/runtime/container-runtime/src/test/batching.spec.ts +++ b/packages/runtime/container-runtime/src/test/batching.spec.ts @@ -23,7 +23,6 @@ import { SinonFakeTimers, createSandbox, useFakeTimers } from "sinon"; import type { ChannelCollection } from "../channelCollection.js"; import { ContainerRuntime } from "../containerRuntime.js"; -import { DeltaScheduler } from "../deltaScheduler.js"; import { ContainerMessageType } from "../messageTypes.js"; describe("Runtime batching", () => { @@ -132,14 +131,14 @@ describe("Runtime batching", () => { * processing the messages in the queue. */ function processBatch(batch: ISequencedDocumentMessage[], cr: ContainerRuntime) { - // Push the messages in the inbound queue. This is done because ScheduleManager listens to the "push" event + // Push the messages in the inbound queue. This is done because InboundBatchAggregator listens to the "push" event // emitted by the inbound queue to do batch validations. for (const batchMessage of batch) { mockDeltaManager.inbound.push(batchMessage); } // Process the messages in the inbound queue. - // Process is called on the delta manager because ScheduleManager listens to the "op" event on delta manager + // Process is called on the delta manager because InboundBatchAggregator listens to the "op" event on delta manager // as well to do validation. // Process is called on the container runtime because it is the one that actually processes the messages and // has its own set of validations. @@ -269,30 +268,14 @@ describe("Runtime batching", () => { /** * These tests validate that container runtime handles batch begin and end correctly. 
It should emit - * batch begin and end events and delta scheduler's batch begin and end methods should be called. + * batch begin and end events. */ describe("Batch begin and end", () => { let batchBeginCount = 0; let batchEndCount = 0; let containerRuntimeStub: sinon.SinonStub; - let schedulerBatchBeginStub: sinon.SinonStub; - let schedulerBatchEndStub: sinon.SinonStub; - - type ContainerRuntimeWithScheduler = Omit & { - scheduleManager: { deltaScheduler: DeltaScheduler }; - }; beforeEach(async () => { - const containerRuntimeWithDeltaScheduler = - containerRuntime as unknown as ContainerRuntimeWithScheduler; - schedulerBatchBeginStub = sandbox.stub( - containerRuntimeWithDeltaScheduler.scheduleManager.deltaScheduler, - "batchBegin", - ); - schedulerBatchEndStub = sandbox.stub( - containerRuntimeWithDeltaScheduler.scheduleManager.deltaScheduler, - "batchEnd", - ); containerRuntimeStub = patchContainerRuntime(containerRuntime); containerRuntime.on("batchBegin", () => { batchBeginCount++; @@ -314,20 +297,9 @@ describe("Runtime batching", () => { }); } - function validateBatchBeginAndEnd(schedulerCalled: boolean = true) { + function validateBatchBeginAndEnd() { assert.strictEqual(batchBeginCount, 1, "Batch begin should have been emitted once"); assert.strictEqual(batchEndCount, 1, "Batch end should have been emitted once"); - if (!schedulerCalled) { - return; - } - assert( - schedulerBatchBeginStub.calledOnce, - "Delta scheduler batch begin should have been called once", - ); - assert( - schedulerBatchEndStub.calledOnce, - "Delta scheduler batch end should have been called once", - ); } it("handles batch begin and end for successfully processing modern runtime messages", async () => { @@ -488,7 +460,7 @@ describe("Runtime batching", () => { "Non batch messages should be processed successfully", ); - validateBatchBeginAndEnd(false /* schedulerCalled */); + validateBatchBeginAndEnd(); }); }); }); diff --git 
a/packages/runtime/container-runtime/src/test/scheduleManager.spec.ts b/packages/runtime/container-runtime/src/test/scheduleManager.spec.ts deleted file mode 100644 index be594207815c..000000000000 --- a/packages/runtime/container-runtime/src/test/scheduleManager.spec.ts +++ /dev/null @@ -1,424 +0,0 @@ -/*! - * Copyright (c) Microsoft Corporation and contributors. All rights reserved. - * Licensed under the MIT License. - */ - -import { strict as assert } from "assert"; - -import { EventEmitter } from "@fluid-internal/client-utils"; -import { - MessageType, - ISequencedDocumentMessage, -} from "@fluidframework/driver-definitions/internal"; -import { createChildLogger } from "@fluidframework/telemetry-utils/internal"; -import { MockDeltaManager } from "@fluidframework/test-runtime-utils/internal"; - -import { asBatchMetadata } from "../metadata.js"; -import { ScheduleManager } from "../scheduleManager.js"; - -describe("ScheduleManager", () => { - describe("Batch processing events", () => { - let batchBegin: number = 0; - let batchEnd: number = 0; - let sequenceNumber: number = 0; - let emitter: EventEmitter; - let deltaManager: MockDeltaManager; - let scheduleManager: ScheduleManager; - const testClientId = "test-client"; - let batchClientId: string | undefined; - - beforeEach(() => { - emitter = new EventEmitter(); - deltaManager = new MockDeltaManager(); - deltaManager.inbound.processCallback = (message: ISequencedDocumentMessage) => { - // Simulate batch accumulation by container runtime's remote message processor. - // It saves batch messages until the entire batch is received. It calls batch begin - // and end on schedule manager accordingly. - const batchMetadataFlag = asBatchMetadata(message.metadata)?.batch; - if (batchMetadataFlag === true) { - assert( - batchClientId === undefined, - "Received batch message while another batch is in progress", - ); - scheduleManager.batchBegin(message); - batchClientId = message.clientId ?? 
undefined; - } else if (batchMetadataFlag === false) { - assert(batchClientId !== undefined, "Received batch end message without batch"); - scheduleManager.batchEnd(undefined /* error */, message); - batchClientId = undefined; - } else if (batchClientId === undefined) { - scheduleManager.batchBegin(message); - scheduleManager.batchEnd(undefined /* error */, message); - } - deltaManager.emit("op", message); - }; - scheduleManager = new ScheduleManager( - deltaManager, - emitter, - () => testClientId, - createChildLogger({ namespace: "fluid:testScheduleManager" }), - ); - - emitter.on("batchBegin", () => { - // When we receive a "batchBegin" event, we should not have any outstanding - // events, i.e., batchBegin and batchEnd should be equal. - assert.strictEqual( - batchBegin, - batchEnd, - "Received batchBegin before previous batchEnd", - ); - batchBegin++; - }); - - emitter.on("batchEnd", () => { - batchEnd++; - // Every "batchEnd" event should correspond to a "batchBegin" event, i.e., - // batchBegin and batchEnd should be equal. - assert.strictEqual( - batchBegin, - batchEnd, - "Received batchEnd without corresponding batchBegin", - ); - }); - }); - - afterEach(() => { - batchBegin = 0; - batchEnd = 0; - sequenceNumber = 0; - }); - - /** - * Pushes single op to the inbound queue. Adds proper sequence numbers to them - */ - function pushOp(partialMessage: Partial) { - sequenceNumber++; - const message = { ...partialMessage, sequenceNumber }; - deltaManager.inbound.push(message as ISequencedDocumentMessage); - } - - /** - * awaits until all ops that could be processed are processed. - */ - async function processOps() { - const inbound = deltaManager.inbound; - while (!inbound.paused && inbound.length > 0) { - await Promise.resolve(); - } - } - - it("Single non-batch message", async () => { - const message: Partial = { - clientId: testClientId, - type: MessageType.Operation, - }; - - // Send a non-batch message. 
- pushOp(message); - - await processOps(); - - assert.strictEqual(deltaManager.inbound.length, 0, "Did not process all ops"); - assert.strictEqual(batchBegin, 1, "Did not receive correct batchBegin events"); - assert.strictEqual(batchEnd, 1, "Did not receive correct batchEnd events"); - }); - - it("Multiple non-batch messages", async () => { - const message: Partial = { - clientId: testClientId, - type: MessageType.Operation, - }; - - // Sent 5 non-batch messages. - pushOp(message); - pushOp(message); - pushOp(message); - pushOp(message); - pushOp(message); - - await processOps(); - - assert.strictEqual(deltaManager.inbound.length, 0, "Did not process all ops"); - assert.strictEqual(batchBegin, 5, "Did not receive correct batchBegin events"); - assert.strictEqual(batchEnd, 5, "Did not receive correct batchEnd events"); - }); - - it("Message with non batch-related metadata", async () => { - const message: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { foo: 1 }, - }; - - pushOp(message); - await processOps(); - - // We should have a "batchBegin" and a "batchEnd" event for the batch. - assert.strictEqual(deltaManager.inbound.length, 0, "Did not process all ops"); - assert.strictEqual( - batchBegin, - 1, - "Did not receive correct batchBegin event for the batch", - ); - assert.strictEqual(batchEnd, 1, "Did not receive correct batchEnd event for the batch"); - }); - - it("Messages in a single batch", async () => { - const batchBeginMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { batch: true }, - }; - - const batchMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - }; - - const batchEndMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { batch: false }, - }; - - // Send a batch with 4 messages. 
- pushOp(batchBeginMessage); - pushOp(batchMessage); - pushOp(batchMessage); - - await processOps(); - assert.strictEqual( - deltaManager.inbound.length, - 3, - "Some of partial batch ops were processed", - ); - - pushOp(batchEndMessage); - await processOps(); - - // We should have only received one "batchBegin" and one "batchEnd" event for the batch. - assert.strictEqual(deltaManager.inbound.length, 0, "Did not process all ops"); - assert.strictEqual( - batchBegin, - 1, - "Did not receive correct batchBegin event for the batch", - ); - assert.strictEqual(batchEnd, 1, "Did not receive correct batchEnd event for the batch"); - }); - - it("two batches", async () => { - const batchBeginMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { batch: true }, - }; - - const batchMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - }; - - const batchEndMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { batch: false }, - }; - - // Pause to not allow ops to be processed while we accumulated them. - await deltaManager.inbound.pause(); - - // Send a batch with 4 messages. - pushOp(batchBeginMessage); - pushOp(batchMessage); - pushOp(batchMessage); - pushOp(batchEndMessage); - - // Add incomplete batch - pushOp(batchBeginMessage); - pushOp(batchMessage); - pushOp(batchMessage); - - assert.strictEqual( - deltaManager.inbound.length, - 7, - "none of the batched ops are processed yet", - ); - - void deltaManager.inbound.resume(); - await processOps(); - - assert.strictEqual( - deltaManager.inbound.length, - 3, - "none of the second batch ops are processed yet", - ); - assert.strictEqual( - batchBegin, - 1, - "Did not receive correct batchBegin event for the batch", - ); - assert.strictEqual(batchEnd, 1, "Did not receive correct batchEnd event for the batch"); - - // End the batch - all ops should be processed. 
- pushOp(batchEndMessage); - await processOps(); - - assert.strictEqual(deltaManager.inbound.length, 0, "processed all ops"); - assert.strictEqual( - batchBegin, - 2, - "Did not receive correct batchBegin event for the batch", - ); - assert.strictEqual(batchEnd, 2, "Did not receive correct batchEnd event for the batch"); - }); - - it("non-batched ops followed by batch", async () => { - const batchBeginMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { batch: true }, - }; - - const batchMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - }; - - const batchEndMessage: Partial = { - clientId: testClientId, - type: MessageType.Operation, - metadata: { batch: false }, - }; - - // Pause to not allow ops to be processed while we accumulated them. - await deltaManager.inbound.pause(); - - // Send a batch with 2 messages. - pushOp(batchMessage); - pushOp(batchMessage); - - // Add incomplete batch - pushOp(batchBeginMessage); - pushOp(batchMessage); - pushOp(batchMessage); - - await processOps(); - - assert.strictEqual( - deltaManager.inbound.length, - 5, - "none of the batched ops are processed yet", - ); - - void deltaManager.inbound.resume(); - await processOps(); - - assert.strictEqual( - deltaManager.inbound.length, - 3, - "none of the second batch ops are processed yet", - ); - - // End the batch - all ops should be processed. 
- pushOp(batchEndMessage); - await processOps(); - - assert.strictEqual(deltaManager.inbound.length, 0, "processed all ops"); - assert.strictEqual( - batchBegin, - 3, - "Did not receive correct batchBegin event for the batch", - ); - assert.strictEqual(batchEnd, 3, "Did not receive correct batchEnd event for the batch"); - }); - - function testWrongBatches() { - const clientId1: string = "test-client-1"; - const clientId2: string = "test-client-2"; - - const batchBeginMessage: Partial = { - clientId: clientId1, - type: MessageType.Operation, - metadata: { batch: true }, - }; - - const batchMessage: Partial = { - clientId: clientId1, - type: MessageType.Operation, - }; - - const messagesToFail: Partial[] = [ - // System op from same client - { - clientId: clientId1, - type: MessageType.NoOp, - }, - - // Batch messages interleaved with a batch begin message from same client - batchBeginMessage, - - // Send a message from another client. This should result in a a violation! - { - clientId: clientId2, - type: MessageType.Operation, - }, - - // Send a message from another client with non batch-related metadata. This should result - // in a "batchEnd" event for the previous batch since the client id changes. Also, we - // should get a "batchBegin" and a "batchEnd" event for the new client. - { - clientId: clientId2, - type: MessageType.Operation, - metadata: { foo: 1 }, - }, - - // Send a batch from another client. This should result in a "batchEnd" event for the - // previous batch since the client id changes. Also, we should get one "batchBegin" and - // one "batchEnd" event for the batch from the new client. - { - clientId: clientId2, - type: MessageType.Operation, - metadata: { batch: true }, - }, - ]; - - let counter = 0; - for (const messageToFail of messagesToFail) { - counter++; - it(`Partial batch messages, case ${counter}`, async () => { - // Send a batch with 3 messages from first client but don't send batch end message. 
- pushOp(batchBeginMessage); - pushOp(batchMessage); - pushOp(batchMessage); - - await processOps(); - assert.strictEqual( - deltaManager.inbound.length, - 3, - "Some of partial batch ops were processed", - ); - - assert.throws(() => pushOp(messageToFail)); - - assert.strictEqual( - deltaManager.inbound.length, - 4, - "Some of batch ops were processed", - ); - assert.strictEqual( - batchBegin, - 0, - "Did not receive correct batchBegin event for the batch", - ); - assert.strictEqual( - batchEnd, - 0, - "Did not receive correct batchBegin event for the batch", - ); - }); - } - } - - testWrongBatches(); - }); -}); diff --git a/packages/test/functional-tests/package.json b/packages/test/functional-tests/package.json index ca193664efea..37eea6e20978 100644 --- a/packages/test/functional-tests/package.json +++ b/packages/test/functional-tests/package.json @@ -68,8 +68,10 @@ "@fluidframework/build-common": "^2.0.3", "@fluidframework/build-tools": "^0.51.0", "@fluidframework/cell": "workspace:~", + "@fluidframework/container-definitions": "workspace:~", "@fluidframework/container-loader": "workspace:~", "@fluidframework/container-runtime": "workspace:~", + "@fluidframework/container-runtime-definitions": "workspace:~", "@fluidframework/datastore-definitions": "workspace:~", "@fluidframework/driver-definitions": "workspace:~", "@fluidframework/eslint-config-fluid": "^5.6.0", @@ -86,6 +88,7 @@ "@types/events_pkg": "npm:@types/events@^3.0.0", "@types/mocha": "^10.0.10", "@types/node": "^18.19.0", + "@types/sinon": "^17.0.3", "c8": "^8.0.1", "copyfiles": "^2.4.1", "cross-env": "^7.0.3", @@ -96,6 +99,7 @@ "moment": "^2.21.0", "prettier": "~3.0.3", "rimraf": "^4.4.0", + "sinon": "^18.0.1", "ts-loader": "^9.5.1", "typescript": "~5.4.5", "webpack": "^5.94.0", diff --git a/packages/test/functional-tests/src/test/containerRuntime.spec.ts b/packages/test/functional-tests/src/test/containerRuntime.spec.ts index 8b645e6f5fc3..671691fe7268 100644 --- 
a/packages/test/functional-tests/src/test/containerRuntime.spec.ts +++ b/packages/test/functional-tests/src/test/containerRuntime.spec.ts @@ -9,26 +9,37 @@ import { MockDocumentDeltaConnection, MockDocumentService, } from "@fluid-private/test-loader-utils"; +import { + AttachState, + IContainerContext, + ICriticalContainerError, + IRuntime, +} from "@fluidframework/container-definitions/internal"; // eslint-disable-next-line import/no-internal-modules import { ConnectionManager } from "@fluidframework/container-loader/internal/test/connectionManager"; // eslint-disable-next-line import/no-internal-modules import { IConnectionManagerFactoryArgs } from "@fluidframework/container-loader/internal/test/contracts"; // eslint-disable-next-line import/no-internal-modules import { DeltaManager } from "@fluidframework/container-loader/internal/test/deltaManager"; +import { + ContainerMessageType, + loadContainerRuntime, +} from "@fluidframework/container-runtime/internal"; // eslint-disable-next-line import/no-internal-modules import { DeltaScheduler } from "@fluidframework/container-runtime/internal/test/deltaScheduler"; -// ADO:1981 -// eslint-disable-next-line import/no-internal-modules -import { ScheduleManager } from "@fluidframework/container-runtime/internal/test/scheduleManager"; +import { IContainerRuntime } from "@fluidframework/container-runtime-definitions/internal"; import { IClient } from "@fluidframework/driver-definitions"; import { ISequencedDocumentSystemMessage, MessageType, ISequencedDocumentMessage, } from "@fluidframework/driver-definitions/internal"; -import { createChildLogger } from "@fluidframework/telemetry-utils/internal"; -import events_pkg from "events_pkg"; -const { EventEmitter } = events_pkg; +import { + createChildLogger, + mixinMonitoringContext, +} from "@fluidframework/telemetry-utils/internal"; +import { MockAudience, MockQuorumClients } from "@fluidframework/test-runtime-utils/internal"; +import { SinonFakeTimers, useFakeTimers } from 
"sinon"; describe("Container Runtime", () => { /** @@ -38,13 +49,33 @@ describe("Container Runtime", () => { */ describe("Async op processing", () => { let deltaManager: DeltaManager; - let scheduleManager: ScheduleManager; let deltaConnection: MockDocumentDeltaConnection; + let containerRuntime: IContainerRuntime & IRuntime; let seq: number; const docId = "docId"; let batchBegin: number = 0; let batchEnd: number = 0; - let batchClientId: string | undefined; + let clock: SinonFakeTimers; + + // Create a mock container context to be used with container runtime. + const getMockContext = ( + dm: DeltaManager, + ): Partial => { + const mockContext = { + attachState: AttachState.Attached, + deltaManager: dm, + audience: new MockAudience(), + quorum: new MockQuorumClients(), + taggedLogger: mixinMonitoringContext(createChildLogger({})).logger, + clientDetails: { capabilities: { interactive: true } }, + closeFn: (_error?: ICriticalContainerError): void => {}, + updateDirtyContainerState: (_dirty: boolean) => {}, + getLoadedFromVersion: () => undefined, + clientId: "test-client-1", + connected: true, + }; + return mockContext; + }; const startDeltaManager = async (): Promise => new Promise((resolve) => { @@ -73,6 +104,11 @@ describe("Container Runtime", () => { minimumSequenceNumber: 0, sequenceNumber: seq++, type: MessageType.Operation, + // Use Rejoin message type to avoid processing the op. Rejoin is a no-op in container runtime. + contents: { + type: ContainerMessageType.Rejoin, + contents: "", + }, }; messages.push(message); } @@ -82,44 +118,20 @@ describe("Container Runtime", () => { // Function to process an inbound op. It adds delay to simulate time taken in processing an op. function processOp(message: ISequencedDocumentMessage): void { - // Simulate batch accumulation by container runtime's remote message processor. - // It saves batch messages until the entire batch is received. It calls batch begin - // and end on schedule manager accordingly. 
- const batchMetadataFlag = (message.metadata as { batch?: boolean } | undefined)?.batch; - let batchStart: boolean = false; - let batchComplete: boolean = false; - if (batchMetadataFlag === true) { - assert( - batchClientId === undefined, - "Received batch message while another batch is in progress", - ); - batchClientId = message.clientId ?? undefined; - batchStart = true; - } else if (batchMetadataFlag === false) { - assert(batchClientId !== undefined, "Received batch end message without batch"); - batchClientId = undefined; - batchComplete = true; - } else if (batchClientId === undefined) { - batchStart = true; - batchComplete = true; - } - - if (batchStart) { - scheduleManager.batchBegin(message); - } - - // Add delay such that each op takes greater than the DeltaScheduler's processing time to process. + // Add delay to container runtime's op processing such that each op takes greater than the + // DeltaScheduler's processing time to process. const processingDelay = DeltaScheduler.processingTime + 10; - const startTime = Date.now(); - while (Date.now() - startTime < processingDelay) {} - - if (batchComplete) { - scheduleManager.batchEnd(undefined /* error */, message); - } - + containerRuntime.once("op", () => { + clock.tick(processingDelay); + }); + containerRuntime.process(message, false); deltaManager.emit("op", message); } + before(() => { + clock = useFakeTimers({ shouldAdvanceTime: true }); + }); + beforeEach(async () => { seq = 1; deltaConnection = new MockDocumentDeltaConnection("test"); @@ -144,15 +156,19 @@ describe("Container Runtime", () => { ), ); - const emitter = new EventEmitter(); - scheduleManager = new ScheduleManager( - deltaManager, - emitter, - () => "test-client", // clientId, - createChildLogger({ namespace: "fluid:testScheduleManager" }), - ); + const mockProvideEntryPoint = async () => ({ + myProp: "myValue", + }); + containerRuntime = await loadContainerRuntime({ + context: getMockContext(deltaManager) as IContainerContext, + 
registryEntries: [], + existing: true, + runtimeOptions: {}, + provideEntryPoint: mockProvideEntryPoint, + }); + assert(containerRuntime !== undefined, "Container runtime should be defined"); - emitter.on("batchBegin", () => { + containerRuntime.on("batchBegin", () => { // When we receive a "batchBegin" event, we should not have any outstanding // events, i.e., batchBegin and batchEnd should be equal. assert.strictEqual( @@ -163,7 +179,7 @@ describe("Container Runtime", () => { batchBegin++; }); - emitter.on("batchEnd", () => { + containerRuntime.on("batchEnd", () => { batchEnd++; // Every "batchEnd" event should correspond to a "batchBegin" event, i.e., // batchBegin and batchEnd should be equal. @@ -184,10 +200,15 @@ describe("Container Runtime", () => { }); afterEach(() => { + clock.reset(); batchBegin = 0; batchEnd = 0; }); + after(() => { + clock.restore(); + }); + it("Batch messages that take longer than DeltaScheduler's processing time to process", async () => { await startDeltaManager(); // Since each message takes more than DeltaScheduler.processingTime to process (see processOp above), @@ -204,8 +225,8 @@ describe("Container Runtime", () => { // Batch messages are processed in a single turn. So, we should have received the batch events. assert.strictEqual( - 1, batchBegin, + 1, "Did not receive correct batchBegin event for the batch", ); assert.strictEqual(1, batchEnd, "Did not receive correct batchEnd event for the batch"); @@ -239,13 +260,13 @@ describe("Container Runtime", () => { // We should have received all the batch events. assert.strictEqual( - count, batchBegin, + count, "Did not receive correct batchBegin event for the batch", ); assert.strictEqual( - count, batchEnd, + count, "Did not receive correct batchEnd event for the batch", ); }); @@ -267,11 +288,11 @@ describe("Container Runtime", () => { // We should have received the batch events for the non-batch message in the first turn. 
assert.strictEqual( - 1, batchBegin, + 1, "Did not receive correct batchBegin event for the batch", ); - assert.strictEqual(1, batchEnd, "Did not receive correct batchEnd event for the batch"); + assert.strictEqual(batchEnd, 1, "Did not receive correct batchEnd event for the batch"); // Yield the event loop so that the batch messages can be processed. await yieldEventLoop(); @@ -279,11 +300,11 @@ describe("Container Runtime", () => { // We should have now received the batch events for the batch ops since they would have processed in // a single turn. assert.strictEqual( - 2, batchBegin, + 2, "Did not receive correct batchBegin event for the batch", ); - assert.strictEqual(2, batchEnd, "Did not receive correct batchEnd event for the batch"); + assert.strictEqual(batchEnd, 2, "Did not receive correct batchEnd event for the batch"); }); it(`Batch messages followed by a non-batch message that take longer than @@ -303,11 +324,11 @@ describe("Container Runtime", () => { // We should have received the batch events for the batch messages in the first turn. assert.strictEqual( - 1, batchBegin, + 1, "Did not receive correct batchBegin event for the batch", ); - assert.strictEqual(1, batchEnd, "Did not receive correct batchEnd event for the batch"); + assert.strictEqual(batchEnd, 1, "Did not receive correct batchEnd event for the batch"); // Yield the event loop so that the single non-batch op can be processed. await yieldEventLoop(); @@ -315,11 +336,11 @@ describe("Container Runtime", () => { // We should have now received the batch events for the non-batch op since it would have processed in // a single turn. 
assert.strictEqual( - 2, batchBegin, + 2, "Did not receive correct batchBegin event for the batch", ); - assert.strictEqual(2, batchEnd, "Did not receive correct batchEnd event for the batch"); + assert.strictEqual(batchEnd, 2, "Did not receive correct batchEnd event for the batch"); }); it("Reconnects after receiving a leave op", async () => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0944bff6ce5f..d15f5d7a9160 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -13407,12 +13407,18 @@ importers: '@fluidframework/cell': specifier: workspace:~ version: link:../../dds/cell + '@fluidframework/container-definitions': + specifier: workspace:~ + version: link:../../common/container-definitions '@fluidframework/container-loader': specifier: workspace:~ version: link:../../loader/container-loader '@fluidframework/container-runtime': specifier: workspace:~ version: link:../../runtime/container-runtime + '@fluidframework/container-runtime-definitions': + specifier: workspace:~ + version: link:../../runtime/container-runtime-definitions '@fluidframework/datastore-definitions': specifier: workspace:~ version: link:../../runtime/datastore-definitions @@ -13461,6 +13467,9 @@ importers: '@types/node': specifier: ^18.19.0 version: 18.19.67 + '@types/sinon': + specifier: ^17.0.3 + version: 17.0.3 c8: specifier: ^8.0.1 version: 8.0.1 @@ -13491,6 +13500,9 @@ importers: rimraf: specifier: ^4.4.0 version: 4.4.1 + sinon: + specifier: ^18.0.1 + version: 18.0.1 ts-loader: specifier: ^9.5.1 version: 9.5.1(typescript@5.4.5)(webpack@5.97.1) From 90bc246b2dfdc832d9bb12f35f18cdcf30e6a637 Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Wed, 8 Jan 2025 12:15:26 -0800 Subject: [PATCH 11/27] fix(api-markdown-documenter): Fix `minimumReleaseLevel` property typing (#23506) The property was incorrectly using `Omit` instead of `Exclude`, the latter being the correct options for excluding a member of a type union. 
--- .../api-report/api-markdown-documenter.alpha.api.md | 2 +- .../api-report/api-markdown-documenter.beta.api.md | 2 +- .../api-report/api-markdown-documenter.public.api.md | 2 +- .../src/api-item-transforms/configuration/DocumentationSuite.ts | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md index 048d96b1e324..62d1934aa871 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md @@ -315,7 +315,7 @@ export interface DocumentationSuiteConfiguration { readonly hierarchyBoundaries: HierarchyBoundaries; readonly includeBreadcrumb: boolean; readonly includeTopLevelDocumentHeading: boolean; - readonly minimumReleaseLevel: Omit; + readonly minimumReleaseLevel: Exclude; readonly skipPackage: (apiPackage: ApiPackage) => boolean; } diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md index d7c3c4006493..648210647a72 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md @@ -315,7 +315,7 @@ export interface DocumentationSuiteConfiguration { readonly hierarchyBoundaries: HierarchyBoundaries; readonly includeBreadcrumb: boolean; readonly includeTopLevelDocumentHeading: boolean; - readonly minimumReleaseLevel: Omit; + readonly minimumReleaseLevel: Exclude; readonly skipPackage: (apiPackage: ApiPackage) => boolean; } diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md index cb0b02f27518..a424cca00739 100644 --- 
a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md @@ -315,7 +315,7 @@ export interface DocumentationSuiteConfiguration { readonly hierarchyBoundaries: HierarchyBoundaries; readonly includeBreadcrumb: boolean; readonly includeTopLevelDocumentHeading: boolean; - readonly minimumReleaseLevel: Omit; + readonly minimumReleaseLevel: Exclude; readonly skipPackage: (apiPackage: ApiPackage) => boolean; } diff --git a/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts b/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts index b0982296cbc2..711bc396bbe8 100644 --- a/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts +++ b/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts @@ -234,7 +234,7 @@ export interface DocumentationSuiteConfiguration { * releaseLevel: ReleaseTag.Beta * ``` */ - readonly minimumReleaseLevel: Omit; + readonly minimumReleaseLevel: Exclude; } /** From c77462d9352ab5c7b0528461aa9845dbbfa0028d Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Wed, 8 Jan 2025 12:30:21 -0800 Subject: [PATCH 12/27] refactor(api-markdown-documenter): Consistent release levels (#23507) Interfaces were previously introduced to represent parameters to HTML rendering functions. They were added as `@public`, but the related functions are `@alpha`. This PR makes the interfaces `@alpha` to align with their corresponding functions. While this is technically a breaking change, the types strictly exist for input to functions that are `@alpha`, so this shouldn't have any impact on users. 
--- .../api-report/api-markdown-documenter.alpha.api.md | 4 ++-- .../api-report/api-markdown-documenter.beta.api.md | 8 -------- .../api-report/api-markdown-documenter.public.api.md | 8 -------- tools/api-markdown-documenter/src/RenderHtml.ts | 4 ++-- 4 files changed, 4 insertions(+), 20 deletions(-) diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md index 62d1934aa871..77f543aac24d 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md @@ -650,7 +650,7 @@ export { ReleaseTag } // @alpha function renderApiModelAsHtml(options: RenderApiModelAsHtmlOptions): Promise; -// @public +// @alpha interface RenderApiModelAsHtmlOptions extends ApiItemTransformationOptions, RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { } @@ -674,7 +674,7 @@ export interface RenderDocumentAsHtmlConfiguration extends ToHtmlConfiguration, // @alpha function renderDocumentsAsHtml(documents: DocumentNode[], options: RenderDocumentsAsHtmlOptions): Promise; -// @public +// @alpha interface RenderDocumentsAsHtmlOptions extends RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { } diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md index 648210647a72..f2e0d32cc7db 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md @@ -647,10 +647,6 @@ export class PlainTextNode extends DocumentationLiteralNodeBase implemen export { ReleaseTag } -// @public -interface RenderApiModelAsHtmlOptions extends ApiItemTransformationOptions, RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { -} - // @public function 
renderApiModelAsMarkdown(options: RenderApiModelAsMarkdownOptions): Promise; @@ -668,10 +664,6 @@ function renderDocument_2(document: DocumentNode, config: MarkdownRenderConfigur export interface RenderDocumentAsHtmlConfiguration extends ToHtmlConfiguration, RenderHtmlConfiguration { } -// @public -interface RenderDocumentsAsHtmlOptions extends RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { -} - // @public function renderDocumentsAsMarkdown(documents: DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md index a424cca00739..79f04e72ae03 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md @@ -625,10 +625,6 @@ export class PlainTextNode extends DocumentationLiteralNodeBase implemen export { ReleaseTag } -// @public -interface RenderApiModelAsHtmlOptions extends ApiItemTransformationOptions, RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { -} - // @public function renderApiModelAsMarkdown(options: RenderApiModelAsMarkdownOptions): Promise; @@ -646,10 +642,6 @@ function renderDocument_2(document: DocumentNode, config: MarkdownRenderConfigur export interface RenderDocumentAsHtmlConfiguration extends ToHtmlConfiguration, RenderHtmlConfiguration { } -// @public -interface RenderDocumentsAsHtmlOptions extends RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { -} - // @public function renderDocumentsAsMarkdown(documents: DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; diff --git a/tools/api-markdown-documenter/src/RenderHtml.ts b/tools/api-markdown-documenter/src/RenderHtml.ts index c02352240419..e480429ed9aa 100644 --- a/tools/api-markdown-documenter/src/RenderHtml.ts +++ 
b/tools/api-markdown-documenter/src/RenderHtml.ts @@ -18,7 +18,7 @@ import { type RenderDocumentAsHtmlConfiguration, renderDocumentAsHtml } from "./ /** * API Model HTML rendering options. * - * @public + * @alpha */ export interface RenderApiModelAsHtmlOptions extends ApiItemTransformationOptions, @@ -55,7 +55,7 @@ export async function renderApiModelAsHtml(options: RenderApiModelAsHtmlOptions) /** * Options for rendering {@link DocumentNode}s as HTML. * - * @public + * @alpha */ export interface RenderDocumentsAsHtmlOptions extends RenderDocumentAsHtmlConfiguration, From c8670c4b2cbfe810c58cc4cbf7f9349d6d26c031 Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Wed, 8 Jan 2025 12:31:23 -0800 Subject: [PATCH 13/27] refactor(api-markdown-documenter): Make property `readonly` (#23509) Makes `FileSystemConfiguration.outputDirectoryPath` readonly. --- tools/api-markdown-documenter/CHANGELOG.md | 1 + .../api-report/api-markdown-documenter.alpha.api.md | 2 +- .../api-report/api-markdown-documenter.beta.api.md | 2 +- .../api-report/api-markdown-documenter.public.api.md | 2 +- tools/api-markdown-documenter/src/FileSystemConfiguration.ts | 2 +- 5 files changed, 5 insertions(+), 4 deletions(-) diff --git a/tools/api-markdown-documenter/CHANGELOG.md b/tools/api-markdown-documenter/CHANGELOG.md index 1c926086d39d..e894a70422c9 100644 --- a/tools/api-markdown-documenter/CHANGELOG.md +++ b/tools/api-markdown-documenter/CHANGELOG.md @@ -68,6 +68,7 @@ await MarkdownRenderer.renderApiModel({ - `ApiItemTransformations` - `ApiItemTransformationConfiguration` - `DocumentationSuiteOptions` +- `FileSystemConfiguration` - `HtmlRenderer.RenderHtmlConfig` - `LintApiModelConfiguration` - `MarkdownRenderer.Renderers` diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md index 77f543aac24d..4ee7702592a4 100644 --- 
a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md @@ -374,7 +374,7 @@ export class FencedCodeBlockNode extends DocumentationParentNodeBase implements // @public export interface FileSystemConfiguration { readonly newlineKind?: NewlineKind; - outputDirectoryPath: string; + readonly outputDirectoryPath: string; } // @public diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md index f2e0d32cc7db..1b6136ac16b2 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md @@ -374,7 +374,7 @@ export class FencedCodeBlockNode extends DocumentationParentNodeBase implements // @public export interface FileSystemConfiguration { readonly newlineKind?: NewlineKind; - outputDirectoryPath: string; + readonly outputDirectoryPath: string; } // @public diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md index 79f04e72ae03..73549aafc554 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md @@ -374,7 +374,7 @@ export class FencedCodeBlockNode extends DocumentationParentNodeBase implements // @public export interface FileSystemConfiguration { readonly newlineKind?: NewlineKind; - outputDirectoryPath: string; + readonly outputDirectoryPath: string; } // @public diff --git a/tools/api-markdown-documenter/src/FileSystemConfiguration.ts b/tools/api-markdown-documenter/src/FileSystemConfiguration.ts index 2265a661cdfb..2250702730ff 100644 --- a/tools/api-markdown-documenter/src/FileSystemConfiguration.ts +++ 
b/tools/api-markdown-documenter/src/FileSystemConfiguration.ts @@ -14,7 +14,7 @@ export interface FileSystemConfiguration { /** * The directory under which the document files will be generated. */ - outputDirectoryPath: string; + readonly outputDirectoryPath: string; /** * Specifies what type of newlines API Documenter should use when writing output files. From acc9dbf0cd73b2741fb7a8bdda2df047f4917314 Mon Sep 17 00:00:00 2001 From: Jason Hartman Date: Wed, 8 Jan 2025 12:35:49 -0800 Subject: [PATCH 14/27] Revert "eslint(container-runtime): Prefix container-runtime before enabling no-unchecked-record-access" (#23499) Reverts microsoft/FluidFramework#23437 1. Where `?` was used to address linter defect, TypeScript appears to mostly ignore that these cases may lead to `undefined` result which is not accepted per type specifications. Use of `?` is thus a behavior change that will shift point of failure away from where it could first be detected - revert those behavior changes. (Many of the test uses of `?` in original change are permissible as there is a follow-up assertion that will fail. But those were not separated during revert.) 2. Where `T | undefined` was used to address linter defect, TypeScript will narrow without `undefined` without `noUncheckedIndexAccess` enabled. Thus, the code appears more confusing as there is a non-respected type annotation. 
--- .../src/blobManager/blobManagerSnapSum.ts | 2 +- .../container-runtime/src/channelCollection.ts | 2 +- .../container-runtime/src/containerRuntime.ts | 3 +-- .../src/gc/garbageCollection.ts | 3 +-- .../container-runtime/src/gc/gcConfigs.ts | 2 +- .../container-runtime/src/gc/gcHelpers.ts | 4 ++-- .../src/gc/gcReferenceGraphAlgorithm.ts | 2 +- .../src/test/channelCollection.spec.ts | 2 +- .../src/test/gc/garbageCollection.spec.ts | 16 ++++++---------- .../src/test/gc/gcSummaryStateTracker.spec.ts | 18 +++++++++--------- 10 files changed, 24 insertions(+), 30 deletions(-) diff --git a/packages/runtime/container-runtime/src/blobManager/blobManagerSnapSum.ts b/packages/runtime/container-runtime/src/blobManager/blobManagerSnapSum.ts index 330524365942..8d6c54207c99 100644 --- a/packages/runtime/container-runtime/src/blobManager/blobManagerSnapSum.ts +++ b/packages/runtime/container-runtime/src/blobManager/blobManagerSnapSum.ts @@ -47,7 +47,7 @@ const loadV1 = async ( return {}; } let redirectTableEntries: [string, string][] = []; - const tableId: string | undefined = blobsTree.blobs[redirectTableBlobName]; + const tableId = blobsTree.blobs[redirectTableBlobName]; if (tableId) { redirectTableEntries = await readAndParse(context.storage, tableId); } diff --git a/packages/runtime/container-runtime/src/channelCollection.ts b/packages/runtime/container-runtime/src/channelCollection.ts index ad20d3ff87b9..0fed13948adb 100644 --- a/packages/runtime/container-runtime/src/channelCollection.ts +++ b/packages/runtime/container-runtime/src/channelCollection.ts @@ -1551,7 +1551,7 @@ export function getSummaryForDatastores( } if (rootHasIsolatedChannels(metadata)) { - const datastoresSnapshot: ISnapshotTree | undefined = snapshot.trees[channelsTreeName]; + const datastoresSnapshot = snapshot.trees[channelsTreeName]; assert(!!datastoresSnapshot, 0x168 /* Expected tree in snapshot not found */); return datastoresSnapshot; } else { diff --git 
a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index 8760bfe0abd3..96a50678b9e7 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -64,7 +64,6 @@ import { ISequencedDocumentMessage, ISignalMessage, type ISummaryContext, - type SummaryObject, } from "@fluidframework/driver-definitions/internal"; import { readAndParse } from "@fluidframework/driver-utils/internal"; import type { IIdCompressor } from "@fluidframework/id-compressor"; @@ -4152,7 +4151,7 @@ export class ContainerRuntime // Counting dataStores and handles // Because handles are unchanged dataStores in the current logic, // summarized dataStore count is total dataStore count minus handle count - const dataStoreTree: SummaryObject | undefined = summaryTree.tree[channelsTreeName]; + const dataStoreTree = summaryTree.tree[channelsTreeName]; assert(dataStoreTree.type === SummaryType.Tree, 0x1fc /* "summary is not a tree" */); const handleCount = Object.values(dataStoreTree.tree).filter( diff --git a/packages/runtime/container-runtime/src/gc/garbageCollection.ts b/packages/runtime/container-runtime/src/gc/garbageCollection.ts index 274dced46c90..ef5da9a762a6 100644 --- a/packages/runtime/container-runtime/src/gc/garbageCollection.ts +++ b/packages/runtime/container-runtime/src/gc/garbageCollection.ts @@ -5,7 +5,6 @@ import { IRequest } from "@fluidframework/core-interfaces"; import { assert, LazyPromise, Timer } from "@fluidframework/core-utils/internal"; -import type { ISnapshotTree } from "@fluidframework/driver-definitions/internal"; import { IGarbageCollectionDetailsBase, ISummarizeResult, @@ -227,7 +226,7 @@ export class GarbageCollector implements IGarbageCollector { try { // For newer documents, GC data should be present in the GC tree in the root of the snapshot. 
- const gcSnapshotTree: ISnapshotTree | undefined = baseSnapshot.trees[gcTreeKey]; + const gcSnapshotTree = baseSnapshot.trees[gcTreeKey]; if (gcSnapshotTree === undefined) { // back-compat - Older documents get their gc data reset for simplicity as there are few of them // incremental gc summary will not work with older gc data as well diff --git a/packages/runtime/container-runtime/src/gc/gcConfigs.ts b/packages/runtime/container-runtime/src/gc/gcConfigs.ts index 1324058bb839..42a0f6d4405d 100644 --- a/packages/runtime/container-runtime/src/gc/gcConfigs.ts +++ b/packages/runtime/container-runtime/src/gc/gcConfigs.ts @@ -87,7 +87,7 @@ export function generateGCConfigs( tombstoneTimeoutMs = testOverrideTombstoneTimeoutMs ?? computeTombstoneTimeout(sessionExpiryTimeoutMs); - const gcGeneration = createParams.gcOptions?.[gcGenerationOptionName]; + const gcGeneration = createParams.gcOptions[gcGenerationOptionName]; if (gcGeneration !== undefined) { persistedGcFeatureMatrix = { gcGeneration }; } diff --git a/packages/runtime/container-runtime/src/gc/gcHelpers.ts b/packages/runtime/container-runtime/src/gc/gcHelpers.ts index c4ae3dd9a787..13692392e9fc 100644 --- a/packages/runtime/container-runtime/src/gc/gcHelpers.ts +++ b/packages/runtime/container-runtime/src/gc/gcHelpers.ts @@ -109,7 +109,7 @@ export function concatGarbageCollectionStates( } for (const [nodeId, nodeData] of Object.entries(gcState2.gcNodes)) { - let combineNodeData: IGarbageCollectionNodeData | undefined = combinedGCNodes[nodeId]; + let combineNodeData = combinedGCNodes[nodeId]; if (combineNodeData === undefined) { combineNodeData = { outboundRoutes: Array.from(nodeData.outboundRoutes), @@ -202,7 +202,7 @@ export async function getGCDataFromSnapshot( continue; } - const blobId: string | undefined = gcSnapshotTree.blobs[key]; + const blobId = gcSnapshotTree.blobs[key]; if (blobId === undefined) { continue; } diff --git a/packages/runtime/container-runtime/src/gc/gcReferenceGraphAlgorithm.ts 
b/packages/runtime/container-runtime/src/gc/gcReferenceGraphAlgorithm.ts index 90e1340e2c32..8550d0f18b26 100644 --- a/packages/runtime/container-runtime/src/gc/gcReferenceGraphAlgorithm.ts +++ b/packages/runtime/container-runtime/src/gc/gcReferenceGraphAlgorithm.ts @@ -31,7 +31,7 @@ export function runGarbageCollection( // Get the node for the referenced id and add its outbound routes to referencedIds since they are // also referenced. - const routes: string[] | undefined = referenceGraph[id]; + const routes = referenceGraph[id]; if (routes !== undefined) { referencedIds.push(...routes); } diff --git a/packages/runtime/container-runtime/src/test/channelCollection.spec.ts b/packages/runtime/container-runtime/src/test/channelCollection.spec.ts index 7e6132d395c6..2f1b87035e01 100644 --- a/packages/runtime/container-runtime/src/test/channelCollection.spec.ts +++ b/packages/runtime/container-runtime/src/test/channelCollection.spec.ts @@ -115,7 +115,7 @@ describe("Runtime", () => { assert.strictEqual(snapshot.id, "channels-id", "Should be lower-level"); assert.strictEqual(Object.keys(snapshot.trees).length, 4, "Should have 4 datastores"); // Put in variable to avoid type-narrowing bug - const nonDataStore1: ISnapshotTree | undefined = snapshot.trees[nonDataStorePaths[0]]; + const nonDataStore1 = snapshot.trees[nonDataStorePaths[0]]; assert.strictEqual( nonDataStore1?.id, "lower-non-datastore-1", diff --git a/packages/runtime/container-runtime/src/test/gc/garbageCollection.spec.ts b/packages/runtime/container-runtime/src/test/gc/garbageCollection.spec.ts index a05f30ed10f9..7e9fbcc9f7b6 100644 --- a/packages/runtime/container-runtime/src/test/gc/garbageCollection.spec.ts +++ b/packages/runtime/container-runtime/src/test/gc/garbageCollection.spec.ts @@ -10,10 +10,7 @@ import { ContainerErrorTypes } from "@fluidframework/container-definitions/inter import { IErrorBase, ITelemetryBaseEvent } from "@fluidframework/core-interfaces"; import { Timer } from 
"@fluidframework/core-utils/internal"; import { SummaryType } from "@fluidframework/driver-definitions"; -import { - ISnapshotTree, - type SummaryObject, -} from "@fluidframework/driver-definitions/internal"; +import { ISnapshotTree } from "@fluidframework/driver-definitions/internal"; import { IGarbageCollectionDetailsBase, ISummarizeResult, @@ -1510,8 +1507,7 @@ describe("Garbage Collection Tests", () => { assert(summaryTree?.summary.type === SummaryType.Tree, "The summary should be a tree"); // Get the deleted node ids from summary and validate that its the same as the one GC loaded from. - const deletedNodesBlob: SummaryObject | undefined = - summaryTree.summary.tree[gcDeletedBlobKey]; + const deletedNodesBlob = summaryTree.summary.tree[gcDeletedBlobKey]; assert( deletedNodesBlob.type === SummaryType.Blob, "Deleted blob not present in summary", @@ -1560,8 +1556,7 @@ describe("Garbage Collection Tests", () => { assert(gcSummary?.summary.type === SummaryType.Tree, "The summary should be a tree"); // Get the deleted node ids from summary and validate that its the same as the one GC loaded from. - const deletedNodesBlob: SummaryObject | undefined = - gcSummary.summary.tree[gcDeletedBlobKey]; + const deletedNodesBlob = gcSummary.summary.tree[gcDeletedBlobKey]; assert( deletedNodesBlob.type === SummaryType.Handle, "Deleted nodes state should be a handle", @@ -1649,12 +1644,13 @@ describe("Garbage Collection Tests", () => { assert(summaryTree.type === SummaryType.Tree, "Expecting a summary tree!"); let rootGCState: IGarbageCollectionState = { gcNodes: {} }; - for (const [key, gcBlob] of Object.entries(summaryTree.tree)) { + for (const key of Object.keys(summaryTree.tree)) { // Skip blobs that do not start with the GC prefix. 
if (!key.startsWith(gcBlobPrefix)) { continue; } + const gcBlob = summaryTree.tree[key]; assert(gcBlob?.type === SummaryType.Blob, `GC blob not available`); const gcState = JSON.parse(gcBlob.content as string) as IGarbageCollectionState; // Merge the GC state of this blob into the root GC state. @@ -1955,7 +1951,7 @@ describe("Garbage Collection Tests", () => { defaultGCData.gcNodes[nodeE] = [nodeA]; // 4. Add reference from A to D with calling addedOutboundReference - defaultGCData.gcNodes[nodeA]?.push(nodeD); + defaultGCData.gcNodes[nodeA].push(nodeD); garbageCollector.addedOutboundReference(nodeA, nodeD, Date.now()); // 5. Run GC and generate summary 2. E = [A -\> B, A -\> C, A -\> E, D -\> C, E -\> A]. diff --git a/packages/runtime/container-runtime/src/test/gc/gcSummaryStateTracker.spec.ts b/packages/runtime/container-runtime/src/test/gc/gcSummaryStateTracker.spec.ts index ac2649a7f89e..197732656746 100644 --- a/packages/runtime/container-runtime/src/test/gc/gcSummaryStateTracker.spec.ts +++ b/packages/runtime/container-runtime/src/test/gc/gcSummaryStateTracker.spec.ts @@ -81,15 +81,15 @@ describe("GCSummaryStateTracker tests", () => { ); assert(summary?.summary.type === SummaryType.Tree, "GC summary should be a tree"); assert( - summary.summary.tree[gcStateBlobKey]?.type === SummaryType.Blob, + summary.summary.tree[gcStateBlobKey].type === SummaryType.Blob, "GC state should be written as a blob", ); assert( - summary.summary.tree[gcTombstoneBlobKey]?.type === SummaryType.Handle, + summary.summary.tree[gcTombstoneBlobKey].type === SummaryType.Handle, "Tombstone state should be written as handle", ); assert( - summary.summary.tree[gcDeletedBlobKey]?.type === SummaryType.Handle, + summary.summary.tree[gcDeletedBlobKey].type === SummaryType.Handle, "Deleted nodes should be written as handle", ); }); @@ -106,15 +106,15 @@ describe("GCSummaryStateTracker tests", () => { ); assert(summary?.summary.type === SummaryType.Tree, "GC summary should be a tree"); assert( - 
summary.summary.tree[gcStateBlobKey]?.type === SummaryType.Handle, + summary.summary.tree[gcStateBlobKey].type === SummaryType.Handle, "GC state should be written as handle", ); assert( - summary.summary.tree[gcTombstoneBlobKey]?.type === SummaryType.Blob, + summary.summary.tree[gcTombstoneBlobKey].type === SummaryType.Blob, "Tombstone state should be written as a blob", ); assert( - summary.summary.tree[gcDeletedBlobKey]?.type === SummaryType.Handle, + summary.summary.tree[gcDeletedBlobKey].type === SummaryType.Handle, "Deleted nodes should be written as handle", ); }); @@ -131,15 +131,15 @@ describe("GCSummaryStateTracker tests", () => { ); assert(summary?.summary.type === SummaryType.Tree, "GC summary should be a tree"); assert( - summary.summary.tree[gcStateBlobKey]?.type === SummaryType.Handle, + summary.summary.tree[gcStateBlobKey].type === SummaryType.Handle, "GC state should be written as handle", ); assert( - summary.summary.tree[gcTombstoneBlobKey]?.type === SummaryType.Handle, + summary.summary.tree[gcTombstoneBlobKey].type === SummaryType.Handle, "Tombstone state should be written as handle", ); assert( - summary.summary.tree[gcDeletedBlobKey]?.type === SummaryType.Blob, + summary.summary.tree[gcDeletedBlobKey].type === SummaryType.Blob, "Deleted nodes should be written as a blob", ); }); From ca172e788ab90efc936edd3481c177d2e9937af5 Mon Sep 17 00:00:00 2001 From: Jason Hartman Date: Wed, 8 Jan 2025 12:36:37 -0800 Subject: [PATCH 15/27] Partially revert #23432 "Prefix ... no-unchecked-record-access..." (#23486) Partially revert "Prefix trivial issues when enabling no-unchecked-record-access for client packages (#23432)" This reverts commit 88caebd330d8b44221ba4b011398a9c8218d38b5 where: 1. `?` was used to address linter defect. TypeScript appears to mostly ignore that these cases may lead to `undefined` result which is not accepted per type specifications. 
Use of `?` is thus a behavior change that will shift point of failure away from where it could first be detected - revert those behavior changes. (Many of the test uses of `?` in original change are permissible as there is a follow-up assertion that will fail. But those were not separated during revert.) 2. `T | undefined` was used to address linter defect. TypeScript will narrow without `undefined` without `noUncheckedIndexAccess` enabled. The only files that retained changes from original commit are: - packages/dds/merge-tree/src/test/beastTest.spec.ts - packages/drivers/odsp-driver/src/WriteBufferUtils.ts - packages/runtime/id-compressor/src/test/idCompressor.spec.ts - packages/tools/fetch-tool/src/fluidFetchSnapshot.ts --- .../dds/attributable-map/src/mapKernel.ts | 2 +- packages/dds/map/src/directory.ts | 2 +- packages/dds/map/src/mapKernel.ts | 2 +- .../dds/map/src/test/mocha/directory.spec.ts | 20 +++++++++---------- packages/dds/sequence/src/revertibles.ts | 12 +++++------ .../odsp-driver/src/WriteBufferUtils.ts | 4 ++-- .../odsp-driver/src/compactSnapshotParser.ts | 10 +++++----- .../src/odspDocumentStorageServiceBase.ts | 5 ++--- .../odsp-driver/src/odspDriverUrlResolver.ts | 2 +- .../src/test/createNewUtilsTests.spec.ts | 14 +++++-------- .../src/test/getUrlAndHeadersWithAuth.spec.ts | 2 +- .../src/test/prefetchSnapshotTests.spec.ts | 8 ++++---- .../src/storageImplementations.ts | 4 ++-- .../src/documentService.ts | 6 +++--- .../src/documentServiceFactory.ts | 8 ++++---- .../test/summaryCompresssionTester.spec.ts | 6 +++--- .../src/test/containerRuntime.spec.ts | 5 ++--- .../src/test/gc/gcStats.spec.ts | 2 +- .../src/test/idCompressor.spec.ts | 2 +- .../src/test/summaryUtils.spec.ts | 10 +++++----- .../test/test-drivers/src/odspTestDriver.ts | 3 ++- .../src/routerliciousTestDriver.ts | 3 +-- .../test/test-service-load/src/FileLogger.ts | 2 +- .../data-visualization/DataVisualization.ts | 3 +-- .../graphs/DynamicComposedChart.tsx | 2 +- 25 files changed, 
66 insertions(+), 73 deletions(-) diff --git a/experimental/dds/attributable-map/src/mapKernel.ts b/experimental/dds/attributable-map/src/mapKernel.ts index 4d95cfd61b37..512ae72ceedb 100644 --- a/experimental/dds/attributable-map/src/mapKernel.ts +++ b/experimental/dds/attributable-map/src/mapKernel.ts @@ -216,7 +216,7 @@ export class AttributableMapKernel { return nextVal.done ? { value: undefined, done: true } : // Unpack the stored value - { value: [nextVal.value[0], nextVal.value[1]?.value], done: false }; + { value: [nextVal.value[0], nextVal.value[1].value], done: false }; }, [Symbol.iterator](): IterableIterator<[string, unknown]> { return this; diff --git a/packages/dds/map/src/directory.ts b/packages/dds/map/src/directory.ts index 98a79eed4217..c867e3907db6 100644 --- a/packages/dds/map/src/directory.ts +++ b/packages/dds/map/src/directory.ts @@ -1607,7 +1607,7 @@ class SubDirectory extends TypedEventEmitter implements IDirec const nextVal = localEntriesIterator.next(); return nextVal.done ? { value: undefined, done: true } - : { value: [nextVal.value[0], nextVal.value[1]?.value], done: false }; + : { value: [nextVal.value[0], nextVal.value[1].value], done: false }; }, [Symbol.iterator](): IterableIterator<[string, unknown]> { return this; diff --git a/packages/dds/map/src/mapKernel.ts b/packages/dds/map/src/mapKernel.ts index e3e183108a03..f1f41f7c7d82 100644 --- a/packages/dds/map/src/mapKernel.ts +++ b/packages/dds/map/src/mapKernel.ts @@ -208,7 +208,7 @@ export class MapKernel { return nextVal.done ? 
{ value: undefined, done: true } : // Unpack the stored value - { value: [nextVal.value[0], nextVal.value[1]?.value], done: false }; + { value: [nextVal.value[0], nextVal.value[1].value], done: false }; }, [Symbol.iterator](): IterableIterator<[string, unknown]> { return this; diff --git a/packages/dds/map/src/test/mocha/directory.spec.ts b/packages/dds/map/src/test/mocha/directory.spec.ts index 86daac63b7be..7b8d46a87b2f 100644 --- a/packages/dds/map/src/test/mocha/directory.spec.ts +++ b/packages/dds/map/src/test/mocha/directory.spec.ts @@ -104,12 +104,12 @@ function serialize(directory1: ISharedDirectory): string { assert.strictEqual(summaryObjectKeys.length, 1, "summary tree should only have one blob"); assert.strictEqual(summaryObjectKeys[0], "header", "summary should have a header blob"); assert.strictEqual( - summaryTree.tree.header?.type, + summaryTree.tree.header.type, SummaryType.Blob, "header is not of SummaryType.Blob", ); - const content = summaryTree.tree.header?.content as string; + const content = summaryTree.tree.header.content as string; return JSON.stringify((JSON.parse(content) as IDirectoryNewStorageFormat).content); } @@ -1725,15 +1725,15 @@ describe("Directory", () => { const fooSubDirIterator = fooSubDir.entries(); const fooSubDirResult1 = fooSubDirIterator.next(); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDirResult1.value?.[0], "testKey"); + assert.equal(fooSubDirResult1.value[0], "testKey"); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDirResult1.value?.[1], "testValue"); + assert.equal(fooSubDirResult1.value[1], "testValue"); assert.equal(fooSubDirResult1.done, false); const fooSubDirResult2 = fooSubDirIterator.next(); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDirResult2.value?.[0], "testKey2"); + assert.equal(fooSubDirResult2.value[0], "testKey2"); // eslint-disable-next-line 
@typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDirResult2.value?.[1], "testValue2"); + assert.equal(fooSubDirResult2.value[1], "testValue2"); assert.equal(fooSubDirResult2.done, false); const fooSubDirResult3 = fooSubDirIterator.next(); assert.equal(fooSubDirResult3.value, undefined); @@ -1755,15 +1755,15 @@ describe("Directory", () => { const fooSubDir2Iterator = fooSubDir2.entries(); const fooSubDir2Result1 = fooSubDir2Iterator.next(); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDir2Result1.value?.[0], "testKey"); + assert.equal(fooSubDir2Result1.value[0], "testKey"); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDir2Result1.value?.[1], "testValue"); + assert.equal(fooSubDir2Result1.value[1], "testValue"); assert.equal(fooSubDir2Result1.done, false); const fooSubDir2Result2 = fooSubDir2Iterator.next(); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDir2Result2.value?.[0], "testKey2"); + assert.equal(fooSubDir2Result2.value[0], "testKey2"); // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - assert.equal(fooSubDir2Result2.value?.[1], "testValue2"); + assert.equal(fooSubDir2Result2.value[1], "testValue2"); assert.equal(fooSubDir2Result2.done, false); const fooSubDir2Result3 = fooSubDir2Iterator.next(); assert.equal(fooSubDir2Result3.value, undefined); diff --git a/packages/dds/sequence/src/revertibles.ts b/packages/dds/sequence/src/revertibles.ts index f37e5537e55b..b32e0ed47dfa 100644 --- a/packages/dds/sequence/src/revertibles.ts +++ b/packages/dds/sequence/src/revertibles.ts @@ -341,7 +341,7 @@ export function appendSharedStringDeltaToRevertibles( revertible.intervals.push({ intervalId: interval.getIntervalId(), - label: interval.properties.referenceRangeLabels?.[0], + label: interval.properties.referenceRangeLabels[0], startOffset: offset, endOffset, }); @@ -351,7 +351,7 @@ export 
function appendSharedStringDeltaToRevertibles( endIntervals.forEach(({ interval, offset }) => { revertible.intervals.push({ intervalId: interval.getIntervalId(), - label: interval.properties.referenceRangeLabels?.[0], + label: interval.properties.referenceRangeLabels[0], endOffset: offset, }); }); @@ -433,7 +433,7 @@ function revertLocalAdd( revertible: TypedRevertible, ) { const id = getUpdatedIdFromInterval(revertible.interval); - const label = revertible.interval.properties.referenceRangeLabels?.[0]; + const label = revertible.interval.properties.referenceRangeLabels[0]; string.getIntervalCollection(label).removeIntervalById(id); } @@ -461,7 +461,7 @@ function revertLocalDelete( string: ISharedString, revertible: TypedRevertible, ) { - const label = revertible.interval.properties.referenceRangeLabels?.[0]; + const label = revertible.interval.properties.referenceRangeLabels[0]; const collection = string.getIntervalCollection(label); const start = string.localReferencePositionToPosition(revertible.start); const startSlidePos = getSlidePosition(string, revertible.start, start); @@ -500,7 +500,7 @@ function revertLocalChange( string: ISharedString, revertible: TypedRevertible, ) { - const label = revertible.interval.properties.referenceRangeLabels?.[0]; + const label = revertible.interval.properties.referenceRangeLabels[0]; const collection = string.getIntervalCollection(label); const id = getUpdatedIdFromInterval(revertible.interval); const start = string.localReferencePositionToPosition(revertible.start); @@ -540,7 +540,7 @@ function revertLocalPropertyChanged( string: ISharedString, revertible: TypedRevertible, ) { - const label = revertible.interval.properties.referenceRangeLabels?.[0]; + const label = revertible.interval.properties.referenceRangeLabels[0]; const id = getUpdatedIdFromInterval(revertible.interval); const newProps = revertible.propertyDeltas; string.getIntervalCollection(label).change(id, { props: newProps }); diff --git 
a/packages/drivers/odsp-driver/src/WriteBufferUtils.ts b/packages/drivers/odsp-driver/src/WriteBufferUtils.ts index 09fcbf0f3fb0..fee46a4e001c 100644 --- a/packages/drivers/odsp-driver/src/WriteBufferUtils.ts +++ b/packages/drivers/odsp-driver/src/WriteBufferUtils.ts @@ -233,8 +233,8 @@ function serializeNodeCore( for (const child of nodeCore.nodes) { if (child instanceof NodeCore) { // For a tree node start and end with set/list start and end marker codes. - const startCode: MarkerCodesStart | undefined = MarkerCodesStart[child.type]; - const endCode: MarkerCodesEnd | undefined = MarkerCodesEnd[child.type]; + const startCode = MarkerCodesStart[child.type]; + const endCode = MarkerCodesEnd[child.type]; assert(startCode !== undefined, 0x285 /* "Start code should not undefined" */); assert(endCode !== undefined, 0x286 /* "End code should not undefined" */); buffer.write(startCode); diff --git a/packages/drivers/odsp-driver/src/compactSnapshotParser.ts b/packages/drivers/odsp-driver/src/compactSnapshotParser.ts index 113181ff2b98..9bb774a90df0 100644 --- a/packages/drivers/odsp-driver/src/compactSnapshotParser.ts +++ b/packages/drivers/odsp-driver/src/compactSnapshotParser.ts @@ -72,7 +72,7 @@ function readBlobSection(node: NodeTypes): { const records = getNodeProps(blob); assertBlobCoreInstance(records.data, "data should be of BlobCore type"); const id = getStringInstance(records.id, "blob id should be string"); - blobContents.set(id, records.data?.arrayBuffer); + blobContents.set(id, records.data.arrayBuffer); } } return { blobContents, slowBlobStructureCount }; @@ -88,15 +88,15 @@ function readOpsSection(node: NodeTypes): ISequencedDocumentMessage[] { const records = getNodeProps(node); assertNumberInstance(records.firstSequenceNumber, "Seq number should be a number"); assertNodeCoreInstance(records.deltas, "Deltas should be a Node"); - for (let i = 0; i < records.deltas?.length; ++i) { + for (let i = 0; i < records.deltas.length; ++i) { // eslint-disable-next-line 
@typescript-eslint/no-unsafe-argument - ops.push(JSON.parse(records.deltas?.getString(i))); + ops.push(JSON.parse(records.deltas.getString(i))); } // Due to a bug at service side, in an edge case service was serializing deltas even // when there are no ops. So just make the code resilient to that bug. Service has also // fixed that bug. assert( - ops.length === 0 || records.firstSequenceNumber?.valueOf() === ops[0].sequenceNumber, + ops.length === 0 || records.firstSequenceNumber.valueOf() === ops[0].sequenceNumber, 0x280 /* "Validate first op seq number" */, ); return ops; @@ -244,7 +244,7 @@ function readSnapshotSection(node: NodeTypes): { const { snapshotTree, slowTreeStructureCount, treeStructureCountWithGroupId } = readTreeSection(records.treeNodes); snapshotTree.id = getStringInstance(records.id, "snapshotId should be string"); - const sequenceNumber = records.sequenceNumber?.valueOf(); + const sequenceNumber = records.sequenceNumber.valueOf(); return { sequenceNumber, snapshotTree, diff --git a/packages/drivers/odsp-driver/src/odspDocumentStorageServiceBase.ts b/packages/drivers/odsp-driver/src/odspDocumentStorageServiceBase.ts index 738a49092d99..39fdb72e3645 100644 --- a/packages/drivers/odsp-driver/src/odspDocumentStorageServiceBase.ts +++ b/packages/drivers/odsp-driver/src/odspDocumentStorageServiceBase.ts @@ -257,9 +257,8 @@ export abstract class OdspDocumentStorageServiceBase implements IDocumentStorage protected combineProtocolAndAppSnapshotTree(snapshotTree: ISnapshotTree): ISnapshotTree { // When we upload the container snapshot, we upload appTree in ".app" and protocol tree in ".protocol" // So when we request the snapshot we get ".app" as tree and not as commit node as in the case just above. 
- const hierarchicalAppTree: ISnapshotTree | undefined = snapshotTree.trees[".app"]; - const hierarchicalProtocolTree: ISnapshotTree | undefined = - snapshotTree.trees[".protocol"]; + const hierarchicalAppTree = snapshotTree.trees[".app"]; + const hierarchicalProtocolTree = snapshotTree.trees[".protocol"]; const summarySnapshotTree: ISnapshotTree = { blobs: { ...hierarchicalAppTree.blobs, diff --git a/packages/drivers/odsp-driver/src/odspDriverUrlResolver.ts b/packages/drivers/odsp-driver/src/odspDriverUrlResolver.ts index 3222e57606c2..6392ffaf8ff9 100644 --- a/packages/drivers/odsp-driver/src/odspDriverUrlResolver.ts +++ b/packages/drivers/odsp-driver/src/odspDriverUrlResolver.ts @@ -108,7 +108,7 @@ export class OdspDriverUrlResolver implements IUrlResolver { const searchParams = new URLSearchParams(queryString); // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access - const fileName: string = request.headers[DriverHeader.createNew]?.fileName; + const fileName: string = request.headers[DriverHeader.createNew].fileName; const driveID = searchParams.get("driveId"); const filePath = searchParams.get("path"); const packageName = searchParams.get("containerPackageName"); diff --git a/packages/drivers/odsp-driver/src/test/createNewUtilsTests.spec.ts b/packages/drivers/odsp-driver/src/test/createNewUtilsTests.spec.ts index 335db33bf581..2dfed6c5e796 100644 --- a/packages/drivers/odsp-driver/src/test/createNewUtilsTests.spec.ts +++ b/packages/drivers/odsp-driver/src/test/createNewUtilsTests.spec.ts @@ -7,11 +7,7 @@ import { strict as assert } from "node:assert"; import { bufferToString, fromBase64ToUtf8 } from "@fluid-internal/client-utils"; import { ISummaryTree, SummaryType } from "@fluidframework/driver-definitions"; -import { - ISnapshot, - IDocumentAttributes, - type ISnapshotTree, -} from "@fluidframework/driver-definitions/internal"; +import { ISnapshot, IDocumentAttributes } from 
"@fluidframework/driver-definitions/internal"; import { IFileEntry, IOdspResolvedUrl, @@ -143,18 +139,18 @@ describe("Create New Utils Tests", () => { ); assert.strictEqual(snapshot.blobContents.size, 2, "2 blobs should be there"); - const appTree: ISnapshotTree | undefined = snapshotTree.trees[".app"]; - const protocolTree: ISnapshotTree | undefined = snapshotTree.trees[".protocol"]; + const appTree = snapshotTree.trees[".app"]; + const protocolTree = snapshotTree.trees[".protocol"]; assert(appTree !== undefined, "App tree should be there"); assert(protocolTree !== undefined, "Protocol tree should be there"); - const appTreeBlobId: string | undefined = appTree.blobs.attributes; + const appTreeBlobId = appTree.blobs.attributes; const appTreeBlobValBuffer = snapshot.blobContents.get(appTreeBlobId); assert(appTreeBlobValBuffer !== undefined, "app blob value should exist"); const appTreeBlobVal = bufferToString(appTreeBlobValBuffer, "utf8"); assert(appTreeBlobVal === blobContent, "Blob content should match"); - const docAttributesBlobId: string | undefined = protocolTree.blobs.attributes; + const docAttributesBlobId = protocolTree.blobs.attributes; const docAttributesBuffer = snapshot.blobContents.get(docAttributesBlobId); assert(docAttributesBuffer !== undefined, "protocol attributes blob value should exist"); const docAttributesBlobValue = bufferToString(docAttributesBuffer, "utf8"); diff --git a/packages/drivers/odsp-driver/src/test/getUrlAndHeadersWithAuth.spec.ts b/packages/drivers/odsp-driver/src/test/getUrlAndHeadersWithAuth.spec.ts index efb67adb25b8..661cecb3b2d9 100644 --- a/packages/drivers/odsp-driver/src/test/getUrlAndHeadersWithAuth.spec.ts +++ b/packages/drivers/odsp-driver/src/test/getUrlAndHeadersWithAuth.spec.ts @@ -24,7 +24,7 @@ describe("getHeadersWithAuth", () => { result: { [index: string]: string }, ): void => { assert.strictEqual( - result.Authorization?.endsWith(token), + result.Authorization.endsWith(token), true, "Returned header must 
contain token", ); diff --git a/packages/drivers/odsp-driver/src/test/prefetchSnapshotTests.spec.ts b/packages/drivers/odsp-driver/src/test/prefetchSnapshotTests.spec.ts index dea58cc7a1ef..051cd324accf 100644 --- a/packages/drivers/odsp-driver/src/test/prefetchSnapshotTests.spec.ts +++ b/packages/drivers/odsp-driver/src/test/prefetchSnapshotTests.spec.ts @@ -626,9 +626,9 @@ describe("Tests for prefetching snapshot", () => { }; const odspCompactSnapshotWithGroupId = convertToCompactSnapshot(snapshotWithGroupId); const snapshotTreeWithGroupIdToCompare: ISnapshotTree = { - blobs: { ...snapshotTreeWithGroupId.trees[".app"]?.blobs }, + blobs: { ...snapshotTreeWithGroupId.trees[".app"].blobs }, trees: { - ...snapshotTreeWithGroupId.trees[".app"]?.trees, + ...snapshotTreeWithGroupId.trees[".app"].trees, ".protocol": snapshotTreeWithGroupId.trees[".protocol"], }, id: "SnapshotId", @@ -866,9 +866,9 @@ describe("Tests for prefetching snapshot", () => { }; const odspCompactSnapshotWithGroupId = convertToCompactSnapshot(snapshotWithGroupId); const snapshotTreeWithGroupIdToCompare: ISnapshotTree = { - blobs: { ...snapshotTreeWithGroupId.trees[".app"]?.blobs }, + blobs: { ...snapshotTreeWithGroupId.trees[".app"].blobs }, trees: { - ...snapshotTreeWithGroupId.trees[".app"]?.trees, + ...snapshotTreeWithGroupId.trees[".app"].trees, ".protocol": snapshotTreeWithGroupId.trees[".protocol"], }, id: "SnapshotId", diff --git a/packages/drivers/replay-driver/src/storageImplementations.ts b/packages/drivers/replay-driver/src/storageImplementations.ts index 9ba488ee2b0a..63c8c02d4102 100644 --- a/packages/drivers/replay-driver/src/storageImplementations.ts +++ b/packages/drivers/replay-driver/src/storageImplementations.ts @@ -80,9 +80,9 @@ export class FileSnapshotReader throw new Error(`Unknown version id: ${versionRequested}`); } - let snapshotTree: ISnapshotTree | undefined = this.trees[versionRequested.id]; + let snapshotTree = this.trees[versionRequested.id]; if (snapshotTree === 
undefined) { - const tree: ITree | undefined = this.commits[versionRequested.id]; + const tree = this.commits[versionRequested.id]; if (tree === undefined) { throw new Error(`Can't find version ${versionRequested.id}`); } diff --git a/packages/drivers/routerlicious-driver/src/documentService.ts b/packages/drivers/routerlicious-driver/src/documentService.ts index 8cd7245f8f20..71642c821f85 100644 --- a/packages/drivers/routerlicious-driver/src/documentService.ts +++ b/packages/drivers/routerlicious-driver/src/documentService.ts @@ -302,9 +302,9 @@ export class DocumentService } const fluidResolvedUrl = response.resolvedUrl; this._resolvedUrl = fluidResolvedUrl; - this.storageUrl = fluidResolvedUrl.endpoints?.storageUrl; - this.ordererUrl = fluidResolvedUrl.endpoints?.ordererUrl; - this.deltaStorageUrl = fluidResolvedUrl.endpoints?.deltaStorageUrl; + this.storageUrl = fluidResolvedUrl.endpoints.storageUrl; + this.ordererUrl = fluidResolvedUrl.endpoints.ordererUrl; + this.deltaStorageUrl = fluidResolvedUrl.endpoints.deltaStorageUrl; this.deltaStreamUrl = fluidResolvedUrl.endpoints.deltaStreamUrl ?? this.ordererUrl; return true; } diff --git a/packages/drivers/routerlicious-driver/src/documentServiceFactory.ts b/packages/drivers/routerlicious-driver/src/documentServiceFactory.ts index 70f1492939e0..0fb133dc4833 100644 --- a/packages/drivers/routerlicious-driver/src/documentServiceFactory.ts +++ b/packages/drivers/routerlicious-driver/src/documentServiceFactory.ts @@ -213,7 +213,7 @@ export class RouterliciousDocumentServiceFactory implements IDocumentServiceFact } parsedUrl.pathname = replaceDocumentIdInPath(parsedUrl.pathname, documentId); - const deltaStorageUrl = resolvedUrl.endpoints?.deltaStorageUrl; + const deltaStorageUrl = resolvedUrl.endpoints.deltaStorageUrl; if (!deltaStorageUrl) { throw new Error( `All endpoints urls must be provided. 
[deltaStorageUrl:${deltaStorageUrl}]`, @@ -303,9 +303,9 @@ export class RouterliciousDocumentServiceFactory implements IDocumentServiceFact }, ); - const storageUrl = fluidResolvedUrl.endpoints?.storageUrl; - const ordererUrl = fluidResolvedUrl.endpoints?.ordererUrl; - const deltaStorageUrl = fluidResolvedUrl.endpoints?.deltaStorageUrl; + const storageUrl = fluidResolvedUrl.endpoints.storageUrl; + const ordererUrl = fluidResolvedUrl.endpoints.ordererUrl; + const deltaStorageUrl = fluidResolvedUrl.endpoints.deltaStorageUrl; const deltaStreamUrl = fluidResolvedUrl.endpoints.deltaStreamUrl || ordererUrl; // backward compatibility if (!ordererUrl || !deltaStorageUrl) { throw new Error( diff --git a/packages/loader/driver-utils/src/test/summaryCompresssionTester.spec.ts b/packages/loader/driver-utils/src/test/summaryCompresssionTester.spec.ts index f802dd7afbe1..dd62b57e4094 100644 --- a/packages/loader/driver-utils/src/test/summaryCompresssionTester.spec.ts +++ b/packages/loader/driver-utils/src/test/summaryCompresssionTester.spec.ts @@ -69,7 +69,7 @@ function generateSummaryWithContent(contentSize: number) { ".channels" ] as ISummaryTree ).tree["7a99532d-94ec-43ac-8a53-d9f978ad4ae9"] as ISummaryTree - ).tree?.header; + ).tree.header; let contentString = ""; while (contentString.length < contentSize) { if (contentString.length + 10 > contentSize) { @@ -91,7 +91,7 @@ function generateSummaryWithBinaryContent(startsWith: number, contentSize: numbe ".channels" ] as ISummaryTree ).tree["7a99532d-94ec-43ac-8a53-d9f978ad4ae9"] as ISummaryTree - ).tree?.header; + ).tree.header; const content = new Uint8Array(contentSize); content[0] = startsWith; for (let i = 1; i < contentSize; i = i + 10) { @@ -621,7 +621,7 @@ function getHeaderContent(summary: ISummaryTree) { } function getHeader(summary: ISummaryTree) { - return getHeaderHolder(summary).tree?.header; + return getHeaderHolder(summary).tree.header; } function getHeaderHolder(summary: ISummaryTree) { diff --git 
a/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts b/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts index 0d0445cd8d4b..f195841000d6 100644 --- a/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts +++ b/packages/runtime/container-runtime/src/test/containerRuntime.spec.ts @@ -31,7 +31,6 @@ import { type FetchSource, type IDocumentAttributes, SummaryType, - type SummaryObject, } from "@fluidframework/driver-definitions/internal"; import { ISummaryTreeWithStats, @@ -2094,7 +2093,7 @@ describe("Runtime", () => { false, ); const { summary } = await containerRuntime.summarize({ fullTree: true }); - const blob: SummaryObject | undefined = summary.tree[recentBatchInfoBlobName]; + const blob = summary.tree[recentBatchInfoBlobName]; assert(blob.type === SummaryType.Blob, "Expected blob"); assert.equal(blob.content, '[[123,"batchId1"]]', "Expected single batchId mapping"); @@ -2424,7 +2423,7 @@ describe("Runtime", () => { ["missingDataStore"], ); assert.deepStrictEqual( - snapshotTree.trees[".channels"]?.trees.missingDataStore, + snapshotTree.trees[".channels"].trees.missingDataStore, snapshot.snapshotTree, "snapshot should be equal", ); diff --git a/packages/runtime/container-runtime/src/test/gc/gcStats.spec.ts b/packages/runtime/container-runtime/src/test/gc/gcStats.spec.ts index ec8b1b66741c..5c80af22601b 100644 --- a/packages/runtime/container-runtime/src/test/gc/gcStats.spec.ts +++ b/packages/runtime/container-runtime/src/test/gc/gcStats.spec.ts @@ -276,7 +276,7 @@ describe("Garbage Collection Stats", () => { ); // Add 2 new nodes and make one of them unreferenced. 
- defaultGCData.gcNodes["/"]?.push(nodes[4]); + defaultGCData.gcNodes["/"].push(nodes[4]); defaultGCData.gcNodes[nodes[4]] = []; defaultGCData.gcNodes[nodes[5]] = []; diff --git a/packages/runtime/id-compressor/src/test/idCompressor.spec.ts b/packages/runtime/id-compressor/src/test/idCompressor.spec.ts index acc2e659efe3..5b5100244844 100644 --- a/packages/runtime/id-compressor/src/test/idCompressor.spec.ts +++ b/packages/runtime/id-compressor/src/test/idCompressor.spec.ts @@ -1115,7 +1115,7 @@ describe("IdCompressor", () => { const uuids = new Set(); for (const [i, data1] of log1.entries()) { const id1 = compressor1.normalizeToOpSpace(data1.id); - const id2 = compressor2.normalizeToOpSpace(log2[i]?.id); + const id2 = compressor2.normalizeToOpSpace(log2[i].id); assert(isFinalId(id1)); ids.add(id1); assert.equal(id1, id2); diff --git a/packages/runtime/runtime-utils/src/test/summaryUtils.spec.ts b/packages/runtime/runtime-utils/src/test/summaryUtils.spec.ts index a6cc900b63f6..17527dcdbd03 100644 --- a/packages/runtime/runtime-utils/src/test/summaryUtils.spec.ts +++ b/packages/runtime/runtime-utils/src/test/summaryUtils.spec.ts @@ -411,7 +411,7 @@ describe("Summary Utils", () => { const blobContent = "testBlobContent"; summaryTreeBuilder.addBlob("testBlob", blobContent); const summaryTree = summaryTreeBuilder.summary; - const blob: SummaryObject | undefined = summaryTree.tree.testBlob; + const blob = summaryTree.tree.testBlob; assert.strictEqual(blob.type, SummaryType.Blob); assert.strictEqual(blob.content, blobContent); }); @@ -432,7 +432,7 @@ describe("Summary Utils", () => { const handle = "testHandle"; summaryTreeBuilder.addHandle("testHandleKey", SummaryType.Tree, handle); const summaryTree = summaryTreeBuilder.summary; - const handleObject: SummaryObject | undefined = summaryTree.tree.testHandleKey; + const handleObject = summaryTree.tree.testHandleKey; assert.strictEqual(handleObject.type, SummaryType.Handle); assert.strictEqual(handleObject.handleType, 
SummaryType.Tree); assert.strictEqual(handleObject.handle, handle); @@ -453,7 +453,7 @@ describe("Summary Utils", () => { const attachmentId = "testAttachmentId"; summaryTreeBuilder.addAttachment(attachmentId); const summaryTree = summaryTreeBuilder.summary; - const attachment: SummaryObject | undefined = summaryTree.tree["0"]; + const attachment = summaryTree.tree["0"]; assert.strictEqual(attachment.type, SummaryType.Attachment); assert.strictEqual(attachment.id, attachmentId); }); @@ -473,7 +473,7 @@ describe("Summary Utils", () => { }; summaryTreeBuilder.addWithStats("testKey", summarizeResult); const summaryTree = summaryTreeBuilder.summary; - const subTree: SummaryObject | undefined = summaryTree.tree.testKey; + const subTree = summaryTree.tree.testKey; assert.strictEqual(subTree.type, SummaryType.Tree); const stats = summaryTreeBuilder.stats; assert.strictEqual(stats.blobNodeCount, 1); @@ -491,7 +491,7 @@ describe("Summary Utils", () => { const stats = summaryTreeWithStats.stats; assert.strictEqual(stats.blobNodeCount, 1); assert.strictEqual(stats.totalBlobSize, blobContent.length); - assert.strictEqual(summaryTree.tree.testBlob?.type, SummaryType.Blob); + assert.strictEqual(summaryTree.tree.testBlob.type, SummaryType.Blob); }); }); }); diff --git a/packages/test/test-drivers/src/odspTestDriver.ts b/packages/test/test-drivers/src/odspTestDriver.ts index ac08cd4cfd3d..3e53ca3a0e6d 100644 --- a/packages/test/test-drivers/src/odspTestDriver.ts +++ b/packages/test/test-drivers/src/odspTestDriver.ts @@ -100,8 +100,9 @@ export function getOdspCredentials( const tenants: LoginTenants = JSON.parse(loginTenants); const tenantNames = Object.keys(tenants); const tenant = tenantNames[tenantIndex % tenantNames.length]; + const tenantInfo = tenants[tenant]; // Translate all the user from that user to the full user principle name by appending the tenant domain - const range = tenants[tenant]?.range; + const range = tenantInfo.range; // Return the set of account to choose 
from a single tenant for (let i = 0; i < range.count; i++) { diff --git a/packages/test/test-drivers/src/routerliciousTestDriver.ts b/packages/test/test-drivers/src/routerliciousTestDriver.ts index 9aab0f4916af..5136522289de 100644 --- a/packages/test/test-drivers/src/routerliciousTestDriver.ts +++ b/packages/test/test-drivers/src/routerliciousTestDriver.ts @@ -87,8 +87,7 @@ function getLegacyConfigFromEnv() { } function getEndpointConfigFromEnv(r11sEndpointName: RouterliciousEndpoint) { - const configStr: string | undefined = - process.env[`fluid__test__driver__${r11sEndpointName}`]; + const configStr = process.env[`fluid__test__driver__${r11sEndpointName}`]; if (r11sEndpointName === "docker") { const dockerDriverPolicies = configStr === undefined ? configStr : JSON.parse(configStr).driverPolicies; diff --git a/packages/test/test-service-load/src/FileLogger.ts b/packages/test/test-service-load/src/FileLogger.ts index b015f3e8ef84..5050a6f7c85b 100644 --- a/packages/test/test-service-load/src/FileLogger.ts +++ b/packages/test/test-service-load/src/FileLogger.ts @@ -89,7 +89,7 @@ class FileLogger implements ITelemetryBufferedLogger { event.category = event.testCategoryOverride; } else if ( typeof event.message === "string" && - event.message?.includes("FaultInjectionNack") + event.message.includes("FaultInjectionNack") ) { event.category = "generic"; } diff --git a/packages/tools/devtools/devtools-core/src/data-visualization/DataVisualization.ts b/packages/tools/devtools/devtools-core/src/data-visualization/DataVisualization.ts index 01670ad64af9..9d425a008a3c 100644 --- a/packages/tools/devtools/devtools-core/src/data-visualization/DataVisualization.ts +++ b/packages/tools/devtools/devtools-core/src/data-visualization/DataVisualization.ts @@ -263,8 +263,7 @@ export class DataVisualizerGraph this.visualizers[sharedObject.attributes.type] ?? 
visualizeUnknownSharedObject; // Create visualizer node for the shared object - const editorFunction: EditSharedObject | undefined = - this.editors[sharedObject.attributes.type]; + const editorFunction = this.editors[sharedObject.attributes.type]; const visualizerNode = new VisualizerNode( sharedObject, diff --git a/packages/tools/devtools/devtools-view/src/components/graphs/DynamicComposedChart.tsx b/packages/tools/devtools/devtools-view/src/components/graphs/DynamicComposedChart.tsx index 9b00b58cc79b..10adcfa87645 100644 --- a/packages/tools/devtools/devtools-view/src/components/graphs/DynamicComposedChart.tsx +++ b/packages/tools/devtools/devtools-view/src/components/graphs/DynamicComposedChart.tsx @@ -62,7 +62,7 @@ const mergeDataSets = (dataSets: GraphDataSet[]): DataPoint[] => { for (const dataSet of dataSets) { const { yAxisDataKey, xAxisDataKey, uuid } = dataSet.schema; for (const dataPoint of dataSet.data) { - const xAxisDataPoint: string | number | undefined = dataPoint[xAxisDataKey]; + const xAxisDataPoint = dataPoint[xAxisDataKey]; xAxisDataPointToYAxisDataPointMap[xAxisDataPoint] = { ...xAxisDataPointToYAxisDataPointMap[xAxisDataPoint], [uuid]: dataPoint[yAxisDataKey], From fdd88e4a855baf1e28f9e34220652f57e9b17175 Mon Sep 17 00:00:00 2001 From: "Craig Macomber (Microsoft)" <42876482+CraigMacomber@users.noreply.github.com> Date: Wed, 8 Jan 2025 12:50:12 -0800 Subject: [PATCH 16/27] Stop tagging asserts in server (#23505) ## Description Stop tagging asserts in server, and replace existing ones with a non-tagging assert function. 
--- fluidBuild.config.cjs | 7 +----- .../packages/protocol-base/src/quorum.ts | 23 ++++++++++++++----- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/fluidBuild.config.cjs b/fluidBuild.config.cjs index c36e1c6c0cef..4208e9b439fd 100644 --- a/fluidBuild.config.cjs +++ b/fluidBuild.config.cjs @@ -584,12 +584,7 @@ module.exports = { }, assertTagging: { - enabledPaths: [ - /^common\/lib\/common-utils/i, - /^experimental/i, - /^packages/i, - /^server\/routerlicious\/packages\/protocol-base/i, - ], + enabledPaths: [/^common\/lib\/common-utils/i, /^experimental/i, /^packages/i], assertionFunctions: { assert: 1, }, diff --git a/server/routerlicious/packages/protocol-base/src/quorum.ts b/server/routerlicious/packages/protocol-base/src/quorum.ts index c5ce176e88c2..1b7692f31d9e 100644 --- a/server/routerlicious/packages/protocol-base/src/quorum.ts +++ b/server/routerlicious/packages/protocol-base/src/quorum.ts @@ -6,7 +6,7 @@ import events_pkg from "events_pkg"; const { EventEmitter } = events_pkg; -import { assert, TypedEventEmitter } from "@fluidframework/common-utils"; +import { TypedEventEmitter } from "@fluidframework/common-utils"; import { ICommittedProposal, IQuorum, @@ -17,6 +17,17 @@ import { ISequencedProposal, } from "@fluidframework/protocol-definitions"; +/** + * Throws if condition is false. + * @privateRemarks + * TODO: Migrate this to a common assert pattern or library for server code. + */ +function assert(condition: boolean, message: string): asserts condition { + if (!condition) { + throw new Error(message); + } +} + /** * Structure for tracking proposals that have been sequenced but not approved yet. 
*/ @@ -96,8 +107,8 @@ export class QuorumClients * Adds a new client to the quorum */ public addMember(clientId: string, details: ISequencedClient) { - assert(!!clientId, 0x46f /* clientId has to be non-empty string */); - assert(!this.members.has(clientId), 0x1ce /* clientId not found */); + assert(!!clientId, "clientId has to be non-empty string"); + assert(!this.members.has(clientId), "clientId not found"); this.members.set(clientId, details); this.emit("addMember", clientId, details); @@ -109,8 +120,8 @@ export class QuorumClients * Removes a client from the quorum */ public removeMember(clientId: string) { - assert(!!clientId, 0x470 /* clientId has to be non-empty string */); - assert(this.members.has(clientId), 0x1cf /* clientId not found */); + assert(!!clientId, "clientId has to be non-empty string"); + assert(this.members.has(clientId), "clientId not found"); this.members.delete(clientId); this.emit("removeMember", clientId); @@ -319,7 +330,7 @@ export class QuorumProposals local: boolean, clientSequenceNumber: number, ) { - assert(!this.proposals.has(sequenceNumber), 0x1d0 /* sequenceNumber not found */); + assert(!this.proposals.has(sequenceNumber), "sequenceNumber not found"); const proposal = new PendingProposal(sequenceNumber, key, value, local); this.proposals.set(sequenceNumber, proposal); From 352834c2470301311a27bc086f7dada1549425ae Mon Sep 17 00:00:00 2001 From: Andrei Zenkovitch Date: Wed, 8 Jan 2025 12:51:09 -0800 Subject: [PATCH 17/27] Change shares api path to match other use cases (#23434) ## Description odsp-driver calls '/shares' API to redeem access to file referenced by redeemable sharing link. I am changing API path to include '/driveItem' in it. Technically it is not required for executing redeem operation. However, we have other cases that use '/shares' API and do require to specify '/driveItem' in order to get specific drive item properties. 
The reason this matters is when caller of this API must possess logical permissions to call this API (for instance, this will be the case when call is made with app-only token) then two separate logical permissions are needed for the '/shares' call with and without '/driveItem'. ## Reviewer Guidance The review process is outlined on [this wiki page](https://github.com/microsoft/FluidFramework/wiki/PR-Guidelines#guidelines). --------- Co-authored-by: Andrei Zenkovitch --- packages/drivers/odsp-driver/src/fetchSnapshot.ts | 15 +++++++-------- packages/drivers/odsp-driver/src/getFileLink.ts | 5 ++--- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/packages/drivers/odsp-driver/src/fetchSnapshot.ts b/packages/drivers/odsp-driver/src/fetchSnapshot.ts index c1de311e75ec..07d673494891 100644 --- a/packages/drivers/odsp-driver/src/fetchSnapshot.ts +++ b/packages/drivers/odsp-driver/src/fetchSnapshot.ts @@ -150,13 +150,8 @@ export async function fetchSnapshotWithRedeem( // eslint-disable-next-line @typescript-eslint/no-unsafe-argument if (enableRedeemFallback && isRedeemSharingLinkError(odspResolvedUrl, error)) { // Execute the redeem fallback + await redeemSharingLink(odspResolvedUrl, storageTokenFetcher, logger); - await redeemSharingLink( - odspResolvedUrl, - storageTokenFetcher, - logger, - forceAccessTokenViaAuthorizationHeader, - ); const odspResolvedUrlWithoutShareLink: IOdspResolvedUrl = { ...odspResolvedUrl, shareLinkInfo: { @@ -213,7 +208,6 @@ async function redeemSharingLink( odspResolvedUrl: IOdspResolvedUrl, getAuthHeader: InstrumentedStorageTokenFetcher, logger: ITelemetryLoggerExt, - forceAccessTokenViaAuthorizationHeader: boolean, ): Promise { await PerformanceEvent.timedExecAsync( logger, @@ -233,7 +227,12 @@ async function redeemSharingLink( let redeemUrl: string | undefined; async function callSharesAPI(baseUrl: string): Promise { await getWithRetryForTokenRefresh(async (tokenFetchOptions) => { - redeemUrl = 
`${baseUrl}/_api/v2.0/shares/${encodedShareUrl}`; + // IMPORTANT: Note that redeemUrl has '/driveItem' in it. Technically it is not required for executing redeem operation. + // However, we have other cases that use '/shares' API and do require to specify '/driveItem' in order to get specific + // drive item properties. The reason this matters is when caller of this API must possess logical permissions to call + // this API (for instance, this will be the case when call is made with app-only token) then two separate logical + // permissions are needed for the '/shares' call with and without '/driveItem'. + redeemUrl = `${baseUrl}/_api/v2.0/shares/${encodedShareUrl}/driveItem`; const url = redeemUrl; const method = "GET"; const authHeader = await getAuthHeader( diff --git a/packages/drivers/odsp-driver/src/getFileLink.ts b/packages/drivers/odsp-driver/src/getFileLink.ts index 9841655b5543..8a47fbfa542f 100644 --- a/packages/drivers/odsp-driver/src/getFileLink.ts +++ b/packages/drivers/odsp-driver/src/getFileLink.ts @@ -102,7 +102,7 @@ export async function getFileLink( } /** - * Handles location redirection while fulfilling the getFilelink call. We don't want browser to handle + * Handles location redirection while fulfilling the getFileLink call. We don't want browser to handle * the redirection as the browser will retry with same auth token which will not work as we need app * to regenerate tokens for the new site domain. So when we will make the network calls below we will set * the redirect:manual header to manually handle these redirects. 
@@ -125,7 +125,7 @@ async function getFileLinkWithLocationRedirectionHandling( let locationRedirected = false; for (let count = 1; count <= 5; count++) { try { - const fileItem = await getFileItemLite(getToken, resolvedUrl, logger, true); + const fileItem = await getFileItemLite(getToken, resolvedUrl, logger); // Sometimes the siteUrl in the actual file is different from the siteUrl in the resolvedUrl due to location // redirection. This creates issues in the getSharingInformation call. So we need to update the siteUrl in the // resolvedUrl to the siteUrl in the fileItem which is the updated siteUrl. @@ -260,7 +260,6 @@ async function getFileItemLite( getToken: TokenFetcher, odspUrlParts: IOdspUrlParts, logger: ITelemetryLoggerExt, - forceAccessTokenViaAuthorizationHeader: boolean, ): Promise { return PerformanceEvent.timedExecAsync( logger, From ddce27086fd037969ff396dc1ee4d9d9c7e25229 Mon Sep 17 00:00:00 2001 From: Andrei Zenkovitch Date: Wed, 8 Jan 2025 12:53:44 -0800 Subject: [PATCH 18/27] Allow for passing client display name used when attributing changes to the current client in the collab session (#23422) Allow for passing client display name used when attributing changes to the current client in the collab session ## Description This change introduces support for overriding the client display name used when attributing changes to the current client in the collab session. This can be used in scenarios where the client display name should be different from the default client display name, for instance, when client is running in app-only context. This is achieved by adding a new optional property `displayName` to the `ICollabSessionOptions` interface. This interface is passed inside `HostStoragePolicy` which is specified when constructing document service factory. If `displayName` is specified, then it is used when establishing collab session and passed in the body of /joinSession request to Push service. 
--------- Co-authored-by: Andrei Zenkovitch --- ...dsp-driver-definitions.legacy.alpha.api.md | 1 + .../odsp-driver-definitions/src/factory.ts | 7 ++++++ .../src/odspDelayLoadedDeltaStream.ts | 13 +++++++++-- packages/drivers/odsp-driver/src/vroom.ts | 22 ++++++++++++------- .../test/test-drivers/src/odspDriverApi.ts | 1 + 5 files changed, 34 insertions(+), 10 deletions(-) diff --git a/packages/drivers/odsp-driver-definitions/api-report/odsp-driver-definitions.legacy.alpha.api.md b/packages/drivers/odsp-driver-definitions/api-report/odsp-driver-definitions.legacy.alpha.api.md index 3e904b65808b..9bb2d432fb03 100644 --- a/packages/drivers/odsp-driver-definitions/api-report/odsp-driver-definitions.legacy.alpha.api.md +++ b/packages/drivers/odsp-driver-definitions/api-report/odsp-driver-definitions.legacy.alpha.api.md @@ -37,6 +37,7 @@ export interface ICacheEntry extends IEntry { // @alpha (undocumented) export interface ICollabSessionOptions { + displayName?: string; // @deprecated forceAccessTokenViaAuthorizationHeader?: boolean; // @deprecated diff --git a/packages/drivers/odsp-driver-definitions/src/factory.ts b/packages/drivers/odsp-driver-definitions/src/factory.ts index cc579baacd12..8673dae115c2 100644 --- a/packages/drivers/odsp-driver-definitions/src/factory.ts +++ b/packages/drivers/odsp-driver-definitions/src/factory.ts @@ -83,6 +83,13 @@ export interface ICollabSessionOptions { * @deprecated Due to security reasons we will be passing the token via Authorization header only. */ forceAccessTokenViaAuthorizationHeader?: boolean; + /** + * Value indicating the client display name for current session. + * This name will be used in attribution associated with edits made during session. + * This is optional and used only when collab session is being joined by client acting in app-only mode (i.e. without user context). + * If not specified client display name is extracted from the access token that is used to join session. 
+ */ + displayName?: string; } /** diff --git a/packages/drivers/odsp-driver/src/odspDelayLoadedDeltaStream.ts b/packages/drivers/odsp-driver/src/odspDelayLoadedDeltaStream.ts index 995552b99b9b..05bcdab8b4ee 100644 --- a/packages/drivers/odsp-driver/src/odspDelayLoadedDeltaStream.ts +++ b/packages/drivers/odsp-driver/src/odspDelayLoadedDeltaStream.ts @@ -91,7 +91,7 @@ export class OdspDelayLoadedDeltaStream { | undefined, private readonly mc: MonitoringContext, private readonly cache: IOdspCache, - _hostPolicy: HostStoragePolicy, + private readonly hostPolicy: HostStoragePolicy, private readonly epochTracker: EpochTracker, private readonly opsReceived: (ops: ISequencedDocumentMessage[]) => void, private readonly metadataUpdateHandler: (metadata: Record) => void, @@ -156,6 +156,8 @@ export class OdspDelayLoadedDeltaStream { requestWebsocketTokenFromJoinSession, options, false /* isRefreshingJoinSession */, + undefined /* clientId */, + this.hostPolicy.sessionOptions?.displayName, ); const [websocketEndpoint, websocketToken] = await Promise.all([ joinSessionPromise.catch(annotateAndRethrowConnectionError("joinSession")), @@ -273,6 +275,7 @@ export class OdspDelayLoadedDeltaStream { delta: number, requestSocketToken: boolean, clientId: string | undefined, + displayName: string | undefined, ): Promise { if (this.joinSessionRefreshTimer !== undefined) { this.clearJoinSessionTimer(); @@ -301,6 +304,7 @@ export class OdspDelayLoadedDeltaStream { options, true /* isRefreshingJoinSession */, clientId, + displayName, ); resolve(); }).catch((error) => { @@ -314,7 +318,8 @@ export class OdspDelayLoadedDeltaStream { requestSocketToken: boolean, options: TokenFetchOptionsEx, isRefreshingJoinSession: boolean, - clientId?: string, + clientId: string | undefined, + displayName: string | undefined, ): Promise { // If this call is to refresh the join session for the current connection but we are already disconnected in // the meantime or disconnected and then reconnected then do not 
make the call. However, we should not have @@ -342,6 +347,7 @@ export class OdspDelayLoadedDeltaStream { requestSocketToken, options, isRefreshingJoinSession, + displayName, ).catch((error) => { if (hasFacetCodes(error) && error.facetCodes !== undefined) { for (const code of error.facetCodes) { @@ -378,6 +384,7 @@ export class OdspDelayLoadedDeltaStream { requestSocketToken: boolean, options: TokenFetchOptionsEx, isRefreshingJoinSession: boolean, + displayName: string | undefined, ): Promise { const disableJoinSessionRefresh = this.mc.config.getBoolean( "Fluid.Driver.Odsp.disableJoinSessionRefresh", @@ -397,6 +404,7 @@ export class OdspDelayLoadedDeltaStream { options, disableJoinSessionRefresh, isRefreshingJoinSession, + displayName, ); // Emit event only in case it is fetched from the network. if (joinSessionResponse.sensitivityLabelsInfo !== undefined) { @@ -450,6 +458,7 @@ export class OdspDelayLoadedDeltaStream { response.refreshAfterDeltaMs, requestSocketToken, this.currentConnection?.clientId, + displayName, ).catch((error) => { // Log the error and do nothing as the reconnection would fetch the join session. this.mc.logger.sendTelemetryEvent( diff --git a/packages/drivers/odsp-driver/src/vroom.ts b/packages/drivers/odsp-driver/src/vroom.ts index 93d777f742e7..b0ad5a2fdcad 100644 --- a/packages/drivers/odsp-driver/src/vroom.ts +++ b/packages/drivers/odsp-driver/src/vroom.ts @@ -21,8 +21,8 @@ import { TokenFetchOptionsEx } from "./odspUtils.js"; import { runWithRetry } from "./retryUtils.js"; interface IJoinSessionBody { - requestSocketToken: boolean; - guestDisplayName?: string; + requestSocketToken?: boolean; + displayName?: string; } /** @@ -38,8 +38,9 @@ interface IJoinSessionBody { * @param options - Options to fetch the token. * @param disableJoinSessionRefresh - Whether the caller wants to disable refreshing join session periodically. * @param isRefreshingJoinSession - whether call is to refresh the session before expiry. 
- * @param guestDisplayName - display name used to identify guest user joining a session. - * This is optional and used only when collab session is being joined via invite. + * @param displayName - display name used to identify client joining a session. + * This is optional and used only when collab session is being joined by client acting in app-only mode (i.e. without user context). + * If not specified client display name is extracted from the access token that is used to join session. */ export async function fetchJoinSession( urlParts: IOdspUrlParts, @@ -52,6 +53,7 @@ export async function fetchJoinSession( options: TokenFetchOptionsEx, disableJoinSessionRefresh: boolean | undefined, isRefreshingJoinSession: boolean, + displayName: string | undefined, ): Promise { const apiRoot = getApiRoot(new URL(urlParts.siteUrl)); const url = `${apiRoot}/drives/${urlParts.driveId}/items/${urlParts.itemId}/${path}?ump=1`; @@ -89,11 +91,15 @@ export async function fetchJoinSession( } postBody += `_post: 1\r\n`; + let requestBody: IJoinSessionBody | undefined; if (requestSocketToken) { - const body: IJoinSessionBody = { - requestSocketToken: true, - }; - postBody += `\r\n${JSON.stringify(body)}\r\n`; + requestBody = { ...requestBody, requestSocketToken: true }; + } + if (displayName) { + requestBody = { ...requestBody, displayName }; + } + if (requestBody) { + postBody += `\r\n${JSON.stringify(requestBody)}\r\n`; } postBody += `\r\n--${formBoundary}--`; const headers: { [index: string]: string } = { diff --git a/packages/test/test-drivers/src/odspDriverApi.ts b/packages/test/test-drivers/src/odspDriverApi.ts index b5e05023bd47..f92997e5fd67 100644 --- a/packages/test/test-drivers/src/odspDriverApi.ts +++ b/packages/test/test-drivers/src/odspDriverApi.ts @@ -57,6 +57,7 @@ const odspOpsCaching: OptionsMatrix = { const odspSessionOptions: OptionsMatrix = { unauthenticatedUserDisplayName: [undefined], forceAccessTokenViaAuthorizationHeader: [undefined], + displayName: 
[undefined], }; /** From 62836c050b33a49580badc81693f3bdb748bf0f3 Mon Sep 17 00:00:00 2001 From: "Craig Macomber (Microsoft)" <42876482+CraigMacomber@users.noreply.github.com> Date: Wed, 8 Jan 2025 14:12:47 -0800 Subject: [PATCH 19/27] Mark build output "sideEffects": false (#23501) ## Description During the build we copy package.json files into the output for CJS builds and some ESM builds. These files lacked `"sideEffects": false` resulting in poor tree shaking for any bundle using build output with a copied package.json. This included the tree package ESM build which caused its bundle to bloat. This package.json copy has been removed from the tree package's esm build, but also updated to include `"sideEffects": false` for builds that use it. --- common/build/build-common/src/cjs/package.json | 3 ++- common/build/build-common/src/esm/package.json | 3 ++- packages/dds/tree/package.json | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/common/build/build-common/src/cjs/package.json b/common/build/build-common/src/cjs/package.json index a0df0c867783..a3de646b18a3 100644 --- a/common/build/build-common/src/cjs/package.json +++ b/common/build/build-common/src/cjs/package.json @@ -1,3 +1,4 @@ { - "type": "commonjs" + "type": "commonjs", + "sideEffects": false } diff --git a/common/build/build-common/src/esm/package.json b/common/build/build-common/src/esm/package.json index bedb411a9124..330f44b1b29e 100644 --- a/common/build/build-common/src/esm/package.json +++ b/common/build/build-common/src/esm/package.json @@ -1,3 +1,4 @@ { - "type": "module" + "type": "module", + "sideEffects": false } diff --git a/packages/dds/tree/package.json b/packages/dds/tree/package.json index 455befd11e50..2f54527ad1f4 100644 --- a/packages/dds/tree/package.json +++ b/packages/dds/tree/package.json @@ -79,7 +79,7 @@ "build:commonjs": "fluid-build . --task commonjs", "build:compile": "fluid-build . 
--task compile", "build:docs": "api-extractor run --local", - "build:esnext": "tsc --project ./tsconfig.json && copyfiles -f ../../../common/build/build-common/src/esm/package.json ./lib", + "build:esnext": "tsc --project ./tsconfig.json", "build:genver": "gen-version", "build:test": "npm run build:test:esm && npm run build:test:cjs", "build:test:cjs": "fluid-tsc commonjs --project ./src/test/tsconfig.cjs.json", From 1047ebb475e71602b56613f127c893545de34e1d Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Wed, 8 Jan 2025 14:27:26 -0800 Subject: [PATCH 20/27] refactor: Make array parameters readonly and clean up stale documentation (#23513) Updates render APIs to accept arrays as readonly. Also cleans up stale API documentation. --- .../api-markdown-documenter.alpha.api.md | 4 ++-- .../api-markdown-documenter.beta.api.md | 2 +- .../api-markdown-documenter.public.api.md | 2 +- .../api-markdown-documenter/src/RenderHtml.ts | 21 +------------------ .../src/RenderMarkdown.ts | 21 +------------------ .../api-item-transforms/TransformApiModel.ts | 13 ------------ .../configuration/DocumentationSuite.ts | 8 +++++-- 7 files changed, 12 insertions(+), 59 deletions(-) diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md index 4ee7702592a4..db1edbf11376 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.alpha.api.md @@ -672,14 +672,14 @@ export interface RenderDocumentAsHtmlConfiguration extends ToHtmlConfiguration, } // @alpha -function renderDocumentsAsHtml(documents: DocumentNode[], options: RenderDocumentsAsHtmlOptions): Promise; +function renderDocumentsAsHtml(documents: readonly DocumentNode[], options: RenderDocumentsAsHtmlOptions): Promise; // @alpha interface RenderDocumentsAsHtmlOptions extends 
RenderDocumentAsHtmlConfiguration, FileSystemConfiguration { } // @public -function renderDocumentsAsMarkdown(documents: DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; +function renderDocumentsAsMarkdown(documents: readonly DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; // @public interface RenderDocumentsAsMarkdownOptions extends MarkdownRenderConfiguration, FileSystemConfiguration { diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md index 1b6136ac16b2..01064319fa02 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.beta.api.md @@ -665,7 +665,7 @@ export interface RenderDocumentAsHtmlConfiguration extends ToHtmlConfiguration, } // @public -function renderDocumentsAsMarkdown(documents: DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; +function renderDocumentsAsMarkdown(documents: readonly DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; // @public interface RenderDocumentsAsMarkdownOptions extends MarkdownRenderConfiguration, FileSystemConfiguration { diff --git a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md index 73549aafc554..5535ba42b2cb 100644 --- a/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md +++ b/tools/api-markdown-documenter/api-report/api-markdown-documenter.public.api.md @@ -643,7 +643,7 @@ export interface RenderDocumentAsHtmlConfiguration extends ToHtmlConfiguration, } // @public -function renderDocumentsAsMarkdown(documents: DocumentNode[], options: RenderDocumentsAsMarkdownOptions): Promise; +function renderDocumentsAsMarkdown(documents: readonly DocumentNode[], options: 
RenderDocumentsAsMarkdownOptions): Promise; // @public interface RenderDocumentsAsMarkdownOptions extends MarkdownRenderConfiguration, FileSystemConfiguration { diff --git a/tools/api-markdown-documenter/src/RenderHtml.ts b/tools/api-markdown-documenter/src/RenderHtml.ts index e480429ed9aa..46074df76858 100644 --- a/tools/api-markdown-documenter/src/RenderHtml.ts +++ b/tools/api-markdown-documenter/src/RenderHtml.ts @@ -28,22 +28,6 @@ export interface RenderApiModelAsHtmlOptions /** * Renders the provided model and its contents, and writes each document to a file on disk. * - * @remarks - * - * Which API members get their own documents and which get written to the contents of their parent is - * determined by {@link DocumentationSuiteConfiguration.documentBoundaries}. - * - * The file paths under which the files will be generated is determined by the provided output path and the - * following configuration properties: - * - * - {@link DocumentationSuiteConfiguration.documentBoundaries} - * - {@link DocumentationSuiteConfiguration.hierarchyBoundaries} - * - * @param transformConfig - Configuration for transforming API items into {@link DocumentationNode}s. - * @param renderConfig - Configuration for rendering {@link DocumentNode}s as HTML. - * @param fileSystemConfig - Configuration for writing document files to disk. - * @param logger - Receiver of system log data. Default: {@link defaultConsoleLogger}. - * * @alpha */ export async function renderApiModelAsHtml(options: RenderApiModelAsHtmlOptions): Promise { @@ -66,14 +50,11 @@ export interface RenderDocumentsAsHtmlOptions * * @param documents - The documents to render. Each will be rendered to its own file on disk per * {@link DocumentNode.documentPath} (relative to the provided output directory). - * @param renderConfig - Configuration for rendering {@link DocumentNode}s as HTML. - * @param fileSystemConfig - Configuration for writing document files to disk. - * @param logger - Receiver of system log data. 
Default: {@link defaultConsoleLogger}. * * @alpha */ export async function renderDocumentsAsHtml( - documents: DocumentNode[], + documents: readonly DocumentNode[], options: RenderDocumentsAsHtmlOptions, ): Promise { const { logger, newlineKind, outputDirectoryPath } = options; diff --git a/tools/api-markdown-documenter/src/RenderMarkdown.ts b/tools/api-markdown-documenter/src/RenderMarkdown.ts index 7277cd4b154f..555b73e6eb8e 100644 --- a/tools/api-markdown-documenter/src/RenderMarkdown.ts +++ b/tools/api-markdown-documenter/src/RenderMarkdown.ts @@ -28,22 +28,6 @@ export interface RenderApiModelAsMarkdownOptions /** * Renders the provided model and its contents, and writes each document to a file on disk. * - * @remarks - * - * Which API members get their own documents and which get written to the contents of their parent is - * determined by {@link DocumentationSuiteConfiguration.documentBoundaries}. - * - * The file paths under which the files will be generated is determined by the provided output path and the - * following configuration properties: - * - * - {@link DocumentationSuiteConfiguration.documentBoundaries} - * - {@link DocumentationSuiteConfiguration.hierarchyBoundaries} - * - * @param transformConfig - Configuration for transforming API items into {@link DocumentationNode}s. - * @param renderConfig - Configuration for rendering {@link DocumentNode}s as Markdown. - * @param fileSystemConfig - Configuration for writing document files to disk. - * @param logger - Receiver of system log data. Default: {@link defaultConsoleLogger}. - * * @public */ export async function renderApiModelAsMarkdown( @@ -68,14 +52,11 @@ export interface RenderDocumentsAsMarkdownOptions * * @param documents - The documents to render. Each will be rendered to its own file on disk per * {@link DocumentNode.documentPath} (relative to the provided output directory). - * @param renderConfig - Configuration for rendering {@link DocumentNode}s as Markdown. 
- * @param fileSystemConfig - Configuration for writing document files to disk. - * @param logger - Receiver of system log data. Default: {@link defaultConsoleLogger}. * * @public */ export async function renderDocumentsAsMarkdown( - documents: DocumentNode[], + documents: readonly DocumentNode[], options: RenderDocumentsAsMarkdownOptions, ): Promise { const { logger, newlineKind, outputDirectoryPath } = options; diff --git a/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts b/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts index 947a420fa332..146046ac73c9 100644 --- a/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts +++ b/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts @@ -26,19 +26,6 @@ import { createBreadcrumbParagraph, createEntryPointList, wrapInSection } from " /** * Renders the provided model and its contents to a series of {@link DocumentNode}s. * - * @remarks - * - * Which API members get their own documents and which get written to the contents of their parent is - * determined by {@link DocumentationSuiteConfiguration.documentBoundaries}. - * - * The generated nodes' {@link DocumentNode.documentPath}s are determined by the provided output path and the - * following configuration properties: - * - * - {@link DocumentationSuiteConfiguration.documentBoundaries} - * - {@link DocumentationSuiteConfiguration.hierarchyBoundaries} - * - * @param options - Options for transforming API items into {@link DocumentationNode}s. 
- * * @public */ export function transformApiModel(options: ApiItemTransformationOptions): DocumentNode[] { diff --git a/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts b/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts index 711bc396bbe8..2e2278518476 100644 --- a/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts +++ b/tools/api-markdown-documenter/src/api-item-transforms/configuration/DocumentationSuite.ts @@ -332,7 +332,11 @@ export namespace DefaultDocumentationSuiteOptions { /** * Default {@link DocumentationSuiteConfiguration.getLinkTextForItem}. * - * Uses the item's signature, except for `Model` items, in which case the text "Packages" is displayed. + * Uses the item's qualified API name, but is handled differently for the following items: + * + * - CallSignature, ConstructSignature, IndexSignature: Uses a cleaned up variation on the type signature. + * + * - Model: Uses "API Overview". */ export function defaultGetLinkTextForItem(apiItem: ApiItem): string { const itemKind = getApiItemKind(apiItem); @@ -356,7 +360,7 @@ export namespace DefaultDocumentationSuiteOptions { } /** - * Default {@link DocumentationSuiteConfiguration.getHeadingTextForItem}. + * Default {@link DocumentationSuiteConfiguration.getAlertsForItem}. * * Generates alerts for the following tags, if found: * From 172933c1ada3f10c77bbbb9215824c3c9f097b82 Mon Sep 17 00:00:00 2001 From: Joshua Smithrud <54606601+Josmithr@users.noreply.github.com> Date: Wed, 8 Jan 2025 14:39:24 -0800 Subject: [PATCH 21/27] improvement(api-markdown-documenter): Check for duplicate document paths in transformation output and log warning for any encountered (#23514) It is possible to configure the system to generate multiple output documents targeting the same file path. To help users detect such cases, logic has been added to check transformation output for any documents with duplicate paths. 
If any are found, a warning is logged. --- .../api-item-transforms/TransformApiModel.ts | 31 ++++++++------ .../src/api-item-transforms/Utilities.ts | 41 +++++++++++++++++++ .../src/api-item-transforms/index.ts | 1 + .../test/Utilities.test.ts | 35 ++++++++++++++++ .../src/test/EndToEndTests.ts | 15 ++----- 5 files changed, 100 insertions(+), 23 deletions(-) create mode 100644 tools/api-markdown-documenter/src/api-item-transforms/test/Utilities.test.ts diff --git a/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts b/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts index 146046ac73c9..d69a1453345a 100644 --- a/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts +++ b/tools/api-markdown-documenter/src/api-item-transforms/TransformApiModel.ts @@ -15,7 +15,7 @@ import type { DocumentNode, SectionNode } from "../documentation-domain/index.js import { doesItemRequireOwnDocument, shouldItemBeIncluded } from "./ApiItemTransformUtilities.js"; import { apiItemToDocument, apiItemToSections } from "./TransformApiItem.js"; -import { createDocument } from "./Utilities.js"; +import { checkForDuplicateDocumentPaths, createDocument } from "./Utilities.js"; import { type ApiItemTransformationConfiguration, type ApiItemTransformationOptions, @@ -37,10 +37,10 @@ export function transformApiModel(options: ApiItemTransformationOptions): Docume // If a package has multiple entry-points, it's possible for the same API item to appear under more than one // entry-point (i.e., we are traversing a graph, rather than a tree). // To avoid redundant computation, we will keep a ledger of which API items we have transformed. - const documents: Map = new Map(); + const documentsMap: Map = new Map(); // Always render Model document (this is the "root" of the generated documentation suite). 
- documents.set(apiModel, createDocumentForApiModel(apiModel, config)); + documentsMap.set(apiModel, createDocumentForApiModel(apiModel, config)); const packages = apiModel.packages; @@ -75,42 +75,49 @@ export function transformApiModel(options: ApiItemTransformationOptions): Docume const entryPoint = packageEntryPoints[0]; - documents.set( + documentsMap.set( packageItem, createDocumentForSingleEntryPointPackage(packageItem, entryPoint, config), ); const packageDocumentItems = getDocumentItems(entryPoint, config); for (const apiItem of packageDocumentItems) { - if (!documents.has(apiItem)) { - documents.set(apiItem, apiItemToDocument(apiItem, config)); + if (!documentsMap.has(apiItem)) { + documentsMap.set(apiItem, apiItemToDocument(apiItem, config)); } } } else { // If a package contains multiple entry-points, we will generate a separate document for each. // The package-level document will enumerate the entry-points. - documents.set( + documentsMap.set( packageItem, createDocumentForMultiEntryPointPackage(packageItem, packageEntryPoints, config), ); for (const entryPoint of packageEntryPoints) { - documents.set(entryPoint, createDocumentForApiEntryPoint(entryPoint, config)); + documentsMap.set(entryPoint, createDocumentForApiEntryPoint(entryPoint, config)); const packageDocumentItems = getDocumentItems(entryPoint, config); for (const apiItem of packageDocumentItems) { - if (!documents.has(apiItem)) { - documents.set(apiItem, apiItemToDocument(apiItem, config)); + if (!documentsMap.has(apiItem)) { + documentsMap.set(apiItem, apiItemToDocument(apiItem, config)); } } } } } - logger.success("API Model documents generated!"); + const documents = [...documentsMap.values()]; + + try { + checkForDuplicateDocumentPaths(documents); + } catch (error: unknown) { + logger.warning((error as Error).message); + } - return [...documents.values()]; + logger.success("API Model documents generated!"); + return documents; } /** diff --git 
a/tools/api-markdown-documenter/src/api-item-transforms/Utilities.ts b/tools/api-markdown-documenter/src/api-item-transforms/Utilities.ts index 015e93020ff2..edc906a01381 100644 --- a/tools/api-markdown-documenter/src/api-item-transforms/Utilities.ts +++ b/tools/api-markdown-documenter/src/api-item-transforms/Utilities.ts @@ -95,3 +95,44 @@ function resolveSymbolicLink( return getLinkForApiItem(resolvedReference, config); } + +/** + * Checks for duplicate {@link DocumentNode.documentPath}s among the provided set of documents. + * @throws If any duplicates are found. + */ +export function checkForDuplicateDocumentPaths(documents: readonly DocumentNode[]): void { + const documentPathMap = new Map(); + for (const document of documents) { + let entries = documentPathMap.get(document.documentPath); + if (entries === undefined) { + entries = []; + documentPathMap.set(document.documentPath, entries); + } + entries.push(document); + } + + const duplicates = [...documentPathMap.entries()].filter( + ([, documentsUnderPath]) => documentsUnderPath.length > 1, + ); + + if (duplicates.length === 0) { + return; + } + + const errorMessageLines = ["Duplicate output paths found among the generated documents:"]; + + for (const [documentPath, documentsUnderPath] of duplicates) { + errorMessageLines.push(`- ${documentPath}`); + for (const document of documentsUnderPath) { + const errorEntry = document.apiItem + ? 
`${document.apiItem.displayName} (${document.apiItem.kind})` + : "(No corresponding API item)"; + errorMessageLines.push(` - ${errorEntry}`); + } + } + errorMessageLines.push( + "Check your configuration to ensure different API items do not result in the same output path.", + ); + + throw new Error(errorMessageLines.join("\n")); +} diff --git a/tools/api-markdown-documenter/src/api-item-transforms/index.ts b/tools/api-markdown-documenter/src/api-item-transforms/index.ts index e09cef05e057..e41a85f52b11 100644 --- a/tools/api-markdown-documenter/src/api-item-transforms/index.ts +++ b/tools/api-markdown-documenter/src/api-item-transforms/index.ts @@ -43,3 +43,4 @@ export { export { transformTsdocNode } from "./TsdocNodeTransforms.js"; export { apiItemToDocument, apiItemToSections } from "./TransformApiItem.js"; export { transformApiModel } from "./TransformApiModel.js"; +export { checkForDuplicateDocumentPaths } from "./Utilities.js"; diff --git a/tools/api-markdown-documenter/src/api-item-transforms/test/Utilities.test.ts b/tools/api-markdown-documenter/src/api-item-transforms/test/Utilities.test.ts new file mode 100644 index 000000000000..ce3cc375dfd3 --- /dev/null +++ b/tools/api-markdown-documenter/src/api-item-transforms/test/Utilities.test.ts @@ -0,0 +1,35 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. 
+ */ + +import { expect } from "chai"; + +import type { DocumentNode } from "../../index.js"; +import { checkForDuplicateDocumentPaths } from "../Utilities.js"; + +describe("ApiItem to Documentation transformation utilities tests", () => { + describe("checkForDuplicateDocumentPaths", () => { + it("Empty list", () => { + expect(() => checkForDuplicateDocumentPaths([])).to.not.throw(); + }); + + it("No duplicates", () => { + const documents: DocumentNode[] = [ + { documentPath: "foo" } as unknown as DocumentNode, + { documentPath: "bar" } as unknown as DocumentNode, + { documentPath: "baz" } as unknown as DocumentNode, + ]; + expect(() => checkForDuplicateDocumentPaths(documents)).to.not.throw(); + }); + + it("Contains duplicates", () => { + const documents: DocumentNode[] = [ + { documentPath: "foo" } as unknown as DocumentNode, + { documentPath: "bar" } as unknown as DocumentNode, + { documentPath: "foo" } as unknown as DocumentNode, + ]; + expect(() => checkForDuplicateDocumentPaths(documents)).to.throw(); + }); + }); +}); diff --git a/tools/api-markdown-documenter/src/test/EndToEndTests.ts b/tools/api-markdown-documenter/src/test/EndToEndTests.ts index ec2c8f93c78f..4fccb6321cfc 100644 --- a/tools/api-markdown-documenter/src/test/EndToEndTests.ts +++ b/tools/api-markdown-documenter/src/test/EndToEndTests.ts @@ -13,8 +13,9 @@ import type { Suite } from "mocha"; import { loadModel } from "../LoadModel.js"; import { - transformApiModel, type ApiItemTransformationOptions, + checkForDuplicateDocumentPaths, + transformApiModel, } from "../api-item-transforms/index.js"; import type { DocumentNode } from "../documentation-domain/index.js"; @@ -155,16 +156,8 @@ export function endToEndTests( it("Ensure no duplicate file paths", () => { const documents = transformApiModel(apiItemTransformConfig); - const pathMap = new Map(); - for (const document of documents) { - if (pathMap.has(document.documentPath)) { - expect.fail( - `Rendering generated multiple documents to be 
rendered to the same file path.`, - ); - } else { - pathMap.set(document.documentPath, document); - } - } + // Will throw if any duplicates are found. + checkForDuplicateDocumentPaths(documents); }); // Perform actual output snapshot comparison test against checked-in test collateral. From 1824858ab590a3ecb6f21beaf80bf6f21b6942f8 Mon Sep 17 00:00:00 2001 From: Ji Kim <111468570+jikim-msft@users.noreply.github.com> Date: Wed, 8 Jan 2025 14:56:33 -0800 Subject: [PATCH 22/27] Determine branch a & b are different in clone time (#23485) #### Description [27911](https://dev.azure.com/fluidframework/internal/_workitems/edit/27911) Related PR: https://github.com/microsoft/FluidFramework/pull/23424 Enhance `TreeCheckout.clone()` with Early Branch Validation This PR adds upfront validation in `TreeCheckout`'s `clone()` method to verify that the target branch shares a common ancestry with the source branch before attempting to clone revertibles. This validation: 1. Fails fast if branches are unrelated, providing clearer error messages 2. Moves the compatibility check from `revert()` to `clone()`, giving developers earlier feedback 3. Improves developer experience by catching invalid clone attempts immediately rather than waiting until revert time The change is supported by new test cases that verify the behavior of cloned revertibles across related and unrelated branches. 
--------- Co-authored-by: yann-achard-MS <97201204+yann-achard-MS@users.noreply.github.com> --- packages/dds/tree/src/core/index.ts | 1 + packages/dds/tree/src/core/rebase/index.ts | 1 + packages/dds/tree/src/core/rebase/utils.ts | 27 ++++++++++++++++ .../dds/tree/src/shared-tree/treeCheckout.ts | 31 ++++++++++++------- .../tree/src/test/shared-tree/undo.spec.ts | 5 ++- .../src/assertionShortCodesMap.ts | 1 - 6 files changed, 52 insertions(+), 14 deletions(-) diff --git a/packages/dds/tree/src/core/index.ts b/packages/dds/tree/src/core/index.ts index 496e19aed616..975613249c8f 100644 --- a/packages/dds/tree/src/core/index.ts +++ b/packages/dds/tree/src/core/index.ts @@ -202,6 +202,7 @@ export { replaceChange, type RebaseStats, type RebaseStatsWithDuration, + isAncestor, } from "./rebase/index.js"; export { diff --git a/packages/dds/tree/src/core/rebase/index.ts b/packages/dds/tree/src/core/rebase/index.ts index 2dc5ea400a29..0c68063c0128 100644 --- a/packages/dds/tree/src/core/rebase/index.ts +++ b/packages/dds/tree/src/core/rebase/index.ts @@ -49,4 +49,5 @@ export { type RebaseStats, type RebaseStatsWithDuration, replaceChange, + isAncestor, } from "./utils.js"; diff --git a/packages/dds/tree/src/core/rebase/utils.ts b/packages/dds/tree/src/core/rebase/utils.ts index bce4c1f85d15..c2e354a18fb4 100644 --- a/packages/dds/tree/src/core/rebase/utils.ts +++ b/packages/dds/tree/src/core/rebase/utils.ts @@ -651,3 +651,30 @@ namespace Rollback { } } } + +/** + * Checks if one node is an ancestor of another in a parent-linked tree structure. 
+ * @param ancestor - The potential ancestor node + * @param descendant - The potential descendant node + * @param allowEqual - If true, returns true when ancestor === descendant + * @returns true if ancestor is an ancestor of descendant (or equal if allowEqual is true) + */ +export function isAncestor( + ancestor: TNode, + descendant: TNode, + allowEqual: boolean, +): boolean { + if (allowEqual && ancestor === descendant) { + return true; + } + + let current = descendant.parent; + while (current !== undefined) { + if (current === ancestor) { + return true; + } + current = current.parent; + } + + return false; +} diff --git a/packages/dds/tree/src/shared-tree/treeCheckout.ts b/packages/dds/tree/src/shared-tree/treeCheckout.ts index ac0f02811e80..adfa677f7d5a 100644 --- a/packages/dds/tree/src/shared-tree/treeCheckout.ts +++ b/packages/dds/tree/src/shared-tree/treeCheckout.ts @@ -41,6 +41,7 @@ import { type RevertibleAlphaFactory, type RevertibleAlpha, type GraphCommit, + isAncestor, } from "../core/index.js"; import { type FieldBatchCodec, @@ -605,21 +606,27 @@ export class TreeCheckout implements ITreeCheckoutFork { revertible.dispose(); } }, - clone: (forkedBranch: TreeBranch) => { - if (forkedBranch === undefined) { - return this.createRevertible(revision, kind, checkout, onRevertibleDisposed); - } - + clone: (targetBranch: TreeBranch) => { // TODO:#23442: When a revertible is cloned for a forked branch, optimize to create a fork of a revertible branch once per revision NOT once per revision per checkout. 
- const forkedCheckout = getCheckout(forkedBranch); + const targetCheckout = getCheckout(targetBranch); + const revertibleBranch = this.revertibleCommitBranches.get(revision); - assert( - revertibleBranch !== undefined, - 0xa82 /* change to revert does not exist on the given forked branch */, - ); - forkedCheckout.revertibleCommitBranches.set(revision, revertibleBranch.fork()); + if (revertibleBranch === undefined) { + throw new UsageError("Unable to clone a revertible that has been disposed."); + } + + const commitToRevert = revertibleBranch.getHead(); + const activeBranchHead = targetCheckout.#transaction.activeBranch.getHead(); + + if (isAncestor(commitToRevert, activeBranchHead, true) === false) { + throw new UsageError( + "Cannot clone revertible for a commit that is not present on the given branch.", + ); + } + + targetCheckout.revertibleCommitBranches.set(revision, revertibleBranch.fork()); - return this.createRevertible(revision, kind, forkedCheckout, onRevertibleDisposed); + return this.createRevertible(revision, kind, targetCheckout, onRevertibleDisposed); }, dispose: () => { if (revertible.status === RevertibleStatus.Disposed) { diff --git a/packages/dds/tree/src/test/shared-tree/undo.spec.ts b/packages/dds/tree/src/test/shared-tree/undo.spec.ts index 0cdcb14cb554..0946258edc3f 100644 --- a/packages/dds/tree/src/test/shared-tree/undo.spec.ts +++ b/packages/dds/tree/src/test/shared-tree/undo.spec.ts @@ -648,7 +648,10 @@ describe("Undo and redo", () => { const undoOriginalPropertyOne = undoStack.pop(); - assert.throws(() => undoOriginalPropertyOne?.clone(viewB).revert(), "Error: 0x576"); + assert.throws( + () => undoOriginalPropertyOne?.clone(viewB), + /Cannot clone revertible for a commit that is not present on the given branch./, + ); }); // TODO:#24414: Enable forkable revertibles tests to run on attached/detached mode. 
diff --git a/packages/runtime/test-runtime-utils/src/assertionShortCodesMap.ts b/packages/runtime/test-runtime-utils/src/assertionShortCodesMap.ts index b8c5ec3daca2..9afd5f7ea1ac 100644 --- a/packages/runtime/test-runtime-utils/src/assertionShortCodesMap.ts +++ b/packages/runtime/test-runtime-utils/src/assertionShortCodesMap.ts @@ -1632,7 +1632,6 @@ export const shortCodeMap = { "0xa7e": "Expected at least two types", "0xa7f": "Delta manager does not have inbound/outbound queues.", "0xa80": "Invalid delta manager", - "0xa82": "change to revert does not exist on the given forked branch", "0xa83": "Expected commit(s) for a non no-op rebase", "0xa84": "Commit must be in the branch's ancestry", "0xa86": "Expected source commits in non no-op merge", From ba65d67ace16b1ea0736ff087fa3763e7b2c0251 Mon Sep 17 00:00:00 2001 From: yann-achard-MS <97201204+yann-achard-MS@users.noreply.github.com> Date: Wed, 8 Jan 2025 15:15:56 -0800 Subject: [PATCH 23/27] docs(tree): updates and fixes (#23455) ## Description Updates and fixes to the existing docs. ## Breaking Changes None --- docs/docs/data-structures/tree/events.mdx | 33 +++++++--- docs/docs/data-structures/tree/nodes.mdx | 20 ++++-- .../tree/reading-and-editing.mdx | 61 ++++++++++++------- .../tree/schema-definition.mdx | 3 +- .../data-structures/tree/transactions.mdx | 12 ++-- docs/docs/data-structures/tree/undo-redo.mdx | 2 +- 6 files changed, 86 insertions(+), 45 deletions(-) diff --git a/docs/docs/data-structures/tree/events.mdx b/docs/docs/data-structures/tree/events.mdx index 5ab78b3488a3..6d65ddb721b1 100644 --- a/docs/docs/data-structures/tree/events.mdx +++ b/docs/docs/data-structures/tree/events.mdx @@ -3,20 +3,31 @@ title: Events sidebar_position: 5 --- -`SharedTree` supports two node level events: `nodeChanged` and `treeChanged`. +## Whole-Tree Events -Additionally, the `TreeView` object includes 2 events that operate over the whole tree. -These are `rootChanged` and `commitApplied`. 
+The `TreeView` object exposes 2 events that communicate changes that affect the whole tree. -`rootChanged` fires when the root field (the field that contains the root node) changes. +`rootChanged` fires when the contents of the root field (the field that contains the root node) change. That is, if a new root node is assigned or the schema changes. This will not fire when the node itself changes. -`changed` fires whenever a change is applied outside of a transaction or when a transaction is committed. +`commitApplied` fires whenever a change is applied outside of a transaction or when a transaction is committed. This is used to get `Revertible` objects to put on the undo or redo stacks. See [undo redo support](./undo-redo.mdx) and [Transactions](./transactions.mdx). -## Event Handling +Here is an example of how to subscribe to one of these events: + +```typescript +const unsubscribe = treeView.events.on("rootChanged", () => {...}); + +// Later at some point when the event subscription is not needed anymore +unsubscribe(); +``` + +## Node-Level Events + +`SharedTree` supports two node-level events: `nodeChanged` and `treeChanged`. +These can be subscribed to using the `Tree.on` method, which has the following signature: ```typescript on( @@ -28,9 +39,12 @@ on( `Tree.on` assigns the specified `listener` function to the specified `eventName` for the specified `node`. The `node` can be any node of the tree. -The `eventName` can be either "treeChanged" or "nodeChanged". -`nodeChanged` fires whenever one or more properties of the specified node change. -`treeChanged` fires whenever one or more properties of the specified node or any node in its subtree, change. + +The `eventName` can be either `"treeChanged"` or `"nodeChanged"`: + +- `nodeChanged` fires whenever one or more properties of the specified node change. +- `treeChanged` fires whenever one or more properties of the specified node or any node in its subtree, change. 
+ We recommend looking at the documentation of each of the events for more details. The `Tree.on()` method returns a function that unsubscribes the handler from the event. This method is typically called in clean up code when the node is being removed. For example: @@ -40,5 +54,4 @@ const unsubscribe = Tree.on(myTreeNode, "nodeChanged", () => {...}); // Later at some point when the event subscription is not needed anymore unsubscribe(); - ``` diff --git a/docs/docs/data-structures/tree/nodes.mdx b/docs/docs/data-structures/tree/nodes.mdx index 05f270272535..725d19ecce83 100644 --- a/docs/docs/data-structures/tree/nodes.mdx +++ b/docs/docs/data-structures/tree/nodes.mdx @@ -10,13 +10,16 @@ See [node types](./node-types.mdx) for details on the types of nodes that can be Below are some utilities provided to make working with nodes easier. -### Node Information +### `Tree.key` ```typescript Tree.key(node: SharedTreeNode): number | string ``` -Returns the key of the `node`. This is a string in all cases, except an array node, in which case it returns the index of the node. +Returns the field key that the `node` is stored under. +This is a string in all cases, except an array node, in which case it returns the index of the node. + +### `Tree.parent` ```typescript Tree.parent(node: SharedTreeNode): SharedTreeNode @@ -33,15 +36,20 @@ if (Tree.is(parent, Notes) || Tree.is(parent, Items)) { } ``` +### `Tree.status` + ```typescript Tree.status(node: SharedTreeNode): TreeStatus ``` Returns the current status of `node`. Possible values are: -- **InDocument**: The node is in the tree. -- **Removed**: The node has been removed from the tree but is still restorable by undo. -- **Deleted**: The node is deleted and unrestorable. +- **New**: The node is created but has not yet been inserted into the tree. +- **InDocument**: The node is parented (either directly or indirectly) under the root field. 
+- **Removed**: The node is not parented under the root field but may still be restorable by this client or other clients. +- **Deleted**: The node is deleted and cannot be restored by this client, though it may still be restorable by other clients. + +### `Tree.schema` ```typescript Tree.schema(node: SharedTreeNode): TreeNodeSchema @@ -49,7 +57,7 @@ Tree.schema(node: SharedTreeNode): TreeNodeSchema Returns the object that defines the schema of the `node` object. -### Type Guard +### `Tree.is` When your code needs to process nodes only of a certain type and it has a reference to an object of an unknown type, you can use the `Tree.is()` method to test for the desired type as in the following examples. diff --git a/docs/docs/data-structures/tree/reading-and-editing.mdx b/docs/docs/data-structures/tree/reading-and-editing.mdx index 19882016a66e..2cfb05a6e6fc 100644 --- a/docs/docs/data-structures/tree/reading-and-editing.mdx +++ b/docs/docs/data-structures/tree/reading-and-editing.mdx @@ -3,7 +3,8 @@ title: Reading and Editing sidebar_position: 4 --- -The `TreeView` object and its children provide methods that enable your code to add nodes to the tree, remove nodes, and move nodes within the tree. You can also set and read the values of leaf nodes. The APIs have been designed to match as much as possible the syntax of TypeScript primitives, objects, maps, and arrays; although some editing APIs are different for the sake of making the merge semantics clearer. +The `TreeView` object and its children expose properties and methods that allow applications read and edit the tree. +The APIs have been designed to match as much as possible the syntax of TypeScript primitives, objects, maps, and arrays (although some editing APIs are different for the sake of making the merge semantics clearer). 
## Leaf Node APIs @@ -23,14 +24,14 @@ const pointsForDetroitTigers: number = seasonTree.tigersTeam.game1.points; ### Reading Object Properties -Your code reads object nodes and their properties exactly as it would read a JavaScript object. The following are some examples. +Your code reads object nodes and their properties exactly as it would from a JavaScript object. The following are some examples. ```typescript const pointsForDetroitTigers: number = seasonTree.tigersTeam.game1.points; const counterHandle: FluidHandle = myTree.myObjectNode.myHandle; -const myItems: Array = stickyNotesTree.items; +const myItems: Items = stickyNotesTree.items; ``` ### Creating Objects @@ -52,6 +53,7 @@ We show how to add this note to an array of notes in the tree in [Array node API ### Editing Object Properties To update the property on an object node, you assign a new node or value to it with the assignment operator (`=`). +See [here](https://github.com/microsoft/FluidFramework/blob/main/packages/dds/tree/docs/user-facing/object-merge-semantics.md) for details on the merge semantics of these edits. ```typescript rectangle.topLeft = new Point({ x: 0, y: 0 }); @@ -67,6 +69,9 @@ Optional properties can be cleared by assigning `undefined` to them. proposal.text = undefined; ``` +Note that if the new value is a node (as opposed to a primitive), then its [status](./nodes.mdx#treestatus) must be `TreeStatus.New`, +In other words, it is not possible to set a node that has already been inserted in the tree in the past, even if that node has since been removed. + ## Map Node APIs ### Map Node Read APIs @@ -77,31 +82,31 @@ The read APIs for map nodes have the same names and syntax as the corresponding has(key): boolean ``` -Returns `true`` if the key is present in the map. +Returns `true` if the key is present in the map. ```typescript get(key): T | undefined ``` -Returns the value of the property with the specified key. 
+Returns the value associated with the specified key if any, and returns `undefined` if no value is associated with that key. ```typescript keys(): IterableIterator ``` -Returns an Iterator that contains the keys in the map node. The keys are iterated in the order that they were added. +Returns an Iterator that contains the keys in the map node. ```typescript values(): IterableIterator ``` -Returns an Iterator that contains the values in the map node. The values are iterated in the order that they were added. +Returns an Iterator that contains the values in the map node. ```typescript entries(): IterableIterator<[string, T]> ``` -Returns an Iterator that contains the key/value pairs in the map node. The pairs are iterated in the order that they were added. +Returns an Iterator that contains the key/value pairs in the map node. ```typescript map(callback: ()=>[]): IterableIterator<[string, T]> @@ -111,7 +116,8 @@ Returns an array, _not a map node or array node_, that is the result of applying ### Map Node Write APIs -The write methods for map nodes are also the same as the corresponding methods for JavaScript `Map` objects. +The write methods for map nodes are similar to the corresponding methods for JavaScript `Map` objects. +See [here](https://github.com/microsoft/FluidFramework/blob/main/packages/dds/tree/docs/user-facing/map-merge-semantics.md) for details on the merge semantics of these edits. ```typescript set(key: string, value: T) @@ -120,6 +126,8 @@ set(key: string, value: T) The `set()` method sets/changes the value of the item with the specified key. If the key is not present, the item is added to the map. Note the following: - The `T` can be any type that conforms to the map node's schema. For example, if the schema was defined with `class MyMap extends sf.map([sf.number, sf.string]);`, then `T` could be `number` or `string`. 
+- If the `value` argument is a node (as opposed to a primitive), then its [status](./nodes.mdx#treestatus) must be `TreeStatus.New`, + In other words, it is not possible to set a node that has already been inserted in the tree in the past, even if that node has since been removed. - If multiple clients set the same key simultaneously, the key gets the value set by the last edit to apply. For the meaning of "simultaneously", see [Types of distributed data structures](../overview.mdx). ```typescript @@ -145,10 +153,16 @@ Array nodes have all the same non-mutating read methods as the JavaScript [Array ### Array Node Write APIs The write APIs for array nodes are quite different from JavaScript arrays. They are more suitable for data items that are being worked on collaboratively by multiple people. There are three categories of write APIs: Insert, Remove, and Move. +See [here](https://github.com/microsoft/FluidFramework/blob/main/packages/dds/tree/docs/user-facing/array-merge-semantics.md) for details on the merge semantics of these edits. #### Insert Methods -Array nodes have three methods that insert new items into the node. Note that in all of the following, the `T` can be any type that conforms to the array node's schema. For example, if the schema was defined with `class MyArray extends sf.array([sf.number, sf.string]);`, then `T` could be `number` or `string`. +Array nodes have three methods that insert items into the array. +Note the following: + +- In all of the following, the `T` can be any type that conforms to the array node's schema. For example, if the schema was defined with `class MyArray extends sf.array([sf.number, sf.string]);`, then `T` could be `number` or `string`. +- Inserted values that are nodes (as opposed to a primitive), must have a [status](./nodes.mdx#treestatus) equal `TreeStatus.New`. + In other words, it is not possible to set a node that has already been inserted in the tree in the past, even if that node has since been removed. 
```typescript insertAt(index: number, value: Iterable) @@ -194,27 +208,32 @@ Removes the items indicated by the `start` index (inclusive) and `end` index (ex Array nodes have three methods that move items within an array or from one array node to another. When moving from one array node to another, these methods must be called from the destination array node. Note that in all of the following, the `T` can be any type that is derived from an object that is returned by a call of `SchemaFactory.array()`, such as the `Notes` and `Items` classes in the sticky notes example. +Note the following about these methods: + +- If multiple clients simultaneously move an item, then that item will be moved to the destination indicated by the move of the client whose edit is ordered last. +- A moved item may be removed as a result of a simultaneous remove operation from another client. For example, if one client moves items 3-5, and another client simultaneously removes items 4 and 5, then, if the remove operation is ordered last, items 4 and 5 are removed from their destination by the remove operation. If the move operation is ordered last, then all three items will be moved to the destination. +- Moved items that are nodes (as opposed to primitives) must have a [status](./nodes.mdx#treestatus) equal to `TreeStatus.InDocument`. +- We also expose variations of these methods for moving a single item. + +For the meaning of "simultaneously", see [Types of distributed data structures](../overview.mdx). + ```typescript -moveToStart(sourceStartIndex: number, sourceEndIndex: number, source?: T) +moveRangeToStart(sourceStartIndex: number, sourceEndIndex: number, source?: T) ``` Moves the specified items to the start of the array. Specify a `source` array if it is different from the destination array. 
```typescript -moveToEnd(sourceStartIndex: number, sourceEndIndex: number, source?: T) +moveRangeToEnd(sourceStartIndex: number, sourceEndIndex: number, source?: T) ``` Moves the specified items to the end of the array. Specify a `source` array if it is different from the destination array. ```typescript -moveToIndex(index: number, sourceStartIndex: number, sourceEndIndex: number, source?: T) +moveRangeToIndex(destinationGap: number, sourceStartIndex: number, sourceEndIndex: number, source?: T) ``` -Moves the items to the specified `index` in the destination array. The item that is at `index` before the method is called will be at the first index position that follows the moved items after the move. Specify a `source` array if it is different from the destination array. If the items are being moved within the same array, the `index` position is calculated including the items being moved (as if a new copy of the moved items were being inserted, without removing the originals). - -Note the following about these methods: - -- If multiple clients simultaneously move an item, then that item will be moved to the destination indicated by the move of the client whose edit is ordered last. -- A moved item may be removed as a result of a simultaneous remove operation from another client. For example, if one client moves items 3-5, and another client simultaneously removes items 4 and 5, then, if the remove operation is ordered last, items 4 and 5 are removed from their destination by the remove operation. If the move operation is ordered last, then all three items will be moved to the destination. - -For the meaning of "simultaneously", see [Types of distributed data structures](../overview.mdx). +Moves the items to the specified `destinationGap` in the destination array. +Specify a `source` array if it is different from the destination array. 
+If the items are being moved within the same array, the `destinationGap` position is interpreted including the items being moved (as if a new copy of the moved items were being inserted, without removing the originals). +See [the doc comments](https://fluidframework.com/docs/api/tree/treearraynode-interface#moverangetoindex-methodsignature) for details. diff --git a/docs/docs/data-structures/tree/schema-definition.mdx b/docs/docs/data-structures/tree/schema-definition.mdx index 5d7bc67aea3e..4ea4ac399fd7 100644 --- a/docs/docs/data-structures/tree/schema-definition.mdx +++ b/docs/docs/data-structures/tree/schema-definition.mdx @@ -171,12 +171,11 @@ type _check = ValidateRecursiveSchema; ## Setting Properties as Optional -To specify that a property is not required, pass it to the `SchemaFactory.optional()` method inline. The following example shows a schema with two optional properties. +To specify that a property is not required, pass it to the `SchemaFactory.optional()` method inline. The following example shows a schema with an optional property. ```typescript class Proposal = sf.object('Proposal', { id: sf.string, text: sf.optional(sf.string), - comments: sf.optional(sf.array(Comment)), }); ``` diff --git a/docs/docs/data-structures/tree/transactions.mdx b/docs/docs/data-structures/tree/transactions.mdx index 9b6b1ccf5efd..9b2eb23cd7cf 100644 --- a/docs/docs/data-structures/tree/transactions.mdx +++ b/docs/docs/data-structures/tree/transactions.mdx @@ -5,11 +5,13 @@ sidebar_position: 6 TODO:#27374: update this page to describe the new transaction API -If you want the `SharedTree` to treat a set of changes atomically, wrap these changes in a transaction. -Using a transaction guarantees that (if applied) all of the changes will be applied together synchronously and no other changes (either from this client or from a remote client) can be interleaved with those changes. 
Note that the Fluid Framework guarantees this already for any sequence of changes that are submitted synchronously. However, the changes may not be applied at all if the transaction is given one or more constraints. -If any constraint on a transaction is not met, then the transaction and all its changes will ignored by all clients. -Additionally, all changes in a transaction will be reverted together as a single unit by [undo/redo code](./undo-redo.mdx), because changes within a transaction are exposed through a single `Revertible` object. -It is also more efficient for SharedTree to process a large number of changes in a row as a transaction rather than as changes submitted separately. +If you want the `SharedTree` to treat a set of changes atomically, then you can wrap these changes in a transaction. +Using a transaction guarantees that (if applied) all of the changes will be applied together synchronously and no other changes (either from this client or from a remote client) can be interleaved with those changes. +Note that the Fluid Framework guarantees this already for any sequence of changes that are submitted synchronously. +However, using a transaction has the following additional implications: +- If [reverted](./undo-redo.mdx) (e.g. via an "undo" operation), all the changes in the transaction are reverted together. +- It is also more efficient for SharedTree to process and transmit a large number of changes as a transaction rather than as changes submitted separately. +- It is possible to specify constraints on a transaction so that the transaction will be ignored if one or more of these constraints are not met. To create a transaction use the `Tree.runTransaction()` method. You can cancel a transaction from within the callback function by returning the special "rollback object", available via `Tree.runTransaction.rollback`. Also, if an error occurs within the callback, the transaction will be canceled automatically before propagating the error. 
diff --git a/docs/docs/data-structures/tree/undo-redo.mdx b/docs/docs/data-structures/tree/undo-redo.mdx index 79dde9f5b462..db5aa2992599 100644 --- a/docs/docs/data-structures/tree/undo-redo.mdx +++ b/docs/docs/data-structures/tree/undo-redo.mdx @@ -12,4 +12,4 @@ sidebar_position: 7 To undo a change, call the `revert` method on the `Revertible` object. This will return the properties of the `TreeView` object last changed by the local client to the their previous state. If changes were made to those properties by other clients in the meantime these changes will be overwritten. For example, if the local client moves 3 items into an array, and then a remote client moves one of those items somewhere else, when the local client reverts their change, the item moved by the remote client will be returned to its original position. -There is an example of a working undo/redo stack here: [Shared Tree Demo](https://github.com/microsoft/FluidExamples/tree/main/brainstorm). +There is an example of a working undo/redo stack here: [Shared Tree Demo](https://github.com/microsoft/FluidExamples/tree/main/brainstorm/src/utils/undo.ts). From bd243fb2927915d87c42486e21ee0c990962a9a7 Mon Sep 17 00:00:00 2001 From: tyler-cai-microsoft <90650728+tyler-cai-microsoft@users.noreply.github.com> Date: Wed, 8 Jan 2025 15:37:49 -0800 Subject: [PATCH 24/27] Remove _createDataStoreWithProps (#22996) [AB#856](https://dev.azure.com/fluidframework/235294da-091d-4c29-84fc-cdfc3d90890b/_workitems/edit/856) https://github.com/microsoft/FluidFramework/issues/22993 ### Description Removes APIs that have been deprecated. We have tried to move away from `_createDataStoreWithProps` as this causes collisions when creating live datastores. Solutions involved aliasing. The original issue with the removal was that there was no way to put in props. Added the `initialState` parameter to `PureDataObjectFactory.createInstanceWithDataStore` as pass in props. 
### Remove APIs - `IContainerRuntimeBase._createDataStoreWithProps` - `ContainerRuntime._createDataStoreWithProps` ### Alternatives - `PureDataObjectFactory.createInstanceWithDataStore` allows props to be passed in via the `initialState` parameter. ### Merge Notes - Merge only after `main` is ready for 2.20 changes. --------- Co-authored-by: Tyler Butler --- .changeset/gold-maps-cut.md | 21 +++++++++++++++++++ packages/framework/aqueduct/package.json | 6 +++++- .../validateAqueductPrevious.generated.ts | 1 + .../package.json | 12 ++++++++++- ...nerRuntimeDefinitionsPrevious.generated.ts | 3 +++ .../runtime/container-runtime/package.json | 12 ++++++++++- .../container-runtime/src/containerRuntime.ts | 19 ----------------- ...idateContainerRuntimePrevious.generated.ts | 2 ++ .../runtime-definitions.legacy.alpha.api.md | 2 -- .../runtime/runtime-definitions/package.json | 15 ++++++++++++- .../src/dataStoreContext.ts | 9 -------- ...ateRuntimeDefinitionsPrevious.generated.ts | 4 ++++ .../runtime/test-runtime-utils/package.json | 9 +++++++- ...idateTestRuntimeUtilsPrevious.generated.ts | 2 ++ .../src/test/rootDatastores.spec.ts | 11 ---------- packages/test/test-utils/package.json | 9 +++++++- .../validateTestUtilsPrevious.generated.ts | 2 ++ 17 files changed, 92 insertions(+), 47 deletions(-) create mode 100644 .changeset/gold-maps-cut.md diff --git a/.changeset/gold-maps-cut.md b/.changeset/gold-maps-cut.md new file mode 100644 index 000000000000..3790848acb57 --- /dev/null +++ b/.changeset/gold-maps-cut.md @@ -0,0 +1,21 @@ +--- +"@fluidframework/aqueduct": minor +"@fluidframework/container-runtime": minor +"@fluidframework/container-runtime-definitions": minor +"@fluidframework/datastore": minor +"@fluidframework/runtime-definitions": minor +"@fluidframework/test-runtime-utils": minor +--- + +# The createDataStoreWithProps APIs on ContainerRuntime and IContainerRuntimeBase have been removed + +`ContainerRuntime.createDataStoreWithProps` and 
`IContainerRuntimeBase.createDataStoreWithProps` +have been removed. + +Replace uses of these APIs with `PureDataObjectFactory.createInstanceWithDataStore` and pass in props via the `initialState` +parameter. + +# Initial deprecation/removal announcement + +The initial deprecations of the now changed or removed types were announced [#1537](https://github.com/microsoft/FluidFramework/issues/1537) +in Fluid Framework v0.25 [#2931](https://github.com/microsoft/FluidFramework/pull/2931) diff --git a/packages/framework/aqueduct/package.json b/packages/framework/aqueduct/package.json index 68fb1020ce55..97c72c64ec89 100644 --- a/packages/framework/aqueduct/package.json +++ b/packages/framework/aqueduct/package.json @@ -156,7 +156,11 @@ "typescript": "~5.4.5" }, "typeValidation": { - "broken": {}, + "broken": { + "Interface_IDataObjectProps": { + "backCompat": false + } + }, "entrypoint": "legacy" } } diff --git a/packages/framework/aqueduct/src/test/types/validateAqueductPrevious.generated.ts b/packages/framework/aqueduct/src/test/types/validateAqueductPrevious.generated.ts index b7fd21423392..aad878387976 100644 --- a/packages/framework/aqueduct/src/test/types/validateAqueductPrevious.generated.ts +++ b/packages/framework/aqueduct/src/test/types/validateAqueductPrevious.generated.ts @@ -247,4 +247,5 @@ declare type old_as_current_for_Interface_IDataObjectProps = requireAssignableTo * typeValidation.broken: * "Interface_IDataObjectProps": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IDataObjectProps = requireAssignableTo, TypeOnly> diff --git a/packages/runtime/container-runtime-definitions/package.json b/packages/runtime/container-runtime-definitions/package.json index fe58ab33f1ad..4a0a0b3bf8be 100644 --- a/packages/runtime/container-runtime-definitions/package.json +++ b/packages/runtime/container-runtime-definitions/package.json @@ -103,7 +103,17 @@ "typescript": "~5.4.5" }, 
"typeValidation": { - "broken": {}, + "broken": { + "Interface_IContainerRuntime": { + "backCompat": false + }, + "Interface_IContainerRuntimeWithResolveHandle_Deprecated": { + "backCompat": false + }, + "TypeAlias_IContainerRuntimeBaseWithCombinedEvents": { + "backCompat": false + } + }, "entrypoint": "legacy" } } diff --git a/packages/runtime/container-runtime-definitions/src/test/types/validateContainerRuntimeDefinitionsPrevious.generated.ts b/packages/runtime/container-runtime-definitions/src/test/types/validateContainerRuntimeDefinitionsPrevious.generated.ts index 13f5bcb7b763..f746d36779d7 100644 --- a/packages/runtime/container-runtime-definitions/src/test/types/validateContainerRuntimeDefinitionsPrevious.generated.ts +++ b/packages/runtime/container-runtime-definitions/src/test/types/validateContainerRuntimeDefinitionsPrevious.generated.ts @@ -22,6 +22,7 @@ declare type MakeUnusedImportErrorsGoAway = TypeOnly | MinimalType | Fu * typeValidation.broken: * "Interface_IContainerRuntime": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IContainerRuntime = requireAssignableTo, TypeOnly> /* @@ -49,6 +50,7 @@ declare type old_as_current_for_Interface_IContainerRuntimeWithResolveHandle_Dep * typeValidation.broken: * "Interface_IContainerRuntimeWithResolveHandle_Deprecated": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IContainerRuntimeWithResolveHandle_Deprecated = requireAssignableTo, TypeOnly> /* @@ -85,6 +87,7 @@ declare type current_as_old_for_Interface_ISummarizerObservabilityProps = requir * typeValidation.broken: * "TypeAlias_IContainerRuntimeBaseWithCombinedEvents": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_TypeAlias_IContainerRuntimeBaseWithCombinedEvents = requireAssignableTo, TypeOnly> /* diff --git 
a/packages/runtime/container-runtime/package.json b/packages/runtime/container-runtime/package.json index 4fcc023f5a59..4b4b078f4b59 100644 --- a/packages/runtime/container-runtime/package.json +++ b/packages/runtime/container-runtime/package.json @@ -221,7 +221,17 @@ "typescript": "~5.4.5" }, "typeValidation": { - "broken": {}, + "broken": { + "Class_ContainerRuntime": { + "backCompat": false + }, + "Class_DocumentsSchemaController": { + "forwardCompat": false + }, + "ClassStatics_ContainerRuntime": { + "backCompat": false + } + }, "entrypoint": "legacy" } } diff --git a/packages/runtime/container-runtime/src/containerRuntime.ts b/packages/runtime/container-runtime/src/containerRuntime.ts index 96a50678b9e7..d81f025ec8b3 100644 --- a/packages/runtime/container-runtime/src/containerRuntime.ts +++ b/packages/runtime/container-runtime/src/containerRuntime.ts @@ -3431,25 +3431,6 @@ export class ContainerRuntime ); } - /** - * @deprecated 0.16 Issue #1537, #3631 - */ - public async _createDataStoreWithProps( - pkg: Readonly, - props?: any, - ): Promise { - const context = this.channelCollection.createDataStoreContext( - Array.isArray(pkg) ? pkg : [pkg], - props, - ); - return channelToDataStore( - await context.realize(), - context.id, - this.channelCollection, - this.mc.logger, - ); - } - private canSendOps() { // Note that the real (non-proxy) delta manager is needed here to get the readonly info. This is because // container runtime's ability to send ops depend on the actual readonly state of the delta manager. 
diff --git a/packages/runtime/container-runtime/src/test/types/validateContainerRuntimePrevious.generated.ts b/packages/runtime/container-runtime/src/test/types/validateContainerRuntimePrevious.generated.ts index 2dbf49f28ded..a9f4bad04ac1 100644 --- a/packages/runtime/container-runtime/src/test/types/validateContainerRuntimePrevious.generated.ts +++ b/packages/runtime/container-runtime/src/test/types/validateContainerRuntimePrevious.generated.ts @@ -31,6 +31,7 @@ declare type old_as_current_for_Class_ContainerRuntime = requireAssignableTo, TypeOnly> /* @@ -85,6 +86,7 @@ declare type current_as_old_for_Class_SummaryCollection = requireAssignableTo, TypeOnly> /* diff --git a/packages/runtime/runtime-definitions/api-report/runtime-definitions.legacy.alpha.api.md b/packages/runtime/runtime-definitions/api-report/runtime-definitions.legacy.alpha.api.md index f7fe04a66dae..1821c8da82cf 100644 --- a/packages/runtime/runtime-definitions/api-report/runtime-definitions.legacy.alpha.api.md +++ b/packages/runtime/runtime-definitions/api-report/runtime-definitions.legacy.alpha.api.md @@ -72,8 +72,6 @@ export interface IContainerRuntimeBase extends IEventProvider, loadingGroupId?: string): Promise; - // @deprecated (undocumented) - _createDataStoreWithProps(pkg: Readonly, props?: any, id?: string): Promise; createDetachedDataStore(pkg: Readonly, loadingGroupId?: string): IFluidDataStoreContextDetached; // (undocumented) readonly disposed: boolean; diff --git a/packages/runtime/runtime-definitions/package.json b/packages/runtime/runtime-definitions/package.json index 35169d042bbe..172c45a5e7ba 100644 --- a/packages/runtime/runtime-definitions/package.json +++ b/packages/runtime/runtime-definitions/package.json @@ -111,7 +111,20 @@ "typescript": "~5.4.5" }, "typeValidation": { - "broken": {}, + "broken": { + "Interface_IContainerRuntimeBase": { + "backCompat": false + }, + "Interface_IFluidDataStoreContext": { + "backCompat": false + }, + 
"Interface_IFluidDataStoreContextDetached": { + "backCompat": false + }, + "Interface_IFluidParentContext": { + "backCompat": false + } + }, "entrypoint": "legacy" } } diff --git a/packages/runtime/runtime-definitions/src/dataStoreContext.ts b/packages/runtime/runtime-definitions/src/dataStoreContext.ts index 718325b2d79e..d87e1b0aad8a 100644 --- a/packages/runtime/runtime-definitions/src/dataStoreContext.ts +++ b/packages/runtime/runtime-definitions/src/dataStoreContext.ts @@ -216,15 +216,6 @@ export interface IContainerRuntimeBase extends IEventProvider void; - /** - * @deprecated 0.16 Issue #1537, #3631 - */ - _createDataStoreWithProps( - pkg: Readonly, - props?: any, - id?: string, - ): Promise; - /** * Creates a data store and returns an object that exposes a handle to the data store's entryPoint, and also serves * as the data store's router. The data store is not bound to a container, and in such state is not persisted to diff --git a/packages/runtime/runtime-definitions/src/test/types/validateRuntimeDefinitionsPrevious.generated.ts b/packages/runtime/runtime-definitions/src/test/types/validateRuntimeDefinitionsPrevious.generated.ts index 61b1cd698cb4..5031c28d1216 100644 --- a/packages/runtime/runtime-definitions/src/test/types/validateRuntimeDefinitionsPrevious.generated.ts +++ b/packages/runtime/runtime-definitions/src/test/types/validateRuntimeDefinitionsPrevious.generated.ts @@ -112,6 +112,7 @@ declare type current_as_old_for_Interface_IAttachMessage = requireAssignableTo, TypeOnly> /* @@ -211,6 +212,7 @@ declare type old_as_current_for_Interface_IFluidDataStoreContext = requireAssign * typeValidation.broken: * "Interface_IFluidDataStoreContext": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IFluidDataStoreContext = requireAssignableTo, TypeOnly> /* @@ -229,6 +231,7 @@ declare type old_as_current_for_Interface_IFluidDataStoreContextDetached = requi * typeValidation.broken: * 
"Interface_IFluidDataStoreContextDetached": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IFluidDataStoreContextDetached = requireAssignableTo, TypeOnly> /* @@ -283,6 +286,7 @@ declare type old_as_current_for_Interface_IFluidParentContext = requireAssignabl * typeValidation.broken: * "Interface_IFluidParentContext": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IFluidParentContext = requireAssignableTo, TypeOnly> /* diff --git a/packages/runtime/test-runtime-utils/package.json b/packages/runtime/test-runtime-utils/package.json index 6b34e307ae96..2453e4ba8268 100644 --- a/packages/runtime/test-runtime-utils/package.json +++ b/packages/runtime/test-runtime-utils/package.json @@ -157,7 +157,14 @@ "typescript": "~5.4.5" }, "typeValidation": { - "broken": {}, + "broken": { + "Class_MockFluidDataStoreContext": { + "backCompat": false + }, + "ClassStatics_MockFluidDataStoreContext": { + "backCompat": false + } + }, "entrypoint": "legacy" } } diff --git a/packages/runtime/test-runtime-utils/src/test/types/validateTestRuntimeUtilsPrevious.generated.ts b/packages/runtime/test-runtime-utils/src/test/types/validateTestRuntimeUtilsPrevious.generated.ts index 60347db4f053..a88ba30aba20 100644 --- a/packages/runtime/test-runtime-utils/src/test/types/validateTestRuntimeUtilsPrevious.generated.ts +++ b/packages/runtime/test-runtime-utils/src/test/types/validateTestRuntimeUtilsPrevious.generated.ts @@ -175,6 +175,7 @@ declare type old_as_current_for_Class_MockFluidDataStoreContext = requireAssigna * typeValidation.broken: * "Class_MockFluidDataStoreContext": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Class_MockFluidDataStoreContext = requireAssignableTo, TypeOnly> /* @@ -364,6 +365,7 @@ declare type current_as_old_for_ClassStatics_MockDeltaQueue = 
requireAssignableT * typeValidation.broken: * "ClassStatics_MockFluidDataStoreContext": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_ClassStatics_MockFluidDataStoreContext = requireAssignableTo, TypeOnly> /* diff --git a/packages/test/test-end-to-end-tests/src/test/rootDatastores.spec.ts b/packages/test/test-end-to-end-tests/src/test/rootDatastores.spec.ts index b52afcc247b6..fd949e81b098 100644 --- a/packages/test/test-end-to-end-tests/src/test/rootDatastores.spec.ts +++ b/packages/test/test-end-to-end-tests/src/test/rootDatastores.spec.ts @@ -61,9 +61,6 @@ describeCompat("Named root data stores", "FullCompat", (getTestObjectProvider) = const runtimeOf = (dataObject: ITestFluidObject): IContainerRuntime => dataObject.context.containerRuntime as IContainerRuntime; - const createDataStoreWithProps = async (dataObject: ITestFluidObject, id: string) => - runtimeOf(dataObject)._createDataStoreWithProps(packageName, {}, id); - /** * Gets an aliased data store with the given id. Throws an error if the data store cannot be retrieved. 
*/ @@ -78,14 +75,6 @@ describeCompat("Named root data stores", "FullCompat", (getTestObjectProvider) = return dataStore; } - describe("Legacy APIs", () => { - it("Datastore creation with legacy API returns datastore which can be aliased", async () => { - const ds = await createDataStoreWithProps(dataObject1, "1"); - const aliasResult = await ds.trySetAlias("2"); - assert.equal(aliasResult, "Success"); - }); - }); - describe("Aliasing", () => { const alias = "alias"; diff --git a/packages/test/test-utils/package.json b/packages/test/test-utils/package.json index 5d9cf87aa2f7..c9ae7ee694d1 100644 --- a/packages/test/test-utils/package.json +++ b/packages/test/test-utils/package.json @@ -167,7 +167,14 @@ "typescript": "~5.4.5" }, "typeValidation": { - "broken": {}, + "broken": { + "Interface_IProvideTestFluidObject": { + "backCompat": false + }, + "Interface_ITestFluidObject": { + "backCompat": false + } + }, "entrypoint": "legacy" } } diff --git a/packages/test/test-utils/src/test/types/validateTestUtilsPrevious.generated.ts b/packages/test/test-utils/src/test/types/validateTestUtilsPrevious.generated.ts index 23bb8b2365d0..d14180f98964 100644 --- a/packages/test/test-utils/src/test/types/validateTestUtilsPrevious.generated.ts +++ b/packages/test/test-utils/src/test/types/validateTestUtilsPrevious.generated.ts @@ -85,6 +85,7 @@ declare type old_as_current_for_Interface_IProvideTestFluidObject = requireAssig * typeValidation.broken: * "Interface_IProvideTestFluidObject": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_IProvideTestFluidObject = requireAssignableTo, TypeOnly> /* @@ -103,4 +104,5 @@ declare type old_as_current_for_Interface_ITestFluidObject = requireAssignableTo * typeValidation.broken: * "Interface_ITestFluidObject": {"backCompat": false} */ +// @ts-expect-error compatibility expected to be broken declare type current_as_old_for_Interface_ITestFluidObject = requireAssignableTo, 
TypeOnly> From 0783a317317647e8881ec717a6f85c531cdbc956 Mon Sep 17 00:00:00 2001 From: Alex Villarreal <716334+alexvy86@users.noreply.github.com> Date: Wed, 8 Jan 2025 18:12:24 -0600 Subject: [PATCH 25/27] refactor(shared-object-base): Improve typing in legacy+alpha API surface (#23238) ## Description Updates some types in the legacy+alpha API surface of the `@fluidframework/shared-object-base` package. Mostly improving type safety by replacing `any` with `unknown`, and a few `any` with `void` where that was the correct type. Also updates some code in response to the changes above, like call sites of the affected APIs now having to do casts themselves instead of relying on the `any` they were getting before. ## Breaking Changes The breaking changes affect only the legacy+alpha API surface. --- .changeset/smooth-roses-repair.md | 56 +++++++++++++++++++ .../packages/property-dds/src/propertyTree.ts | 2 +- experimental/dds/ot/ot/src/ot.ts | 2 +- .../dds/tree/src/SummaryBackCompatibility.ts | 2 +- packages/dds/cell/src/cell.ts | 3 +- packages/dds/map/src/directory.ts | 4 +- .../shared-object-base.legacy.alpha.api.md | 26 ++++----- .../dds/shared-object-base/src/serializer.ts | 21 +++---- .../shared-object-base/src/sharedObject.ts | 21 +++---- packages/dds/shared-object-base/src/utils.ts | 6 +- .../dds/test-dds-utils/src/ddsFuzzHarness.ts | 7 +-- .../src/test/gc/gcTestSummaryUtils.ts | 2 +- .../src/test/gc/gcTombstoneDataStores.spec.ts | 2 +- 13 files changed, 95 insertions(+), 59 deletions(-) create mode 100644 .changeset/smooth-roses-repair.md diff --git a/.changeset/smooth-roses-repair.md b/.changeset/smooth-roses-repair.md new file mode 100644 index 000000000000..ad463bc29204 --- /dev/null +++ b/.changeset/smooth-roses-repair.md @@ -0,0 +1,56 @@ +--- +"@fluidframework/shared-object-base": minor +--- +--- +"section": legacy +--- + +Replace 'any' in return type for several APIs + +To improve type safety of the Fluid Framework legacy+alpha API surface, +we're moving 
away from using the `any` type in favor of `unknown`. + +We mostly expect that any changes required in consumers of these APIs will be limited to having to provide explicit types +when calling any of the APIs whose return value changed to `unknown`, like `IFluidSerializer.parse()`. + +So code that looked like this: + +```typescript +// 'myVariable' ended up typed as 'any' here and TypeScript would not do any type-safety checks on it. +const myVariable = this.serializer.parse(stringHeader); +``` + +Will now have to look like this: + +```typescript +// Do this if you know the type of the object you expect to get back. +const myVariable = this.serializer.parse(stringHeader) as MyType; + +// Alternatively, this will maintain current behavior but also means no type-safety checks will be done by TS. +// const myVariable = this.serializer.parse(stringHeader) as any; +``` + +The appropriate type will depend on what the calling code is doing and the objects it expects to be dealing with. + +We further encourage consumers of any of these APIs to add runtime checks +to validate that the returned object actually matches the expected type. + +The list of affected APIs is as follows: + +- `IFluidSerializer.encode(...)` now takes `value: unknown` instead of `value: any` and returns `unknown` instead of `any`. +- `IFluidSerializer.decode(...)` now takes `input: unknown` instead of `input: any` and returns `unknown` instead of `any`. +- `IFluidSerializer.stringify(...)` now takes `value: unknown` instead of `value: any`. +- `IFluidSerializer.parse(...)` now returns `unknown` instead of `any`. +- `SharedObjectCore.applyStashedOps(...)` now takes `content: unknown` instead of `content: any`. +- `SharedObjectCore.rollback(...)` now takes `content: unknown` instead of `content: any`. +- `SharedObjectCore.submitLocalMessage(...)` now takes `content: unknown` instead of `content: any`. +- `SharedObjectCore.reSubmitCore(...)` now takes `content: unknown` instead of `content: any`. 
+- In `SharedObjectCore.newAckBasedPromise(...)` the `executor` parameter now takes `reject: (reason?: unknown)` + instead of `reject: (reason?: any)`. +- `makeHandlesSerializable(...)` now returns `unknown` instead of `any`. +- `parseHandles(...)` now returns `unknown` instead of `any`. + +Additionally, the following APIs were never designed to return a value and have thus been updated to return `void` instead of `any`: + +- `SharedObjectCore.processCore(...)`. +- `SharedObjectCore.onDisconnect(...)` diff --git a/experimental/PropertyDDS/packages/property-dds/src/propertyTree.ts b/experimental/PropertyDDS/packages/property-dds/src/propertyTree.ts index 7c73e6b83290..da10e48fba88 100644 --- a/experimental/PropertyDDS/packages/property-dds/src/propertyTree.ts +++ b/experimental/PropertyDDS/packages/property-dds/src/propertyTree.ts @@ -615,7 +615,7 @@ export class SharedPropertyTree extends SharedObject { const handleTableChunk = await storage.readBlob("properties"); const utf8 = bufferToString(handleTableChunk, "utf8"); - const snapshot: ISnapshot = this.serializer.parse(utf8); + const snapshot: ISnapshot = this.serializer.parse(utf8) as ISnapshot; this.useMH = snapshot.useMH; try { diff --git a/experimental/dds/ot/ot/src/ot.ts b/experimental/dds/ot/ot/src/ot.ts index 67ec3db67c28..ae1ac68866a8 100644 --- a/experimental/dds/ot/ot/src/ot.ts +++ b/experimental/dds/ot/ot/src/ot.ts @@ -98,7 +98,7 @@ export abstract class SharedOT extends SharedObject { protected async loadCore(storage: IChannelStorageService): Promise { const blob = await storage.readBlob("header"); const rawContent = bufferToString(blob, "utf8"); - this.global = this.local = this.serializer.parse(rawContent); + this.global = this.local = this.serializer.parse(rawContent) as TState; } protected onDisconnect() {} diff --git a/experimental/dds/tree/src/SummaryBackCompatibility.ts b/experimental/dds/tree/src/SummaryBackCompatibility.ts index 1d74bed4d9d6..520a9af103ab 100644 --- 
a/experimental/dds/tree/src/SummaryBackCompatibility.ts +++ b/experimental/dds/tree/src/SummaryBackCompatibility.ts @@ -28,7 +28,7 @@ import { export function deserialize(jsonSummary: string, serializer: IFluidSerializer): SharedTreeSummaryBase { let summary: Partial; try { - summary = serializer.parse(jsonSummary); + summary = serializer.parse(jsonSummary) as Partial; } catch { fail('Json syntax error in Summary'); } diff --git a/packages/dds/cell/src/cell.ts b/packages/dds/cell/src/cell.ts index d810d7d48316..903111ca85bc 100644 --- a/packages/dds/cell/src/cell.ts +++ b/packages/dds/cell/src/cell.ts @@ -205,8 +205,7 @@ export class SharedCell protected async loadCore(storage: IChannelStorageService): Promise { const content = await readAndParse(storage, snapshotFileName); - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - this.data = this.serializer.decode(content.value); + this.data = this.serializer.decode(content.value) as Serializable; this.attribution = content.attribution; } diff --git a/packages/dds/map/src/directory.ts b/packages/dds/map/src/directory.ts index c867e3907db6..cc3353123c3c 100644 --- a/packages/dds/map/src/directory.ts +++ b/packages/dds/map/src/directory.ts @@ -784,8 +784,8 @@ export class SharedDirectory const localValue = this.makeLocal( key, currentSubDir.absolutePath, - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - parseHandles(serializable, this.serializer), + // eslint-disable-next-line import/no-deprecated + parseHandles(serializable, this.serializer) as ISerializableValue, ); currentSubDir.populateStorage(key, localValue); } diff --git a/packages/dds/shared-object-base/api-report/shared-object-base.legacy.alpha.api.md b/packages/dds/shared-object-base/api-report/shared-object-base.legacy.alpha.api.md index de8c9114e47c..4eab532dc67f 100644 --- a/packages/dds/shared-object-base/api-report/shared-object-base.legacy.alpha.api.md +++ 
b/packages/dds/shared-object-base/api-report/shared-object-base.legacy.alpha.api.md @@ -6,10 +6,10 @@ // @alpha (undocumented) export interface IFluidSerializer { - decode(input: any): any; - encode(value: any, bind: IFluidHandle): any; - parse(value: string): any; - stringify(value: any, bind: IFluidHandle): string; + decode(input: unknown): unknown; + encode(value: unknown, bind: IFluidHandle): unknown; + parse(value: string): unknown; + stringify(value: unknown, bind: IFluidHandle): string; } // @alpha @@ -33,10 +33,10 @@ export interface ISharedObjectKind { } // @alpha -export function makeHandlesSerializable(value: unknown, serializer: IFluidSerializer, bind: IFluidHandle): any; +export function makeHandlesSerializable(value: unknown, serializer: IFluidSerializer, bind: IFluidHandle): unknown; // @alpha -export function parseHandles(value: unknown, serializer: IFluidSerializer): any; +export function parseHandles(value: unknown, serializer: IFluidSerializer): unknown; // @alpha export abstract class SharedObject extends SharedObjectCore { @@ -53,7 +53,7 @@ export abstract class SharedObject extends EventEmitterWithErrorHandling implements ISharedObject { constructor(id: string, runtime: IFluidDataStoreRuntime, attributes: IChannelAttributes); - protected abstract applyStashedOp(content: any): void; + protected abstract applyStashedOp(content: unknown): void; // (undocumented) readonly attributes: IChannelAttributes; bindToContext(): void; @@ -76,16 +76,16 @@ export abstract class SharedObjectCore; protected abstract loadCore(services: IChannelStorageService): Promise; protected readonly logger: ITelemetryLoggerExt; - protected newAckBasedPromise(executor: (resolve: (value: T | PromiseLike) => void, reject: (reason?: any) => void) => void): Promise; + protected newAckBasedPromise(executor: (resolve: (value: T | PromiseLike) => void, reject: (reason?: unknown) => void) => void): Promise; protected onConnect(): void; - protected abstract onDisconnect(): any; - 
protected abstract processCore(message: ISequencedDocumentMessage, local: boolean, localOpMetadata: unknown): any; - protected reSubmitCore(content: any, localOpMetadata: unknown): void; - protected rollback(content: any, localOpMetadata: unknown): void; + protected abstract onDisconnect(): void; + protected abstract processCore(message: ISequencedDocumentMessage, local: boolean, localOpMetadata: unknown): void; + protected reSubmitCore(content: unknown, localOpMetadata: unknown): void; + protected rollback(content: unknown, localOpMetadata: unknown): void; // (undocumented) protected runtime: IFluidDataStoreRuntime; protected abstract get serializer(): IFluidSerializer; - protected submitLocalMessage(content: any, localOpMetadata?: unknown): void; + protected submitLocalMessage(content: unknown, localOpMetadata?: unknown): void; abstract summarize(fullTree?: boolean, trackState?: boolean, telemetryContext?: ITelemetryContext): Promise; } diff --git a/packages/dds/shared-object-base/src/serializer.ts b/packages/dds/shared-object-base/src/serializer.ts index a4b9db7534a6..f9b03e5700b8 100644 --- a/packages/dds/shared-object-base/src/serializer.ts +++ b/packages/dds/shared-object-base/src/serializer.ts @@ -31,8 +31,7 @@ export interface IFluidSerializer { * The original `input` object is not mutated. This method will shallowly clones all objects in the path from * the root to any replaced handles. (If no handles are found, returns the original object.) */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - encode(value: any, bind: IFluidHandle): any; + encode(value: unknown, bind: IFluidHandle): unknown; /** * Given a fully-jsonable object tree that may have encoded handle objects embedded within, will return an @@ -43,21 +42,18 @@ export interface IFluidSerializer { * * The decoded handles are implicitly bound to the handle context of this serializer. 
*/ - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - decode(input: any): any; + decode(input: unknown): unknown; /** * Stringifies a given value. Converts any IFluidHandle to its stringified equivalent. */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - stringify(value: any, bind: IFluidHandle): string; + stringify(value: unknown, bind: IFluidHandle): string; /** * Parses the given JSON input string and returns the JavaScript object defined by it. Any Fluid * handles will be realized as part of the parse */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - parse(value: string): any; + parse(value: string): unknown; } /** @@ -87,14 +83,12 @@ export class FluidSerializer implements IFluidSerializer { * * Any unbound handles encountered are bound to the provided IFluidHandle. */ - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any -- TODO: AB#26129 ddsFuzzHarness breaks when we update any->unknown - public encode(input: any, bind: IFluidHandleInternal): any { + public encode(input: unknown, bind: IFluidHandleInternal): unknown { // If the given 'input' cannot contain handles, return it immediately. Otherwise, // return the result of 'recursivelyReplace()'. // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions return !!input && typeof input === "object" - ? // eslint-disable-next-line @typescript-eslint/no-unsafe-argument -- TODO: AB#26129 ddsFuzzHarness breaks when we update any->unknown - this.recursivelyReplace(input, this.encodeValue, bind) + ? 
this.recursivelyReplace(input, this.encodeValue, bind) : input; } @@ -107,8 +101,7 @@ export class FluidSerializer implements IFluidSerializer { * * The decoded handles are implicitly bound to the handle context of this serializer. */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 ddsFuzzHarness breaks when we update any->unknown - public decode(input: unknown): any { + public decode(input: unknown): unknown { // If the given 'input' cannot contain handles, return it immediately. Otherwise, // return the result of 'recursivelyReplace()'. // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions diff --git a/packages/dds/shared-object-base/src/sharedObject.ts b/packages/dds/shared-object-base/src/sharedObject.ts index de02418c1e7c..640e9ffad163 100644 --- a/packages/dds/shared-object-base/src/sharedObject.ts +++ b/packages/dds/shared-object-base/src/sharedObject.ts @@ -389,15 +389,13 @@ export abstract class SharedObjectCore< message: ISequencedDocumentMessage, local: boolean, localOpMetadata: unknown, - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use void instead of any (legacy breaking) - ): any; + ): void; /** * Called when the object has disconnected from the delta stream. */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 change return type to void (legacy breaking) - protected abstract onDisconnect(): any; + protected abstract onDisconnect(): void; /** * The serializer to serialize / parse handles. @@ -412,8 +410,7 @@ export abstract class SharedObjectCore< * and not sent to the server. This will be sent back when this message is received back from the server. This is * also sent if we are asked to resubmit the message. 
*/ - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - protected submitLocalMessage(content: any, localOpMetadata: unknown = undefined): void { + protected submitLocalMessage(content: unknown, localOpMetadata: unknown = undefined): void { this.verifyNotClosed(); if (this.isAttached()) { // eslint-disable-next-line @typescript-eslint/no-non-null-assertion @@ -451,8 +448,7 @@ export abstract class SharedObjectCore< * @param content - The content of the original message. * @param localOpMetadata - The local metadata associated with the original message. */ - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - protected reSubmitCore(content: any, localOpMetadata: unknown): void { + protected reSubmitCore(content: unknown, localOpMetadata: unknown): void { this.submitLocalMessage(content, localOpMetadata); } @@ -465,8 +461,7 @@ export abstract class SharedObjectCore< protected async newAckBasedPromise( executor: ( resolve: (value: T | PromiseLike) => void, - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - reject: (reason?: any) => void, + reject: (reason?: unknown) => void, ) => void, ): Promise { let rejectBecauseDispose: () => void; @@ -619,8 +614,7 @@ export abstract class SharedObjectCore< /** * Revert an op */ - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - protected rollback(content: any, localOpMetadata: unknown): void { + protected rollback(content: unknown, localOpMetadata: unknown): void { throw new Error("rollback not supported"); } @@ -641,8 +635,7 @@ export abstract class SharedObjectCore< * * 
@param content - Contents of a stashed op. */ - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types, @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) - protected abstract applyStashedOp(content: any): void; + protected abstract applyStashedOp(content: unknown): void; /** * Emit an event. This function is only intended for use by DDS classes that extend SharedObject/SharedObjectCore, diff --git a/packages/dds/shared-object-base/src/utils.ts b/packages/dds/shared-object-base/src/utils.ts index c80ef0d506c1..2ef68500d95b 100644 --- a/packages/dds/shared-object-base/src/utils.ts +++ b/packages/dds/shared-object-base/src/utils.ts @@ -45,8 +45,7 @@ export function makeHandlesSerializable( value: unknown, serializer: IFluidSerializer, bind: IFluidHandle, - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) -): any { +): unknown { return serializer.encode(value, bind); } @@ -61,8 +60,7 @@ export function makeHandlesSerializable( * @legacy * @alpha */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -- TODO: AB#26129 use unknown instead of any (legacy breaking) -export function parseHandles(value: unknown, serializer: IFluidSerializer): any { +export function parseHandles(value: unknown, serializer: IFluidSerializer): unknown { return serializer.decode(value); } diff --git a/packages/dds/test-dds-utils/src/ddsFuzzHarness.ts b/packages/dds/test-dds-utils/src/ddsFuzzHarness.ts index a49fcfb731b0..4ac3557f0dff 100644 --- a/packages/dds/test-dds-utils/src/ddsFuzzHarness.ts +++ b/packages/dds/test-dds-utils/src/ddsFuzzHarness.ts @@ -1460,14 +1460,11 @@ export async function runTestForSeed< let operationCount = 0; const generator = model.generatorFactory(); const finalState = await performFuzzActionsAsync( - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - async (state) => serializer.encode(await 
generator(state), bind), + async (state) => serializer.encode(await generator(state), bind) as TOperation, async (state, operation) => { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const decodedHandles = serializer.decode(operation); + const decodedHandles = serializer.decode(operation) as TOperation; options.emitter.emit("operation", decodedHandles); operationCount++; - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument return model.reducer(state, decodedHandles); }, initialState, diff --git a/packages/test/test-end-to-end-tests/src/test/gc/gcTestSummaryUtils.ts b/packages/test/test-end-to-end-tests/src/test/gc/gcTestSummaryUtils.ts index 555f22f389f9..a85e36627419 100644 --- a/packages/test/test-end-to-end-tests/src/test/gc/gcTestSummaryUtils.ts +++ b/packages/test/test-end-to-end-tests/src/test/gc/gcTestSummaryUtils.ts @@ -175,7 +175,7 @@ export function manufactureHandle( const handle: IFluidHandleInternal = parseHandles( { type: "__fluid_handle__", url }, serializer, - ); + ) as IFluidHandleInternal; return handle; } diff --git a/packages/test/test-end-to-end-tests/src/test/gc/gcTombstoneDataStores.spec.ts b/packages/test/test-end-to-end-tests/src/test/gc/gcTombstoneDataStores.spec.ts index 6191eb9d2f79..3a16b1b72339 100644 --- a/packages/test/test-end-to-end-tests/src/test/gc/gcTombstoneDataStores.spec.ts +++ b/packages/test/test-end-to-end-tests/src/test/gc/gcTombstoneDataStores.spec.ts @@ -583,7 +583,7 @@ describeCompat("GC data store tombstone tests", "NoCompat", (getTestObjectProvid const handle: IFluidHandle = parseHandles( { type: "__fluid_handle__", url: unreferencedId }, serializer, - ); + ) as IFluidHandle; // This fails because the DataStore is tombstoned let tombstoneError: ExpectedTombstoneError | undefined; From a9232b231efa0c32954794b0276cc7cb30642099 Mon Sep 17 00:00:00 2001 From: alex-pardes <105307164+alex-pardes@users.noreply.github.com> Date: Wed, 8 Jan 2025 16:18:49 -0800 Subject: [PATCH 26/27] 
tree: Refactor RangeMap to use a B-tree (#23497) ## Description Reimplemented RangeMap to store entries in a B-tree instead of an array to improve asymptotic performance. --- .../modular-schema/modularChangeCodecs.ts | 3 +- .../modular-schema/modularChangeFamily.ts | 76 ++---- .../modular-schema/modularChangeTypes.ts | 4 +- .../modularChangeFamily.spec.ts | 2 +- .../modular-schema/modularChangesetUtil.ts | 5 +- .../modular-schema/rangeMap.spec.ts | 40 ++-- .../shared-tree/sharedTreeChangeCodec.spec.ts | 3 +- packages/dds/tree/src/test/utils.ts | 1 - packages/dds/tree/src/util/bTreeUtils.ts | 60 +++++ packages/dds/tree/src/util/idAllocator.ts | 2 - packages/dds/tree/src/util/index.ts | 2 + packages/dds/tree/src/util/rangeMap.ts | 225 +++++++----------- packages/dds/tree/src/util/utils.ts | 2 - 13 files changed, 197 insertions(+), 228 deletions(-) create mode 100644 packages/dds/tree/src/util/bTreeUtils.ts diff --git a/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeCodecs.ts b/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeCodecs.ts index 985965a87670..b6a0eeb71979 100644 --- a/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeCodecs.ts +++ b/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeCodecs.ts @@ -32,6 +32,7 @@ import { brand, fail, idAllocatorFromMaxId, + newTupleBTree, } from "../../util/index.js"; import { type FieldBatchCodec, @@ -65,7 +66,7 @@ import type { NodeId, } from "./modularChangeTypes.js"; import type { FieldChangeEncodingContext, FieldChangeHandler } from "./fieldChangeHandler.js"; -import { newCrossFieldKeyTable, newTupleBTree } from "./modularChangeFamily.js"; +import { newCrossFieldKeyTable } from "./modularChangeFamily.js"; export function makeModularChangeCodecFamily( fieldKindConfigurations: ReadonlyMap, diff --git a/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeFamily.ts 
b/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeFamily.ts index 76d47e4ad5b5..8f67a1fde803 100644 --- a/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeFamily.ts +++ b/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeFamily.ts @@ -51,6 +51,9 @@ import { idAllocatorFromState, type RangeQueryResult, getOrAddInMapLazy, + newTupleBTree, + mergeTupleBTrees, + type TupleBTree, } from "../../util/index.js"; import { type TreeChunk, @@ -86,7 +89,6 @@ import type { ModularChangeset, NodeChangeset, NodeId, - TupleBTree, } from "./modularChangeTypes.js"; import type { IIdCompressor } from "@fluidframework/id-compressor"; @@ -272,14 +274,14 @@ export class ModularChangeFamily // (since we assume that if two changesets use the same node ID they are referring to the same node), // therefore all collisions will be addressed when processing the intersection of the changesets. const composedNodeChanges: ChangeAtomIdBTree = brand( - mergeBTrees(change1.nodeChanges, change2.nodeChanges), + mergeTupleBTrees(change1.nodeChanges, change2.nodeChanges), ); const composedNodeToParent: ChangeAtomIdBTree = brand( - mergeBTrees(change1.nodeToParent, change2.nodeToParent), + mergeTupleBTrees(change1.nodeToParent, change2.nodeToParent), ); const composedNodeAliases: ChangeAtomIdBTree = brand( - mergeBTrees(change1.nodeAliases, change2.nodeAliases), + mergeTupleBTrees(change1.nodeAliases, change2.nodeAliases), ); const crossFieldTable = newComposeTable(change1, change2, composedNodeToParent); @@ -304,7 +306,10 @@ export class ModularChangeFamily ); // Currently no field kinds require making changes to cross-field keys during composition, so we can just merge the two tables. 
- const composedCrossFieldKeys = mergeBTrees(change1.crossFieldKeys, change2.crossFieldKeys); + const composedCrossFieldKeys = mergeTupleBTrees( + change1.crossFieldKeys, + change2.crossFieldKeys, + ); return { fieldChanges: composedFields, nodeChanges: composedNodeChanges, @@ -1816,15 +1821,19 @@ function composeBuildsDestroysAndRefreshers( // composition all the changes already reflected on the tree, but that is not something we // care to support at this time. const allBuilds: ChangeAtomIdBTree = brand( - mergeBTrees(change1.builds ?? newTupleBTree(), change2.builds ?? newTupleBTree(), true), + mergeTupleBTrees( + change1.builds ?? newTupleBTree(), + change2.builds ?? newTupleBTree(), + true, + ), ); const allDestroys: ChangeAtomIdBTree = brand( - mergeBTrees(change1.destroys ?? newTupleBTree(), change2.destroys ?? newTupleBTree()), + mergeTupleBTrees(change1.destroys ?? newTupleBTree(), change2.destroys ?? newTupleBTree()), ); const allRefreshers: ChangeAtomIdBTree = brand( - mergeBTrees( + mergeTupleBTrees( change1.refreshers ?? newTupleBTree(), change2.refreshers ?? newTupleBTree(), true, @@ -2975,27 +2984,6 @@ function revisionInfoFromTaggedChange( return revInfos; } -function mergeBTrees( - tree1: TupleBTree | undefined, - tree2: TupleBTree | undefined, - preferLeft = true, -): TupleBTree { - if (tree1 === undefined) { - return tree2 !== undefined ? 
brand(tree2.clone()) : newTupleBTree(); - } - - const result: TupleBTree = brand(tree1.clone()); - if (tree2 === undefined) { - return result; - } - - for (const [key, value] of tree2.entries()) { - result.set(key, value, !preferLeft); - } - - return result; -} - function fieldChangeFromId( fields: FieldChangeMap, nodes: ChangeAtomIdBTree, @@ -3199,36 +3187,6 @@ export function newCrossFieldKeyTable(): CrossFieldKeyTable { return newTupleBTree(); } -export function newTupleBTree( - entries?: [K, V][], -): TupleBTree { - return brand(new BTree(entries, compareTuples)); -} - -// This assumes that the arrays are the same length. -function compareTuples(arrayA: readonly unknown[], arrayB: readonly unknown[]): number { - for (let i = 0; i < arrayA.length; i++) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const a = arrayA[i] as any; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const b = arrayB[i] as any; - - // Less-than and greater-than always return false if either value is undefined, - // so we handle undefined separately, treating it as less than all other values. - if (a === undefined && b !== undefined) { - return -1; - } else if (b === undefined && a !== undefined) { - return 1; - } else if (a < b) { - return -1; - } else if (a > b) { - return 1; - } - } - - return 0; -} - interface ModularChangesetContent { fieldChanges: FieldChangeMap; nodeChanges: ChangeAtomIdBTree; diff --git a/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeTypes.ts b/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeTypes.ts index 10c06624d236..37aba3ac82b0 100644 --- a/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeTypes.ts +++ b/packages/dds/tree/src/feature-libraries/modular-schema/modularChangeTypes.ts @@ -3,7 +3,6 @@ * Licensed under the MIT License. 
*/ -import type { BTree } from "@tylerbu/sorted-btree-es6"; import type { ChangeAtomId, ChangesetLocalId, @@ -12,7 +11,7 @@ import type { RevisionInfo, RevisionTag, } from "../../core/index.js"; -import type { Brand } from "../../util/index.js"; +import type { Brand, TupleBTree } from "../../util/index.js"; import type { TreeChunk } from "../chunked-forest/index.js"; import type { CrossFieldTarget } from "./crossFieldQueries.js"; @@ -71,7 +70,6 @@ export interface ModularChangeset extends HasFieldChanges { readonly refreshers?: ChangeAtomIdBTree; } -export type TupleBTree = Brand, "TupleBTree">; export type ChangeAtomIdBTree = TupleBTree<[RevisionTag | undefined, ChangesetLocalId], V>; export type CrossFieldKeyTable = TupleBTree; export type CrossFieldKeyRange = readonly [ diff --git a/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangeFamily.spec.ts b/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangeFamily.spec.ts index a34ae17c3dff..2ba2465614ab 100644 --- a/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangeFamily.spec.ts +++ b/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangeFamily.spec.ts @@ -57,6 +57,7 @@ import { brand, idAllocatorFromMaxId, nestedMapFromFlatList, + newTupleBTree, setInNestedMap, tryGetFromNestedMap, } from "../../../util/index.js"; @@ -87,7 +88,6 @@ import { updateRefreshers, relevantRemovedRoots as relevantDetachedTreesImplementation, newCrossFieldKeyTable, - newTupleBTree, // eslint-disable-next-line import/no-internal-modules } from "../../../feature-libraries/modular-schema/modularChangeFamily.js"; import type { diff --git a/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangesetUtil.ts b/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangesetUtil.ts index 481654969a2d..99977a89745b 100644 --- a/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangesetUtil.ts +++ 
b/packages/dds/tree/src/test/feature-libraries/modular-schema/modularChangesetUtil.ts @@ -31,13 +31,13 @@ import { brand, fail, idAllocatorFromMaxId, + newTupleBTree, } from "../../../util/index.js"; import { getChangeHandler, getFieldsForCrossFieldKey, getParentFieldId, newCrossFieldKeyTable, - newTupleBTree, // eslint-disable-next-line import/no-internal-modules } from "../../../feature-libraries/modular-schema/modularChangeFamily.js"; import { strict as assert } from "node:assert"; @@ -235,8 +235,9 @@ function addNodeToField( } const dummyCrossFieldManager: CrossFieldManager = { - get: (_target, _revision, _id, count, _addDependency) => ({ + get: (_target, _revision, id, count, _addDependency) => ({ value: undefined, + start: id, length: count, }), set: () => fail("Not supported"), diff --git a/packages/dds/tree/src/test/feature-libraries/modular-schema/rangeMap.spec.ts b/packages/dds/tree/src/test/feature-libraries/modular-schema/rangeMap.spec.ts index 55d961fa5776..dd1529ca03b3 100644 --- a/packages/dds/tree/src/test/feature-libraries/modular-schema/rangeMap.spec.ts +++ b/packages/dds/tree/src/test/feature-libraries/modular-schema/rangeMap.spec.ts @@ -16,7 +16,7 @@ describe("RangeMap", () => { it("query on empty map returns undefined", () => { const map = newRangeMap(); const entry = map.get(5, 4); - assert.deepEqual(entry, { length: 4, value: undefined }); + assert.deepEqual(entry, { start: 5, length: 4, value: undefined }); }); it("read one entry", () => { @@ -27,22 +27,22 @@ describe("RangeMap", () => { // Read keys 0-2 const entryBefore = map.get(0, 3); - assert.deepEqual(entryBefore, { length: 3, value: undefined }); + assert.deepEqual(entryBefore, { start: 0, length: 3, value: undefined }); // Read keys 1-3 const entryBeginning = map.get(1, 3); - assert.deepEqual(entryBeginning, { length: 2, value: undefined }); + assert.deepEqual(entryBeginning, { start: 1, length: 2, value: undefined }); // Read keys 2-7 const entryWhole = map.get(2, 6); - 
assert.deepEqual(entryWhole, { length: 1, value: undefined }); + assert.deepEqual(entryWhole, { start: 2, length: 1, value: undefined }); const entryEnd = map.get(6, 2); - assert.deepEqual(entryEnd, { length: 1, value: "a" }); + assert.deepEqual(entryEnd, { start: 6, length: 1, value: "a" }); // Read key 7 const entryAfter = map.get(7, 1); - assert.deepEqual(entryAfter, { length: 1, value: undefined }); + assert.deepEqual(entryAfter, { start: 7, length: 1, value: undefined }); }); it("read two entries", () => { @@ -56,19 +56,19 @@ describe("RangeMap", () => { // Read key 3 const entryFirst = map.get(3, 1); - assert.deepEqual(entryFirst, { length: 1, value: "a" }); + assert.deepEqual(entryFirst, { start: 3, length: 1, value: "a" }); // Read keys 5-7 const entrySecond = map.get(5, 3); - assert.deepEqual(entrySecond, { length: 1, value: undefined }); + assert.deepEqual(entrySecond, { start: 5, length: 1, value: undefined }); // Read keys 4-5 const entryBetween = map.get(4, 2); - assert.deepEqual(entryBetween, { length: 2, value: undefined }); + assert.deepEqual(entryBetween, { start: 4, length: 2, value: undefined }); // Read keys 3-6 const entryBoth = map.get(3, 4); - assert.deepEqual(entryBoth, { length: 1, value: "a" }); + assert.deepEqual(entryBoth, { start: 3, length: 1, value: "a" }); }); it("write overlapping ranges", () => { @@ -90,25 +90,25 @@ describe("RangeMap", () => { map.set(1, 7, "e"); const entry0 = map.get(0, 8); - assert.deepEqual(entry0, { length: 1, value: "a" }); + assert.deepEqual(entry0, { start: 0, length: 1, value: "a" }); const entry1 = map.get(1, 1); - assert.deepEqual(entry1, { length: 1, value: "e" }); + assert.deepEqual(entry1, { start: 1, length: 1, value: "e" }); const entry3 = map.get(3, 2); - assert.deepEqual(entry3, { length: 2, value: "e" }); + assert.deepEqual(entry3, { start: 3, length: 2, value: "e" }); const entry5 = map.get(5, 1); - assert.deepEqual(entry5, { length: 1, value: "e" }); + assert.deepEqual(entry5, { start: 5, 
length: 1, value: "e" }); const entry6 = map.get(6, 1); - assert.deepEqual(entry6, { length: 1, value: "e" }); + assert.deepEqual(entry6, { start: 6, length: 1, value: "e" }); const entry7 = map.get(7, 2); - assert.deepEqual(entry7, { length: 1, value: "e" }); + assert.deepEqual(entry7, { start: 7, length: 1, value: "e" }); const entry8 = map.get(8, 1); - assert.deepEqual(entry8, { length: 1, value: "d" }); + assert.deepEqual(entry8, { start: 8, length: 1, value: "d" }); }); it("write range which splits existing range", () => { @@ -121,13 +121,13 @@ describe("RangeMap", () => { map.set(4, 3, "b"); const entry1 = map.get(1, 10); - assert.deepEqual(entry1, { length: 3, value: "a" }); + assert.deepEqual(entry1, { start: 1, length: 3, value: "a" }); const entry4 = map.get(4, 8); - assert.deepEqual(entry4, { length: 3, value: "b" }); + assert.deepEqual(entry4, { start: 4, length: 3, value: "b" }); const entry7 = map.get(7, 4); - assert.deepEqual(entry7, { length: 4, value: "a" }); + assert.deepEqual(entry7, { start: 7, length: 4, value: "a" }); }); describe("deleteFromRange", () => { diff --git a/packages/dds/tree/src/test/shared-tree/sharedTreeChangeCodec.spec.ts b/packages/dds/tree/src/test/shared-tree/sharedTreeChangeCodec.spec.ts index ef3df8192a6a..3a05d0b55e3a 100644 --- a/packages/dds/tree/src/test/shared-tree/sharedTreeChangeCodec.spec.ts +++ b/packages/dds/tree/src/test/shared-tree/sharedTreeChangeCodec.spec.ts @@ -35,8 +35,7 @@ import { brand } from "../../util/brand.js"; import { ajvValidator } from "../codec/index.js"; import { testIdCompressor, testRevisionTagCodec } from "../utils.js"; import { BTree } from "@tylerbu/sorted-btree-es6"; -// eslint-disable-next-line import/no-internal-modules -import { newTupleBTree } from "../../feature-libraries/modular-schema/modularChangeFamily.js"; +import { newTupleBTree } from "../../util/index.js"; const codecOptions: ICodecOptions = { jsonValidator: ajvValidator }; diff --git a/packages/dds/tree/src/test/utils.ts 
b/packages/dds/tree/src/test/utils.ts index 4967499a888b..ea5917e128e8 100644 --- a/packages/dds/tree/src/test/utils.ts +++ b/packages/dds/tree/src/test/utils.ts @@ -128,7 +128,6 @@ import { SchematizingSimpleTreeView, // eslint-disable-next-line import/no-internal-modules } from "../shared-tree/schematizingTreeView.js"; -// eslint-disable-next-line import/no-internal-modules import type { SharedTreeOptions, SharedTreeOptionsInternal, diff --git a/packages/dds/tree/src/util/bTreeUtils.ts b/packages/dds/tree/src/util/bTreeUtils.ts new file mode 100644 index 000000000000..1eabff23bf8e --- /dev/null +++ b/packages/dds/tree/src/util/bTreeUtils.ts @@ -0,0 +1,60 @@ +/*! + * Copyright (c) Microsoft Corporation and contributors. All rights reserved. + * Licensed under the MIT License. + */ + +import { BTree } from "@tylerbu/sorted-btree-es6"; +import { brand, type Brand } from "./brand.js"; + +export type TupleBTree = Brand, "TupleBTree">; + +export function newTupleBTree( + entries?: [K, V][], +): TupleBTree { + return brand(new BTree(entries, compareTuples)); +} + +// This assumes that the arrays are the same length. +function compareTuples(arrayA: readonly unknown[], arrayB: readonly unknown[]): number { + for (let i = 0; i < arrayA.length; i++) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const a = arrayA[i] as any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const b = arrayB[i] as any; + + // Less-than and greater-than always return false if either value is undefined, + // so we handle undefined separately, treating it as less than all other values. 
+ if (a === undefined && b !== undefined) { + return -1; + } else if (b === undefined && a !== undefined) { + return 1; + } else if (a < b) { + return -1; + } else if (a > b) { + return 1; + } + } + + return 0; +} + +export function mergeTupleBTrees( + tree1: TupleBTree | undefined, + tree2: TupleBTree | undefined, + preferLeft = true, +): TupleBTree { + if (tree1 === undefined) { + return tree2 !== undefined ? brand(tree2.clone()) : newTupleBTree(); + } + + const result: TupleBTree = brand(tree1.clone()); + if (tree2 === undefined) { + return result; + } + + for (const [key, value] of tree2.entries()) { + result.set(key, value, !preferLeft); + } + + return result; +} diff --git a/packages/dds/tree/src/util/idAllocator.ts b/packages/dds/tree/src/util/idAllocator.ts index cfa38bd24556..cb17630b1346 100644 --- a/packages/dds/tree/src/util/idAllocator.ts +++ b/packages/dds/tree/src/util/idAllocator.ts @@ -26,8 +26,6 @@ export interface IdAllocationState { maxId: number; } -/** - */ export function idAllocatorFromMaxId(maxId: number | undefined = undefined): IdAllocator { return idAllocatorFromState({ maxId: maxId ?? -1 }); } diff --git a/packages/dds/tree/src/util/index.ts b/packages/dds/tree/src/util/index.ts index 2b9743db35a7..eb6f793e6da7 100644 --- a/packages/dds/tree/src/util/index.ts +++ b/packages/dds/tree/src/util/index.ts @@ -140,3 +140,5 @@ export { throwIfBroken, breakingClass, } from "./breakable.js"; + +export { type TupleBTree, newTupleBTree, mergeTupleBTrees } from "./bTreeUtils.js"; diff --git a/packages/dds/tree/src/util/rangeMap.ts b/packages/dds/tree/src/util/rangeMap.ts index d803614a8782..678409055566 100644 --- a/packages/dds/tree/src/util/rangeMap.ts +++ b/packages/dds/tree/src/util/rangeMap.ts @@ -3,25 +3,29 @@ * Licensed under the MIT License. 
*/ -import { oob } from "@fluidframework/core-utils/internal"; +import { newTupleBTree, type TupleBTree } from "./bTreeUtils.js"; /** * RangeMap represents a mapping from integers to values of type T or undefined. * The values for a range of consecutive keys can be changed or queried in a single operation. */ export class RangeMap { - private readonly entries: RangeEntry[]; + private readonly tree: TupleBTree<[number], RangeEntry>; - public constructor(initialEntries?: RangeEntry[]) { - this.entries = initialEntries ? [...initialEntries] : []; + public constructor() { + this.tree = newTupleBTree(); } /** - * Retrieves all entries from the rangeMap. - * @returns An array of RangeEntryResult objects, each containing the start index, length, and value of a contiguous range. + * Retrieves all entries from the RangeMap. */ - public getAllEntries(): readonly RangeQueryResult[] { - return this.entries; + public getAllEntries(): RangeQueryResult[] { + const entries: RangeQueryResult[] = []; + for (const [[start], entry] of this.tree.entries()) { + entries.push({ start, length: entry.length, value: entry.value }); + } + + return entries; } /** @@ -33,20 +37,36 @@ export class RangeMap { * and the number of consecutive keys with that same value. */ public get(start: number, length: number): RangeQueryResult { - for (const entry of this.entries) { - if (entry.start > start) { - return { value: undefined, length: Math.min(entry.start - start, length) }; + // We first check for an entry with a key less than or equal to `start`. + { + const entry = this.tree.getPairOrNextLower([start]); + if (entry !== undefined) { + const [entryKey] = entry[0]; + const { value, length: entryLength } = entry[1]; + + const entryLastId = entryKey + entryLength - 1; + const overlappingLength = Math.min(entryLastId - start + 1, length); + if (overlappingLength > 0) { + return { value, start, length: overlappingLength }; + } } + } + + { + // There is no value associated with `start`. 
+ // Now we need to determine how many of the following keys are also undefined. + const key = this.tree.nextHigherKey([start]); + if (key !== undefined) { + const [entryKey] = key; - const lastRangeKey = entry.start + entry.length - 1; - if (lastRangeKey >= start) { - const overlapLength = lastRangeKey - start + 1; - return { value: entry.value, length: Math.min(overlapLength, length) }; + const lastQueryId = start + length - 1; + if (entryKey <= lastQueryId) { + return { value: undefined, start, length: entryKey - start }; + } } - } - // There were no entries intersecting the query range, so the entire query range has undefined value. - return { value: undefined, length }; + return { value: undefined, start, length }; + } } /** @@ -57,73 +77,10 @@ export class RangeMap { * @param value - The value to associate with the range. */ public set(start: number, length: number, value: T | undefined): void { - if (value === undefined) { - this.delete(start, length); - return; - } - - const end = start + length - 1; - const newEntry: RangeEntry = { start, length, value }; - - let iBefore = -1; - let iAfter = this.entries.length; - for (const [i, entry] of this.entries.entries()) { - const entryLastKey = entry.start + entry.length - 1; - if (entryLastKey < start) { - iBefore = i; - } else if (entry.start > end) { - iAfter = i; - break; - } - } - - const numOverlappingEntries = iAfter - iBefore - 1; - if (numOverlappingEntries === 0) { - this.entries.splice(iAfter, 0, newEntry); - return; - } - - const iFirst = iBefore + 1; - const firstEntry = this.entries[iFirst] ?? oob(); - const iLast = iAfter - 1; - const lastEntry = this.entries[iLast] ?? oob(); - const lengthBeforeFirst = start - firstEntry.start; - const lastEntryKey = lastEntry.start + lastEntry.length - 1; - const lengthAfterLast = lastEntryKey - end; - - if (lengthBeforeFirst > 0 && lengthAfterLast > 0 && iFirst === iLast) { - // The new entry fits in the middle of an existing entry. 
- // We replace the existing entry with: - // 1) the portion which comes before `newEntry` - // 2) `newEntry` - // 3) the portion which comes after `newEntry` - this.entries.splice(iFirst, 1, { ...firstEntry, length: lengthBeforeFirst }, newEntry, { - ...lastEntry, - start: end + 1, - length: lengthAfterLast, - }); - return; - } - - if (lengthBeforeFirst > 0) { - this.entries[iFirst] = { ...firstEntry, length: lengthBeforeFirst }; - // The entry at `iFirst` is no longer overlapping with `newEntry`. - iBefore = iFirst; - } - - if (lengthAfterLast > 0) { - this.entries[iLast] = { - ...lastEntry, - start: end + 1, - length: lengthAfterLast, - }; - - // The entry at `iLast` is no longer overlapping with `newEntry`. - iAfter = iLast; + this.delete(start, length); + if (value !== undefined) { + this.tree.set([start], { value, length }); } - - const numContainedEntries = iAfter - iBefore - 1; - this.entries.splice(iBefore + 1, numContainedEntries, newEntry); } /** @@ -147,58 +104,57 @@ export class RangeMap { * @param length - The length of the range to delete. */ public delete(start: number, length: number): void { - const end = start + length - 1; - - let iBefore = -1; - let iAfter = this.entries.length; - - for (const [i, entry] of this.entries.entries()) { - const entryLastKey = entry.start + entry.length - 1; - if (entryLastKey < start) { - iBefore = i; - } else if (entry.start > end) { - iAfter = i; - break; + const lastDeletedKey = start + length - 1; + { + const entry = this.tree.getPairOrNextLower([start]); + if (entry !== undefined) { + const [key] = entry[0]; + const { length: entryLength, value } = entry[1]; + const lastEntryKey = key + entryLength - 1; + if (lastEntryKey >= start) { + // This entry overlaps with the deleted range, so we remove it. + this.tree.delete([key]); + if (key < start) { + // A portion of the entry comes before the delete range, so we reinsert that portion. 
+ this.tree.set([key], { value, length: start - key }); + } + + if (lastEntryKey > lastDeletedKey) { + // A portion of the entry comes after the delete range, so we reinsert that portion. + this.tree.set([lastDeletedKey + 1], { + value, + length: lastEntryKey - lastDeletedKey, + }); + + return; + } + } } } - const numOverlappingEntries = iAfter - iBefore - 1; - - if (numOverlappingEntries === 0) { - // No entry will be removed - return; - } - - const iFirst = iBefore + 1; - const iLast = iAfter - 1; - - for (let i = iFirst; i <= iLast; ++i) { - const entry = this.entries[i] ?? oob(); - const entryLastKey = entry.start + entry.length - 1; - let isDirty = false; - - if (entry.start >= start && entryLastKey <= end) { - // If the entry lies within the range to be deleted, remove it - this.entries.splice(i, 1); - } else { - // If the entry partially or completely overlaps with the range to be deleted - if (entry.start < start) { - // Update the endpoint and length of the portion before the range to be deleted - const lengthBefore = start - entry.start; - this.entries[i] = { ...entry, length: lengthBefore }; - isDirty = true; + { + let entry = this.tree.nextHigherPair([start]); + while (entry !== undefined) { + const [key] = entry[0]; + if (key > lastDeletedKey) { + return; } - if (entryLastKey > end) { - // Update the startpoint and length of the portion after the range to be deleted - const newStart = end + 1; - const newLength = entryLastKey - end; - this.entries.splice(isDirty ? i + 1 : i, isDirty ? 0 : 1, { - start: newStart, - length: newLength, - value: entry.value, + const { length: entryLength, value } = entry[1]; + const lastEntryKey = key + entryLength - 1; + + this.tree.delete([key]); + if (lastEntryKey > lastDeletedKey) { + // A portion of the entry comes after the delete range, so we reinsert that portion. 
+ this.tree.set([lastDeletedKey + 1], { + value, + length: lastEntryKey - lastDeletedKey, }); + + return; } + + entry = this.tree.nextHigherPair([lastEntryKey]); } } } @@ -206,14 +162,8 @@ export class RangeMap { /** * Represents a contiguous range of values in the RangeMap. - * This interface is used internally and should not be exposed to consumers. */ interface RangeEntry { - /** - * The starting index of the range (inclusive). - */ - readonly start: number; - /** * The length of the range. */ @@ -229,6 +179,11 @@ interface RangeEntry { * Describes the result of a range query, including the value and length of the matching prefix. */ export interface RangeQueryResult { + /** + * The key for the first element in the range. + */ + readonly start: number; + /** * The value of the first key in the query range. * If no matching range is found, this will be undefined. diff --git a/packages/dds/tree/src/util/utils.ts b/packages/dds/tree/src/util/utils.ts index 476f02b4b471..264a38983918 100644 --- a/packages/dds/tree/src/util/utils.ts +++ b/packages/dds/tree/src/util/utils.ts @@ -47,8 +47,6 @@ export function asMutable(readonly: T): Mutable { export const clone = structuredClone; -/** - */ export function fail(message: string): never { throw new Error(message); } From 29dd73af368a778610b0e54e9f4f38d7d611bead Mon Sep 17 00:00:00 2001 From: Ji Kim <111468570+jikim-msft@users.noreply.github.com> Date: Wed, 8 Jan 2025 16:30:49 -0800 Subject: [PATCH 27/27] Clone list of revertibles (#23424) #### Description [13865](https://dev.azure.com/fluidframework/internal/_workitems/edit/13865/) This PR adds a helper function that takes in the list of revertibles and returns the cloned revertibles in order. 
--------- Co-authored-by: Jenn --- .../dds/tree/api-report/tree.alpha.api.md | 3 + packages/dds/tree/src/core/index.ts | 1 + packages/dds/tree/src/core/revertible.ts | 28 ++++++++ packages/dds/tree/src/index.ts | 1 + .../tree/src/test/shared-tree/undo.spec.ts | 72 +++++++++++++++++++ .../api-report/fluid-framework.alpha.api.md | 3 + 6 files changed, 108 insertions(+) diff --git a/packages/dds/tree/api-report/tree.alpha.api.md b/packages/dds/tree/api-report/tree.alpha.api.md index 36bb82bbb513..1c76bccb21d3 100644 --- a/packages/dds/tree/api-report/tree.alpha.api.md +++ b/packages/dds/tree/api-report/tree.alpha.api.md @@ -44,6 +44,9 @@ export interface BranchableTree extends ViewableTree { rebase(branch: TreeBranchFork): void; } +// @alpha +export function cloneRevertibles(revertibles: RevertibleAlpha[], targetBranch: TreeBranch): RevertibleAlpha[]; + // @public export enum CommitKind { Default = 0, diff --git a/packages/dds/tree/src/core/index.ts b/packages/dds/tree/src/core/index.ts index 975613249c8f..ddfb5d7a9b4d 100644 --- a/packages/dds/tree/src/core/index.ts +++ b/packages/dds/tree/src/core/index.ts @@ -218,4 +218,5 @@ export { type RevertibleFactory, type RevertibleAlphaFactory, type RevertibleAlpha, + cloneRevertibles, } from "./revertible.js"; diff --git a/packages/dds/tree/src/core/revertible.ts b/packages/dds/tree/src/core/revertible.ts index 5fbbaebe59ef..962d62a68d0b 100644 --- a/packages/dds/tree/src/core/revertible.ts +++ b/packages/dds/tree/src/core/revertible.ts @@ -47,6 +47,8 @@ export interface RevertibleAlpha extends Revertible { /** * Clones the {@link Revertible} to a target branch. * + * @remarks To clone a group of`RevertibleAlpha`s, use {@link cloneRevertibles}. + * * @param branch - A target branch to apply the revertible to. * The target branch must contain the same commit that this revertible is meant to revert, otherwise will throw an error. 
* @returns A cloned revertible is independent of the original, meaning disposing of one will not affect the other, @@ -96,3 +98,29 @@ export type RevertibleFactory = ( export type RevertibleAlphaFactory = ( onRevertibleDisposed?: (revertible: RevertibleAlpha) => void, ) => RevertibleAlpha; + +/** + * Clones a group of revertibles for a target branch. + * + * @throws Error if any revertible is disposed. + * @throws Error if the target branch does not contain the changes that the revertibles are meant to revert. + * + * @param revertibles - Array of revertibles to clone. + * @param targetBranch - The target branch to clone the revertibles for. + * @returns Array of cloned revertibles, maintaining the same order as the input. + * + * @alpha + */ +export function cloneRevertibles( + revertibles: RevertibleAlpha[], + targetBranch: TreeBranch, +): RevertibleAlpha[] { + const disposedRevertible = revertibles.find( + (revertible) => revertible.status === RevertibleStatus.Disposed, + ); + if (disposedRevertible !== undefined) { + throw new Error("List of revertible should not contain disposed revertibles."); + } + + return revertibles.map((revertible) => revertible.clone(targetBranch)); +} diff --git a/packages/dds/tree/src/index.ts b/packages/dds/tree/src/index.ts index 4469a8eddf24..90b6381a0827 100644 --- a/packages/dds/tree/src/index.ts +++ b/packages/dds/tree/src/index.ts @@ -12,6 +12,7 @@ export { type RevertibleFactory, type RevertibleAlphaFactory, type RevertibleAlpha, + cloneRevertibles, } from "./core/index.js"; export type { diff --git a/packages/dds/tree/src/test/shared-tree/undo.spec.ts b/packages/dds/tree/src/test/shared-tree/undo.spec.ts index 0946258edc3f..2011ea48775e 100644 --- a/packages/dds/tree/src/test/shared-tree/undo.spec.ts +++ b/packages/dds/tree/src/test/shared-tree/undo.spec.ts @@ -6,6 +6,7 @@ import { type FieldUpPath, type Revertible, + cloneRevertibles, RevertibleStatus, type UpPath, rootFieldKey, @@ -676,6 +677,77 @@ describe("Undo and redo", 
() => { "Error: Unable to revert a revertible that has been disposed.", ); }); + + // TODO:#24414: Enable forkable revertibles tests to run on attached/detached mode. + it("clone list of revertibles", () => { + const view = createInitializedView(); + const { undoStack } = createTestUndoRedoStacks(view.events); + + assert(view.root.child !== undefined); + view.root.child.propertyOne = 256; // 128 -> 256 + view.root.child.propertyTwo.itemOne = "newItem"; // "" -> "newItem" + + const forkedView = view.fork(); + + const revertibles = [...undoStack]; + const clonedRevertibles = cloneRevertibles(revertibles, forkedView); + + assert.equal(clonedRevertibles.length, 2); + assert.equal(forkedView.root.child?.propertyOne, 256); + assert.equal(forkedView.root.child?.propertyTwo.itemOne, "newItem"); + + assert.equal(clonedRevertibles[0]?.status, RevertibleStatus.Valid); + assert.equal(clonedRevertibles[1]?.status, RevertibleStatus.Valid); + + clonedRevertibles.pop()?.revert(); + assert.equal(forkedView.root.child?.propertyOne, 256); + assert.equal(forkedView.root.child?.propertyTwo.itemOne, ""); + + clonedRevertibles.pop()?.revert(); + assert.equal(forkedView.root.child?.propertyOne, 128); + }); + + // TODO:#24414: Enable forkable revertibles tests to run on attached/detached mode. 
+ it("cloning list of disposed revertibles throws error", () => { + const view = createInitializedView(); + const { undoStack } = createTestUndoRedoStacks(view.events); + + assert(view.root.child !== undefined); + view.root.child.propertyOne = 256; // 128 -> 256 + view.root.child.propertyTwo.itemOne = "newItem"; // "" -> "newItem" + + const forkedView = view.fork(); + const revertibles = [...undoStack]; + + for (const revertible of undoStack) { + revertible.revert(); + assert.equal(revertible.status, RevertibleStatus.Disposed); + } + + assert.throws( + () => cloneRevertibles(revertibles, forkedView), + /List of revertible should not contain disposed revertibles./, + ); + }); + + // TODO:#24414: Enable forkable revertibles tests to run on attached/detached mode. + it("cloning list of revertibles between views with different changes throws error", () => { + const viewA = createInitializedView(); + const viewB = createInitializedView(); + + const { undoStack } = createTestUndoRedoStacks(viewA.events); + + assert(viewA.root.child !== undefined); + viewA.root.child.propertyOne = 256; // 128 -> 256 + viewA.root.child.propertyTwo.itemOne = "newItem"; // "" -> "newItem" + + const revertibles = [...undoStack]; + + assert.throws( + () => cloneRevertibles(revertibles, viewB), + /Cannot clone revertible for a commit that is not present on the given branch./, + ); + }); }); /** diff --git a/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md b/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md index 59ce78a13310..fa9a958c5664 100644 --- a/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md +++ b/packages/framework/fluid-framework/api-report/fluid-framework.alpha.api.md @@ -51,6 +51,9 @@ export interface BranchableTree extends ViewableTree { rebase(branch: TreeBranchFork): void; } +// @alpha +export function cloneRevertibles(revertibles: RevertibleAlpha[], targetBranch: TreeBranch): RevertibleAlpha[]; + // 
@public export enum CommitKind { Default = 0,