From 701e22660d5ac9f0b3e940ad656b9ca6c479251d Mon Sep 17 00:00:00 2001 From: Emmanuel T Odeke Date: Mon, 28 Oct 2024 06:48:24 -0700 Subject: [PATCH 1/4] feat: (observability): trace Database.runPartitionedUpdate (#2176) This change traces Database.runPartitionedUpdate along with the appropriate tests for it with and without errors. Updates #2079 --- observability-test/database.ts | 221 ++++++++++++++++++++++++++++++++- observability-test/spanner.ts | 47 +++++++ src/database.ts | 26 +++- 3 files changed, 287 insertions(+), 7 deletions(-) diff --git a/observability-test/database.ts b/observability-test/database.ts index 5ce075aa1..3b6811e8c 100644 --- a/observability-test/database.ts +++ b/observability-test/database.ts @@ -38,7 +38,10 @@ const { InMemorySpanExporter, } = require('@opentelemetry/sdk-trace-node'); // eslint-disable-next-line n/no-extraneous-require -const {SimpleSpanProcessor} = require('@opentelemetry/sdk-trace-base'); +const { + ReadableSpan, + SimpleSpanProcessor, +} = require('@opentelemetry/sdk-trace-base'); import * as db from '../src/database'; import {Instance, MutationGroup, Spanner} from '../src'; import * as pfy from '@google-cloud/promisify'; @@ -1954,4 +1957,220 @@ describe('Database', () => { fakeStream2.push(null); }); }); + + describe('runPartitionedUpdate', () => { + const QUERY = { + sql: 'INSERT INTO `MyTable` (Key, Thing) VALUES(@key, @thing)', + params: { + key: 'k999', + thing: 'abc', + }, + }; + + let fakePool: FakeSessionPool; + let fakeSession: FakeSession; + let fakePartitionedDml = new FakeTransaction( + {} as google.spanner.v1.TransactionOptions.PartitionedDml + ); + + let getSessionStub; + let beginStub; + let runUpdateStub; + + beforeEach(() => { + fakePool = database.pool_; + fakeSession = new FakeSession(); + fakePartitionedDml = new FakeTransaction( + {} as google.spanner.v1.TransactionOptions.PartitionedDml + ); + + getSessionStub = ( + sandbox.stub(fakePool, 'getSession') as sinon.SinonStub + ).callsFake(callback => { + callback(null, fakeSession); + }); + + sandbox.stub(fakeSession, 'partitionedDml').returns(fakePartitionedDml); + + beginStub = ( + sandbox.stub(fakePartitionedDml, 'begin') as sinon.SinonStub + ).callsFake(callback => callback(null)); + + runUpdateStub = ( + sandbox.stub(fakePartitionedDml, 'runUpdate') as sinon.SinonStub + ).callsFake((_, callback) => callback(null)); + }); + + interface traceExportResults { + spanNames: string[]; + spans: (typeof ReadableSpan)[]; + eventNames: string[]; + } + + async function getTraceExportResults(): Promise { + await provider.forceFlush(); + await traceExporter.forceFlush(); + const spans = traceExporter.getFinishedSpans(); + withAllSpansHaveDBName(spans); + + const actualSpanNames: string[] = []; + const actualEventNames: string[] = []; + spans.forEach(span => { + actualSpanNames.push(span.name); + span.events.forEach(event => { + actualEventNames.push(event.name); + }); + }); + + return Promise.resolve({ + spanNames: actualSpanNames, + spans: spans, + eventNames: actualEventNames, + }); + } + + it('with pool errors', done => { + const fakeError = new Error('err'); + const fakeCallback = sandbox.spy(); + + getSessionStub.callsFake(callback => callback(fakeError)); + database.runPartitionedUpdate(QUERY, async (err, rowCount) => { + assert.strictEqual(err, fakeError); + assert.strictEqual(rowCount, 0); + + const exportResults = await getTraceExportResults(); + const actualSpanNames = exportResults.spanNames; + const spans = exportResults.spans; + const actualEventNames = 
exportResults.eventNames; + + const expectedSpanNames = [ + 'CloudSpanner.Database.runPartitionedUpdate', + ]; + assert.deepStrictEqual( + actualSpanNames, + expectedSpanNames, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + ); + + // Ensure that the first span actually produced an error that was recorded. + const parentSpan = spans[0]; + assert.deepStrictEqual( + SpanStatusCode.ERROR, + parentSpan.status.code, + 'Expected an ERROR span status' + ); + assert.deepStrictEqual( + fakeError.message, + parentSpan.status.message.toString(), + 'Mismatched span status message' + ); + + const expectedEventNames = []; + assert.deepStrictEqual( + actualEventNames, + expectedEventNames, + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + ); + + done(); + }); + }); + + it('with begin errors', done => { + const fakeError = new Error('err'); + + beginStub.callsFake(callback => callback(fakeError)); + + const releaseStub = ( + sandbox.stub(fakePool, 'release') as sinon.SinonStub + ).withArgs(fakeSession); + + database.runPartitionedUpdate(QUERY, async (err, rowCount) => { + assert.strictEqual(err, fakeError); + assert.strictEqual(rowCount, 0); + assert.strictEqual(releaseStub.callCount, 1); + + const exportResults = await getTraceExportResults(); + const actualSpanNames = exportResults.spanNames; + const spans = exportResults.spans; + const actualEventNames = exportResults.eventNames; + + const expectedSpanNames = [ + 'CloudSpanner.Database.runPartitionedUpdate', + ]; + assert.deepStrictEqual( + actualSpanNames, + expectedSpanNames, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + ); + + // Ensure that the first span actually produced an error that was recorded. + const parentSpan = spans[0]; + assert.deepStrictEqual( + SpanStatusCode.ERROR, + parentSpan.status.code, + 'Expected an ERROR span status' + ); + assert.deepStrictEqual( + fakeError.message, + parentSpan.status.message.toString(), + 'Mismatched span status message' + ); + + const expectedEventNames = []; + assert.deepStrictEqual( + actualEventNames, + expectedEventNames, + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + ); + done(); + }); + }); + + it('session released on transaction end', done => { + const releaseStub = ( + sandbox.stub(fakePool, 'release') as sinon.SinonStub + ).withArgs(fakeSession); + + database.runPartitionedUpdate(QUERY, async (err, rowCount) => { + const exportResults = await getTraceExportResults(); + const actualSpanNames = exportResults.spanNames; + const spans = exportResults.spans; + const actualEventNames = exportResults.eventNames; + + const expectedSpanNames = [ + 'CloudSpanner.Database.runPartitionedUpdate', + ]; + assert.deepStrictEqual( + actualSpanNames, + expectedSpanNames, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + ); + + // Ensure that the first span actually produced an error that was recorded. 
+ const parentSpan = spans[0]; + assert.deepStrictEqual( + SpanStatusCode.UNSET, + parentSpan.status.code, + 'Unexpected span status' + ); + assert.deepStrictEqual( + undefined, + parentSpan.status.message, + 'Mismatched span status message' + ); + + const expectedEventNames = []; + assert.deepStrictEqual( + actualEventNames, + expectedEventNames, + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + ); + done(); + }); + + fakePartitionedDml.emit('end'); + assert.strictEqual(releaseStub.callCount, 1); + }); + }); }); diff --git a/observability-test/spanner.ts b/observability-test/spanner.ts index 2e90b3044..c9ce60df2 100644 --- a/observability-test/spanner.ts +++ b/observability-test/spanner.ts @@ -613,6 +613,53 @@ describe('EndToEnd', async () => { done(); }); }); + + it('runPartitionedUpdate', async () => { + const [rowCount] = await database.runPartitionedUpdate({ + sql: updateSql, + }); + + await tracerProvider.forceFlush(); + await traceExporter.forceFlush(); + const spans = traceExporter.getFinishedSpans(); + + const actualEventNames: string[] = []; + const actualSpanNames: string[] = []; + spans.forEach(span => { + actualSpanNames.push(span.name); + span.events.forEach(event => { + actualEventNames.push(event.name); + }); + }); + + const expectedSpanNames = [ + 'CloudSpanner.Snapshot.begin', + 'CloudSpanner.Snapshot.runStream', + 'CloudSpanner.Snapshot.run', + 'CloudSpanner.Dml.runUpdate', + 'CloudSpanner.PartitionedDml.runUpdate', + 'CloudSpanner.Database.runPartitionedUpdate', + ]; + assert.deepStrictEqual( + actualSpanNames, + expectedSpanNames, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + ); + + const expectedEventNames = [ + 'Begin Transaction', + 'Transaction Creation Done', + 'Starting stream', + 'Acquiring session', + 'Cache hit: has usable session', + 'Acquired session', + ]; + assert.deepStrictEqual( + actualEventNames, + expectedEventNames, + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + ); + }); }); }); diff --git a/src/database.ts b/src/database.ts index 3d8321355..12db176c6 100644 --- a/src/database.ts +++ b/src/database.ts @@ -2858,13 +2858,27 @@ class Database extends common.GrpcServiceObject { query: string | RunPartitionedUpdateOptions, callback?: RunUpdateCallback ): void | Promise<[number]> { - this.pool_.getSession((err, session) => { - if (err) { - callback!(err as ServiceError, 0); - return; - } + const traceConfig = { + sql: query, + ...this._traceConfig, + }; + return startTrace('Database.runPartitionedUpdate', traceConfig, span => { + this.pool_.getSession((err, session) => { + if (err) { + setSpanError(span, err); + span.end(); + callback!(err as ServiceError, 0); + return; + } - this._runPartitionedUpdate(session!, query, callback); + this._runPartitionedUpdate(session!, query, (err, count) => { + if (err) { + setSpanError(span, err); + } + span.end(); + callback!(err, count); + }); + }); }); } From 66d99e836cd2bfbb3b0f78980ec2b499f9e5e563 Mon Sep 17 00:00:00 2001 From: Emmanuel T Odeke Date: Tue, 29 Oct 2024 22:03:36 -0700 Subject: [PATCH 2/4] feat: (observability, samples): add tracing end-to-end sample (#2130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: (observability, samples): add tracing end-to-end sample This change documents an end-to-end observability tracing sample using OpenTelemetry, which then exports trace spans to Google Cloud Trace. 
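In short, the sample wires OpenTelemetry into the Spanner client roughly as follows (a condensed sketch: the project, instance, and database IDs are placeholders, and the full sample additionally configures sampling and optional gRPC instrumentation):

```javascript
const {NodeTracerProvider} = require('@opentelemetry/sdk-trace-node');
const {BatchSpanProcessor} = require('@opentelemetry/sdk-trace-base');
const {
  TraceExporter,
} = require('@google-cloud/opentelemetry-cloud-trace-exporter');
const {Spanner} = require('@google-cloud/spanner');

// Export finished spans to Google Cloud Trace in batches.
const provider = new NodeTracerProvider();
provider.addSpanProcessor(new BatchSpanProcessor(new TraceExporter()));

// Pass the provider to the Spanner client instead of registering it globally.
const spanner = new Spanner({
  projectId: 'my-project-id',
  observabilityOptions: {
    tracerProvider: provider,
    enableExtendedTracing: true, // opt in to SQL statement annotation
  },
});

async function main() {
  const database = spanner
    .instance('my-instance-id')
    .database('my-database-id');
  const [rows] = await database.run({sql: 'SELECT 1'});
  console.log(`Query: ${rows.length} found.`);
  await provider.forceFlush(); // flush spans to the exporter
}
main();
```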
Updates #2079

* Update with code review comments

* Minimize observability sample

* Tailor samples to match rubric

* Add OBSERVABILITY.md docs file

* 🦉 Updates from OwlBot post-processor

  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* system-test: add sample test

* samples: remove demo of User Defined Traces

* Address review feedback

* Remove the vestige of tag until the team decides an appropriate one

* Address Surbhi's review feedback

* Update samples/system-test/spanner.test.js

---------

Co-authored-by: Owl Bot
Co-authored-by: surbhigarg92
---
 OBSERVABILITY.md                    | 102 +++++++++++++++++++++++++
 README.md                           |   1 +
 samples/README.md                   |  18 +++++
 samples/observability-traces.js     | 112 ++++++++++++++++++++++++++++
 samples/package.json                |   9 ++-
 samples/system-test/spanner.test.js |  10 +++
 6 files changed, 250 insertions(+), 2 deletions(-)
 create mode 100644 OBSERVABILITY.md
 create mode 100644 samples/observability-traces.js

diff --git a/OBSERVABILITY.md b/OBSERVABILITY.md
new file mode 100644
index 000000000..71ab18226
--- /dev/null
+++ b/OBSERVABILITY.md
@@ -0,0 +1,102 @@
+## Observability with OpenTelemetry
+
+This Cloud Spanner client supports [OpenTelemetry Traces](https://opentelemetry.io/), which give insight into the client internals and aid in debugging and troubleshooting production issues.
+
+By default, the functionality is disabled. You will need to add the OpenTelemetry dependencies, and
+configure and enable OpenTelemetry with the appropriate exporters at the startup of your application.
+
+**Table of contents:**
+
+* [Observability](#observability)
+  * [Tracing](#tracing)
+    * [OpenTelemetry Dependencies](#opentelemetry-dependencies)
+    * [OpenTelemetry Configuration](#opentelemetry-configuration)
+    * [SQL Statement span annotation](#sql-statement-span-annotation)
+    * [OpenTelemetry gRPC instrumentation](#opentelemetry-grpc-instrumentation)
+    * [Tracing Sample](#tracing-sample)
+
+### Tracing
+
+#### OpenTelemetry Dependencies
+
+Add the following dependencies to your `package.json`, or install them directly.
+```javascript
+// Required packages for OpenTelemetry SDKs
+"@opentelemetry/sdk-trace-base": "^1.26.0",
+"@opentelemetry/sdk-trace-node": "^1.26.0",
+
+// Package to use Google Cloud Trace exporter
+"@google-cloud/opentelemetry-cloud-trace-exporter": "^2.4.1",
+
+// Packages to enable gRPC instrumentation
+"@opentelemetry/instrumentation": "^0.53.0",
+"@opentelemetry/instrumentation-grpc": "^0.53.0",
+```
+
+#### OpenTelemetry Configuration
+
+```javascript
+const {
+  NodeTracerProvider,
+  TraceIdRatioBasedSampler,
+} = require('@opentelemetry/sdk-trace-node');
+const {
+  BatchSpanProcessor,
+} = require('@opentelemetry/sdk-trace-base');
+const {
+  TraceExporter,
+} = require('@google-cloud/opentelemetry-cloud-trace-exporter');
+const exporter = new TraceExporter();
+
+// Create the tracerProvider that the exporter will be attached to.
+const provider = new NodeTracerProvider({resource: resource});
+provider.addSpanProcessor(new BatchSpanProcessor(exporter));
+
+// Create the Cloud Spanner Client.
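+// (Note: `resource` used when constructing the provider above is assumed to be an
+// OpenTelemetry Resource describing your service, for example one built with the
+// `@opentelemetry/resources` package; define it before creating the provider.)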
+const {Spanner} = require('@google-cloud/spanner'); +const spanner = new Spanner({ + projectId: projectId, + observabilityOptions: { + // Inject the TracerProvider via SpannerOptions or + // register it as a global by invoking `provider.register()` + tracerProvider: provider, + }, +}); +``` + +#### SQL Statement span annotation + +To allow your SQL statements to be annotated in the appropriate spans, you need to opt-in, because +SQL statements can contain sensitive personally-identifiable-information (PII). + +You can opt-in by either: + +* Setting the environment variable `SPANNER_ENABLE_EXTENDED_TRACING=true` before your application is started +* In code, setting `enableExtendedTracing: true` in your SpannerOptions before creating the Cloud Spanner client + +```javascript +const spanner = new Spanner({ + projectId: projectId, + observabilityOptions: { + tracerProvider: provider, + enableExtendedTracing: true, + }, +``` + +#### OpenTelemetry gRPC instrumentation + +Optionally, you can enable OpenTelemetry gRPC instrumentation which produces traces of executed remote procedure calls (RPCs) +in your programs by these imports and instantiation. You could pass in the traceProvider or register it globally +by invoking `tracerProvider.register()` + +```javascript + const {registerInstrumentations} = require('@opentelemetry/instrumentation'); + const {GrpcInstrumentation} = require('@opentelemetry/instrumentation-grpc'); + registerInstrumentations({ + tracerProvider: tracerProvider, + instrumentations: [new GrpcInstrumentation()], + }); +``` + +#### Tracing Sample +For more information please see this [sample code](./samples/observability-traces.js) diff --git a/README.md b/README.md index bae3b2106..ae0b2e06c 100644 --- a/README.md +++ b/README.md @@ -155,6 +155,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/nodejs-spanner/tre | Numeric-add-column | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/numeric-add-column.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/numeric-add-column.js,samples/README.md) | | Numeric-query-parameter | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/numeric-query-parameter.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/numeric-query-parameter.js,samples/README.md) | | Numeric-update-data | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/numeric-update-data.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/numeric-update-data.js,samples/README.md) | +| Observability (Tracing) with OpenTelemetry | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/observability-traces.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/observability-traces.js,samples/README.md) | | Adds a column to an existing table in a Spanner PostgreSQL database. 
| [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/pg-add-column.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/pg-add-column.js,samples/README.md) | | Showcase the rules for case-sensitivity and case folding for a Spanner PostgreSQL database. | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/pg-case-sensitivity.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/pg-case-sensitivity.js,samples/README.md) | | Creates a PostgreSQL Database. | [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/pg-database-create.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/pg-database-create.js,samples/README.md) | diff --git a/samples/README.md b/samples/README.md index 9a2d55d5b..5e6612328 100644 --- a/samples/README.md +++ b/samples/README.md @@ -80,6 +80,7 @@ and automatic, synchronous replication for high availability. * [Numeric-add-column](#numeric-add-column) * [Numeric-query-parameter](#numeric-query-parameter) * [Numeric-update-data](#numeric-update-data) + * [Observability (Tracing) with OpenTelemetry](#observability-tracing-with-opentelemetry) * [Adds a column to an existing table in a Spanner PostgreSQL database.](#adds-a-column-to-an-existing-table-in-a-spanner-postgresql-database.) * [Showcase the rules for case-sensitivity and case folding for a Spanner PostgreSQL database.](#showcase-the-rules-for-case-sensitivity-and-case-folding-for-a-spanner-postgresql-database.) * [Creates a PostgreSQL Database.](#creates-a-postgresql-database.) @@ -1268,6 +1269,23 @@ __Usage:__ +### Observability (Tracing) with OpenTelemetry + +View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/observability-traces.js). + +[![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/nodejs-spanner&page=editor&open_in_editor=samples/observability-traces.js,samples/README.md) + +__Usage:__ + + +`node observability-traces.js ` + + +----- + + + + ### Adds a column to an existing table in a Spanner PostgreSQL database. View the [source code](https://github.com/googleapis/nodejs-spanner/blob/main/samples/pg-add-column.js). diff --git a/samples/observability-traces.js b/samples/observability-traces.js new file mode 100644 index 000000000..f8c802d4d --- /dev/null +++ b/samples/observability-traces.js @@ -0,0 +1,112 @@ +/*! + * Copyright 2024 Google LLC. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// sample-metadata: +// title: Observability (Tracing) with OpenTelemetry +// usage: node observability-traces.js + +'use strict'; + +// Setup OpenTelemetry and the trace exporter. +const { + NodeTracerProvider, + TraceIdRatioBasedSampler, +} = require('@opentelemetry/sdk-trace-node'); +const {BatchSpanProcessor} = require('@opentelemetry/sdk-trace-base'); + +// Create the Google Cloud Trace exporter for OpenTelemetry. +const { + TraceExporter, +} = require('@google-cloud/opentelemetry-cloud-trace-exporter'); +const exporter = new TraceExporter(); + +// Create the OpenTelemetry tracerProvider that the exporter shall be attached to. +const provider = new NodeTracerProvider({ + // Modify the following line to adjust the sampling rate. + // It is currently set to 1.0, meaning all requests will be traced. + sampler: new TraceIdRatioBasedSampler(1.0), +}); +provider.addSpanProcessor(new BatchSpanProcessor(exporter)); + +// Uncomment following line to register global tracerProvider instead +// of passing it into SpannerOptions.observabilityOptions. +// provider.register(); + +// Set `enableGrpcInstrumentation` to `true` to enable gRPC instrumentation. +const enableGrpcInstrumentation = false; +if (enableGrpcInstrumentation) { + const {registerInstrumentations} = require('@opentelemetry/instrumentation'); + const {GrpcInstrumentation} = require('@opentelemetry/instrumentation-grpc'); + registerInstrumentations({ + tracerProvider: provider, + instrumentations: [new GrpcInstrumentation()], + }); +} + +async function main( + projectId = 'my-project-id', + instanceId = 'my-instance-id', + databaseId = 'my-project-id' +) { + // Create the Cloud Spanner Client. + const {Spanner} = require('@google-cloud/spanner'); + + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + // const projectId = 'my-project-id'; + // const instanceId = 'my-instance-id'; + // const databaseId = 'my-database-id'; + + const spanner = new Spanner({ + projectId: projectId, + observabilityOptions: { + tracerProvider: provider, + enableExtendedTracing: true, + }, + }); + + // Acquire the database handle. + const instance = spanner.instance(instanceId); + const database = instance.database(databaseId); + + try { + const query = { + sql: 'SELECT 1', + }; + const [rows] = await database.run(query); + console.log(`Query: ${rows.length} found.`); + rows.forEach(row => console.log(row)); + } finally { + spanner.close(); + } + + provider.forceFlush(); + + // This sleep gives ample time for the trace + // spans to be exported to Google Cloud Trace. 
+ await new Promise(resolve => { + setTimeout(() => { + resolve(); + }, 8800); + }); +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/samples/package.json b/samples/package.json index 8a2b46f27..0585eed30 100644 --- a/samples/package.json +++ b/samples/package.json @@ -18,10 +18,15 @@ "@google-cloud/kms": "^4.0.0", "@google-cloud/precise-date": "^4.0.0", "@google-cloud/spanner": "^7.14.0", - "yargs": "^17.0.0", - "protobufjs": "^7.0.0" + "protobufjs": "^7.0.0", + "yargs": "^17.0.0" }, "devDependencies": { + "@google-cloud/opentelemetry-cloud-trace-exporter": "^2.4.1", + "@opentelemetry/instrumentation": "^0.53.0", + "@opentelemetry/instrumentation-grpc": "^0.53.0", + "@opentelemetry/sdk-trace-base": "^1.26.0", + "@opentelemetry/sdk-trace-node": "^1.26.0", "chai": "^4.2.0", "mocha": "^9.0.0", "p-limit": "^3.0.1" diff --git a/samples/system-test/spanner.test.js b/samples/system-test/spanner.test.js index 227f96dee..0abf87a3a 100644 --- a/samples/system-test/spanner.test.js +++ b/samples/system-test/spanner.test.js @@ -50,6 +50,7 @@ const alterTableWithForeignKeyDeleteCascadeCommand = 'node table-alter-with-foreign-key-delete-cascade.js'; const dropForeignKeyConstraintDeleteCascaseCommand = 'node table-drop-foreign-key-constraint-delete-cascade.js'; +const traceObservabilityCommand = 'node observability-traces.js'; const CURRENT_TIME = Math.round(Date.now() / 1000).toString(); const PROJECT_ID = process.env.GCLOUD_PROJECT; @@ -1574,6 +1575,15 @@ describe('Autogenerated Admin Clients', () => { ); }); + describe('observability', () => { + it('traces', () => { + const output = execSync( + `${traceObservabilityCommand} ${PROJECT_ID} ${INSTANCE_ID} ${DATABASE_ID}` + ); + assert.match(output, /Query: \d+ found./); + }); + }); + describe('leader options', () => { before(async () => { const instance = spanner.instance(SAMPLE_INSTANCE_ID); From be063b098c889bc627412639b412a69801a54171 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 06:38:25 +0000 Subject: [PATCH 3/4] chore(main): release 7.15.0 (#2118) :robot: I have created a release *beep* *boop* --- ## [7.15.0](https://togithub.com/googleapis/nodejs-spanner/compare/v7.14.0...v7.15.0) (2024-10-30) ### Features * (observability, samples): add tracing end-to-end sample ([#2130](https://togithub.com/googleapis/nodejs-spanner/issues/2130)) ([66d99e8](https://togithub.com/googleapis/nodejs-spanner/commit/66d99e836cd2bfbb3b0f78980ec2b499f9e5e563)) * (observability) add spans for BatchTransaction and Table ([#2115](https://togithub.com/googleapis/nodejs-spanner/issues/2115)) ([d51aae9](https://togithub.com/googleapis/nodejs-spanner/commit/d51aae9c9c3c0e6319d81c2809573ae54675acf3)), closes [#2114](https://togithub.com/googleapis/nodejs-spanner/issues/2114) * (observability) Add support for OpenTelemetry traces and allow observability options to be passed. 
([#2131](https://togithub.com/googleapis/nodejs-spanner/issues/2131)) ([5237e11](https://togithub.com/googleapis/nodejs-spanner/commit/5237e118befb4b7fe4aea76a80a91e822d7a22e4)), closes [#2079](https://togithub.com/googleapis/nodejs-spanner/issues/2079) * (observability) propagate database name for every span generated to aid in quick debugging ([#2155](https://togithub.com/googleapis/nodejs-spanner/issues/2155)) ([0342e74](https://togithub.com/googleapis/nodejs-spanner/commit/0342e74721a0684d8195a6299c3a634eefc2b522)) * (observability) trace Database.batchCreateSessions + SessionPool.createSessions ([#2145](https://togithub.com/googleapis/nodejs-spanner/issues/2145)) ([f489c94](https://togithub.com/googleapis/nodejs-spanner/commit/f489c9479fa5402f0c960cf896fd3be0e946f182)) * (observability): trace Database.runPartitionedUpdate ([#2176](https://togithub.com/googleapis/nodejs-spanner/issues/2176)) ([701e226](https://togithub.com/googleapis/nodejs-spanner/commit/701e22660d5ac9f0b3e940ad656b9ca6c479251d)), closes [#2079](https://togithub.com/googleapis/nodejs-spanner/issues/2079) * (observability): trace Database.runTransactionAsync ([#2167](https://togithub.com/googleapis/nodejs-spanner/issues/2167)) ([d0fe178](https://togithub.com/googleapis/nodejs-spanner/commit/d0fe178623c1c48245d11bcea97fcd340b6615af)), closes [#207](https://togithub.com/googleapis/nodejs-spanner/issues/207) * Allow multiple KMS keys to create CMEK database/backup ([#2099](https://togithub.com/googleapis/nodejs-spanner/issues/2099)) ([51bc8a7](https://togithub.com/googleapis/nodejs-spanner/commit/51bc8a7445ab8b3d2239493b69d9c271c1086dde)) * **observability:** Fix bugs found from product review + negative cases ([#2158](https://togithub.com/googleapis/nodejs-spanner/issues/2158)) ([cbc86fa](https://togithub.com/googleapis/nodejs-spanner/commit/cbc86fa80498af6bd745eebb9443612936e26d4e)) * **observability:** Trace Database methods ([#2119](https://togithub.com/googleapis/nodejs-spanner/issues/2119)) ([1f06871](https://togithub.com/googleapis/nodejs-spanner/commit/1f06871f7aca386756e8691013602b069697bb87)), closes [#2114](https://togithub.com/googleapis/nodejs-spanner/issues/2114) * **observability:** Trace Database.batchWriteAtLeastOnce ([#2157](https://togithub.com/googleapis/nodejs-spanner/issues/2157)) ([2a19ef1](https://togithub.com/googleapis/nodejs-spanner/commit/2a19ef1af4f6fd1b81d08afc15db76007859a0b9)), closes [#2079](https://togithub.com/googleapis/nodejs-spanner/issues/2079) * **observability:** Trace Transaction ([#2122](https://togithub.com/googleapis/nodejs-spanner/issues/2122)) ([a464bdb](https://togithub.com/googleapis/nodejs-spanner/commit/a464bdb5cbb7856b7a08dac3ff48132948b65792)), closes [#2114](https://togithub.com/googleapis/nodejs-spanner/issues/2114) ### Bug Fixes * Exact staleness timebound ([#2143](https://togithub.com/googleapis/nodejs-spanner/issues/2143)) ([f01516e](https://togithub.com/googleapis/nodejs-spanner/commit/f01516ec6ba44730622cfb050c52cd93f30bba7a)), closes [#2129](https://togithub.com/googleapis/nodejs-spanner/issues/2129) * GetMetadata for Session ([#2124](https://togithub.com/googleapis/nodejs-spanner/issues/2124)) ([2fd63ac](https://togithub.com/googleapis/nodejs-spanner/commit/2fd63acb87ce06a02d7fdfa78d836dbd7ad59a26)), closes [#2123](https://togithub.com/googleapis/nodejs-spanner/issues/2123) --- This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please). 
--- CHANGELOG.md | 24 ++++++++++++++++++++++++ package.json | 2 +- samples/package.json | 2 +- 3 files changed, 26 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c49869a2..9d8948e98 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://www.npmjs.com/package/nodejs-spanner?activeTab=versions +## [7.15.0](https://github.com/googleapis/nodejs-spanner/compare/v7.14.0...v7.15.0) (2024-10-30) + + +### Features + +* (observability, samples): add tracing end-to-end sample ([#2130](https://github.com/googleapis/nodejs-spanner/issues/2130)) ([66d99e8](https://github.com/googleapis/nodejs-spanner/commit/66d99e836cd2bfbb3b0f78980ec2b499f9e5e563)) +* (observability) add spans for BatchTransaction and Table ([#2115](https://github.com/googleapis/nodejs-spanner/issues/2115)) ([d51aae9](https://github.com/googleapis/nodejs-spanner/commit/d51aae9c9c3c0e6319d81c2809573ae54675acf3)), closes [#2114](https://github.com/googleapis/nodejs-spanner/issues/2114) +* (observability) Add support for OpenTelemetry traces and allow observability options to be passed. ([#2131](https://github.com/googleapis/nodejs-spanner/issues/2131)) ([5237e11](https://github.com/googleapis/nodejs-spanner/commit/5237e118befb4b7fe4aea76a80a91e822d7a22e4)), closes [#2079](https://github.com/googleapis/nodejs-spanner/issues/2079) +* (observability) propagate database name for every span generated to aid in quick debugging ([#2155](https://github.com/googleapis/nodejs-spanner/issues/2155)) ([0342e74](https://github.com/googleapis/nodejs-spanner/commit/0342e74721a0684d8195a6299c3a634eefc2b522)) +* (observability) trace Database.batchCreateSessions + SessionPool.createSessions ([#2145](https://github.com/googleapis/nodejs-spanner/issues/2145)) ([f489c94](https://github.com/googleapis/nodejs-spanner/commit/f489c9479fa5402f0c960cf896fd3be0e946f182)) +* (observability): trace Database.runPartitionedUpdate ([#2176](https://github.com/googleapis/nodejs-spanner/issues/2176)) ([701e226](https://github.com/googleapis/nodejs-spanner/commit/701e22660d5ac9f0b3e940ad656b9ca6c479251d)), closes [#2079](https://github.com/googleapis/nodejs-spanner/issues/2079) +* (observability): trace Database.runTransactionAsync ([#2167](https://github.com/googleapis/nodejs-spanner/issues/2167)) ([d0fe178](https://github.com/googleapis/nodejs-spanner/commit/d0fe178623c1c48245d11bcea97fcd340b6615af)), closes [#207](https://github.com/googleapis/nodejs-spanner/issues/207) +* Allow multiple KMS keys to create CMEK database/backup ([#2099](https://github.com/googleapis/nodejs-spanner/issues/2099)) ([51bc8a7](https://github.com/googleapis/nodejs-spanner/commit/51bc8a7445ab8b3d2239493b69d9c271c1086dde)) +* **observability:** Fix bugs found from product review + negative cases ([#2158](https://github.com/googleapis/nodejs-spanner/issues/2158)) ([cbc86fa](https://github.com/googleapis/nodejs-spanner/commit/cbc86fa80498af6bd745eebb9443612936e26d4e)) +* **observability:** Trace Database methods ([#2119](https://github.com/googleapis/nodejs-spanner/issues/2119)) ([1f06871](https://github.com/googleapis/nodejs-spanner/commit/1f06871f7aca386756e8691013602b069697bb87)), closes [#2114](https://github.com/googleapis/nodejs-spanner/issues/2114) +* **observability:** Trace Database.batchWriteAtLeastOnce ([#2157](https://github.com/googleapis/nodejs-spanner/issues/2157)) ([2a19ef1](https://github.com/googleapis/nodejs-spanner/commit/2a19ef1af4f6fd1b81d08afc15db76007859a0b9)), closes 
[#2079](https://github.com/googleapis/nodejs-spanner/issues/2079) +* **observability:** Trace Transaction ([#2122](https://github.com/googleapis/nodejs-spanner/issues/2122)) ([a464bdb](https://github.com/googleapis/nodejs-spanner/commit/a464bdb5cbb7856b7a08dac3ff48132948b65792)), closes [#2114](https://github.com/googleapis/nodejs-spanner/issues/2114) + + +### Bug Fixes + +* Exact staleness timebound ([#2143](https://github.com/googleapis/nodejs-spanner/issues/2143)) ([f01516e](https://github.com/googleapis/nodejs-spanner/commit/f01516ec6ba44730622cfb050c52cd93f30bba7a)), closes [#2129](https://github.com/googleapis/nodejs-spanner/issues/2129) +* GetMetadata for Session ([#2124](https://github.com/googleapis/nodejs-spanner/issues/2124)) ([2fd63ac](https://github.com/googleapis/nodejs-spanner/commit/2fd63acb87ce06a02d7fdfa78d836dbd7ad59a26)), closes [#2123](https://github.com/googleapis/nodejs-spanner/issues/2123) + ## [7.14.0](https://github.com/googleapis/nodejs-spanner/compare/v7.13.0...v7.14.0) (2024-08-14) diff --git a/package.json b/package.json index 27ed2f743..03923aee0 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@google-cloud/spanner", "description": "Cloud Spanner Client Library for Node.js", - "version": "7.14.0", + "version": "7.15.0", "license": "Apache-2.0", "author": "Google Inc.", "engines": { diff --git a/samples/package.json b/samples/package.json index 0585eed30..2d404e4b2 100644 --- a/samples/package.json +++ b/samples/package.json @@ -17,7 +17,7 @@ "dependencies": { "@google-cloud/kms": "^4.0.0", "@google-cloud/precise-date": "^4.0.0", - "@google-cloud/spanner": "^7.14.0", + "@google-cloud/spanner": "^7.15.0", "protobufjs": "^7.0.0", "yargs": "^17.0.0" }, From f08cf4b5d4a85b11e0e6e767b7897b000b43b78b Mon Sep 17 00:00:00 2001 From: surbhigarg92 Date: Wed, 30 Oct 2024 13:42:23 +0530 Subject: [PATCH 4/4] chore: refactor observability tests (#2177) * chore: integration test fix * traces tests refactoring * feat: (observability): trace Database.runPartitionedUpdate (#2176) This change traces Database.runPartitionedUpdate along with the appropriate tests for it with and without errors. Updates #2079 * moving additional attributes to separate PR --------- Co-authored-by: Emmanuel T Odeke Co-authored-by: alkatrivedi <58396306+alkatrivedi@users.noreply.github.com> --- observability-test/batch-transaction.ts | 174 ++++--- observability-test/database.ts | 31 +- observability-test/helper.ts | 46 ++ observability-test/observability.ts | 6 +- observability-test/session-pool.ts | 14 +- observability-test/spanner.ts | 636 +++++------------------- observability-test/transaction.ts | 17 +- src/instrument.ts | 1 - src/table.ts | 1 - test/database.ts | 4 +- test/partial-result-stream.ts | 2 +- test/spanner.ts | 2 +- 12 files changed, 263 insertions(+), 671 deletions(-) diff --git a/observability-test/batch-transaction.ts b/observability-test/batch-transaction.ts index 763fb7e36..89cd49b17 100644 --- a/observability-test/batch-transaction.ts +++ b/observability-test/batch-transaction.ts @@ -153,68 +153,63 @@ describe('BatchTransaction', () => { }; it('createQueryPartitions', done => { - const REQUEST = sandbox.stub(); - - const res = batchTransaction.createQueryPartitions( - QUERY, - (err, part, resp) => { - assert.ifError(err); - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - assert.strictEqual(spans.length, 2, 'Exactly 2 spans expected'); - - // Sort the spans by duration. 
- spans.sort((spanA, spanB) => { - spanA.duration < spanB.duration; - }); - - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - }); - - const expectedSpanNames = [ - 'CloudSpanner.BatchTransaction.createPartitions_', - 'CloudSpanner.BatchTransaction.createQueryPartitions', - ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - - // Ensure that createPartitions_ is a child span of createQueryPartitions. - const spanCreatePartitions_ = spans[0]; - const spanCreateQueryPartitions = spans[1]; - assert.ok( - spanCreateQueryPartitions.spanContext().traceId, - 'Expected that createQueryPartitions has a defined traceId' - ); - assert.ok( - spanCreatePartitions_.spanContext().traceId, - 'Expected that createPartitions_ has a defined traceId' - ); - assert.deepStrictEqual( - spanCreatePartitions_.spanContext().traceId, - spanCreateQueryPartitions.spanContext().traceId, - 'Expected that both spans share a traceId' - ); - assert.ok( - spanCreateQueryPartitions.spanContext().spanId, - 'Expected that createQueryPartitions has a defined spanId' - ); - assert.ok( - spanCreatePartitions_.spanContext().spanId, - 'Expected that createPartitions_ has a defined spanId' - ); - assert.deepStrictEqual( - spanCreatePartitions_.parentSpanId, - spanCreateQueryPartitions.spanContext().spanId, - 'Expected that createQueryPartitions is the parent to createPartitions_' - ); - done(); - } - ); + batchTransaction.createQueryPartitions(QUERY, err => { + assert.ifError(err); + traceExporter.forceFlush(); + const spans = traceExporter.getFinishedSpans(); + assert.strictEqual(spans.length, 2, 'Exactly 2 spans expected'); + + // Sort the spans by duration. + spans.sort((spanA, spanB) => { + spanA.duration < spanB.duration; + }); + + const actualSpanNames: string[] = []; + spans.forEach(span => { + actualSpanNames.push(span.name); + }); + + const expectedSpanNames = [ + 'CloudSpanner.BatchTransaction.createPartitions_', + 'CloudSpanner.BatchTransaction.createQueryPartitions', + ]; + assert.deepStrictEqual( + actualSpanNames, + expectedSpanNames, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + ); + + // Ensure that createPartitions_ is a child span of createQueryPartitions. 
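+      // (The parent/child relationship is verified below by checking that both spans
+      // share a traceId and that the child's parentSpanId equals the parent's spanId.)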
+ const spanCreatePartitions_ = spans[0]; + const spanCreateQueryPartitions = spans[1]; + assert.ok( + spanCreateQueryPartitions.spanContext().traceId, + 'Expected that createQueryPartitions has a defined traceId' + ); + assert.ok( + spanCreatePartitions_.spanContext().traceId, + 'Expected that createPartitions_ has a defined traceId' + ); + assert.deepStrictEqual( + spanCreatePartitions_.spanContext().traceId, + spanCreateQueryPartitions.spanContext().traceId, + 'Expected that both spans share a traceId' + ); + assert.ok( + spanCreateQueryPartitions.spanContext().spanId, + 'Expected that createQueryPartitions has a defined spanId' + ); + assert.ok( + spanCreatePartitions_.spanContext().spanId, + 'Expected that createPartitions_ has a defined spanId' + ); + assert.deepStrictEqual( + spanCreatePartitions_.parentSpanId, + spanCreateQueryPartitions.spanContext().spanId, + 'Expected that createQueryPartitions is the parent to createPartitions_' + ); + done(); + }); }); it('createReadPartitions', done => { @@ -222,34 +217,31 @@ describe('BatchTransaction', () => { const response = {}; REQUEST.callsFake((_, callback) => callback(null, response)); - const res = batchTransaction.createReadPartitions( - QUERY, - (err, part, resp) => { - assert.ifError(err); - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - assert.strictEqual(spans.length, 2, 'Exactly 2 spans expected'); - - // Sort the spans by duration. - spans.sort((spanA, spanB) => { - spanA.duration < spanB.duration; - }); - - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - }); - const expectedSpanNames = [ - 'CloudSpanner.BatchTransaction.createPartitions_', - 'CloudSpanner.BatchTransaction.createReadPartitions', - ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - done(); - } - ); + batchTransaction.createReadPartitions(QUERY, err => { + assert.ifError(err); + traceExporter.forceFlush(); + const spans = traceExporter.getFinishedSpans(); + assert.strictEqual(spans.length, 2, 'Exactly 2 spans expected'); + + // Sort the spans by duration. 
+ spans.sort((spanA, spanB) => { + spanA.duration < spanB.duration; + }); + + const actualSpanNames: string[] = []; + spans.forEach(span => { + actualSpanNames.push(span.name); + }); + const expectedSpanNames = [ + 'CloudSpanner.BatchTransaction.createPartitions_', + 'CloudSpanner.BatchTransaction.createReadPartitions', + ]; + assert.deepStrictEqual( + actualSpanNames, + expectedSpanNames, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` + ); + done(); + }); }); }); diff --git a/observability-test/database.ts b/observability-test/database.ts index 3b6811e8c..39ebe9afc 100644 --- a/observability-test/database.ts +++ b/observability-test/database.ts @@ -507,7 +507,6 @@ describe('Database', () => { let beginSnapshotStub: sinon.SinonStub; let getSessionStub: sinon.SinonStub; - let snapshotStub: sinon.SinonStub; beforeEach(() => { fakePool = database.pool_; @@ -524,9 +523,7 @@ describe('Database', () => { sandbox.stub(fakePool, 'getSession') as sinon.SinonStub ).callsFake(callback => callback(null, fakeSession)); - snapshotStub = sandbox - .stub(fakeSession, 'snapshot') - .returns(fakeSnapshot); + sandbox.stub(fakeSession, 'snapshot').returns(fakeSnapshot); }); it('with error', done => { @@ -1175,7 +1172,7 @@ describe('Database', () => { it('with error on null mutation should catch thrown error', done => { try { - database.writeAtLeastOnce(null, (err, res) => {}); + database.writeAtLeastOnce(null, () => {}); } catch (err) { // Performing a substring search on the error because // depending on the version of Node.js, the error might be either of: @@ -1250,7 +1247,6 @@ describe('Database', () => { let fakeSession: FakeSession; let fakeDataStream: Transform; let getSessionStub: sinon.SinonStub; - let requestStreamStub: sinon.SinonStub; const options = { requestOptions: { @@ -1269,9 +1265,7 @@ describe('Database', () => { sandbox.stub(fakePool, 'getSession') as sinon.SinonStub ).callsFake(callback => callback(null, fakeSession)); - requestStreamStub = sandbox - .stub(database, 'requestStream') - .returns(fakeDataStream); + sandbox.stub(database, 'requestStream').returns(fakeDataStream); }); it('on retry with "Session not found" error', done => { @@ -1320,7 +1314,6 @@ describe('Database', () => { 'Expected an ERROR span status' ); - const errorMessage = firstSpan.status.message; assert.deepStrictEqual( firstSpan.status.message, sessionNotFoundError.message @@ -1658,7 +1651,7 @@ describe('Database', () => { .throws(ourException); assert.rejects(async () => { - const value = await database.runTransactionAsync(async txn => { + await database.runTransactionAsync(async txn => { const result = await txn.run('SELECT 1'); await txn.commit(); return result; @@ -1724,8 +1717,6 @@ describe('Database', () => { let fakeStream2: Transform; let getSessionStub: sinon.SinonStub; - let snapshotStub: sinon.SinonStub; - let runStreamStub: sinon.SinonStub; beforeEach(() => { fakePool = database.pool_; @@ -1746,15 +1737,11 @@ describe('Database', () => { .onSecondCall() .callsFake(callback => callback(null, fakeSession2)); - snapshotStub = sandbox - .stub(fakeSession, 'snapshot') - .returns(fakeSnapshot); + sandbox.stub(fakeSession, 'snapshot').returns(fakeSnapshot); sandbox.stub(fakeSession2, 'snapshot').returns(fakeSnapshot2); - runStreamStub = sandbox - .stub(fakeSnapshot, 'runStream') - .returns(fakeStream); + sandbox.stub(fakeSnapshot, 'runStream').returns(fakeStream); sandbox.stub(fakeSnapshot2, 'runStream').returns(fakeStream2); }); @@ -1975,7 +1962,6 @@ describe('Database', () => 
{ let getSessionStub; let beginStub; - let runUpdateStub; beforeEach(() => { fakePool = database.pool_; @@ -1996,7 +1982,7 @@ describe('Database', () => { sandbox.stub(fakePartitionedDml, 'begin') as sinon.SinonStub ).callsFake(callback => callback(null)); - runUpdateStub = ( + ( sandbox.stub(fakePartitionedDml, 'runUpdate') as sinon.SinonStub ).callsFake((_, callback) => callback(null)); }); @@ -2031,7 +2017,6 @@ describe('Database', () => { it('with pool errors', done => { const fakeError = new Error('err'); - const fakeCallback = sandbox.spy(); getSessionStub.callsFake(callback => callback(fakeError)); database.runPartitionedUpdate(QUERY, async (err, rowCount) => { @@ -2132,7 +2117,7 @@ describe('Database', () => { sandbox.stub(fakePool, 'release') as sinon.SinonStub ).withArgs(fakeSession); - database.runPartitionedUpdate(QUERY, async (err, rowCount) => { + database.runPartitionedUpdate(QUERY, async () => { const exportResults = await getTraceExportResults(); const actualSpanNames = exportResults.spanNames; const spans = exportResults.spans; diff --git a/observability-test/helper.ts b/observability-test/helper.ts index b6d429d32..591171666 100644 --- a/observability-test/helper.ts +++ b/observability-test/helper.ts @@ -19,6 +19,25 @@ import * as assert from 'assert'; const {ReadableSpan} = require('@opentelemetry/sdk-trace-base'); import {SEMATTRS_DB_NAME} from '@opentelemetry/semantic-conventions'; +export const batchCreateSessionsEvents = [ + 'Requesting 25 sessions', + 'Creating 25 sessions', + 'Requested for 25 sessions returned 25', +]; + +export const waitingSessionsEvents = [ + 'Acquiring session', + 'Waiting for a session to become available', + 'Acquired session', + 'Using Session', +]; + +export const cacheSessionEvents = [ + 'Acquiring session', + 'Cache hit: has usable session', + 'Acquired session', +]; + /** * This utility exists as a test helper because mocha has builtin "context" * and referring to context causes type/value collision errors. 
@@ -47,3 +66,30 @@ export function generateWithAllSpansHaveDBName(dbName: String): Function { }); }; } + +export async function verifySpansAndEvents( + traceExporter, + expectedSpans, + expectedEvents +) { + await traceExporter.forceFlush(); + const spans = traceExporter.getFinishedSpans(); + const actualEventNames: string[] = []; + const actualSpanNames: string[] = []; + spans.forEach(span => { + actualSpanNames.push(span.name); + span.events.forEach(event => { + actualEventNames.push(event.name); + }); + }); + assert.deepStrictEqual( + actualSpanNames, + expectedSpans, + `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpans}` + ); + assert.deepStrictEqual( + actualEventNames, + expectedEvents, + `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEvents}` + ); +} diff --git a/observability-test/observability.ts b/observability-test/observability.ts index 576005687..8270cd7e9 100644 --- a/observability-test/observability.ts +++ b/observability-test/observability.ts @@ -99,7 +99,7 @@ describe('startTrace', () => { 'aSpan', {opts: {tracerProvider: overridingProvider}}, async span => { - await new Promise((resolve, reject) => setTimeout(resolve, 400)); + await new Promise(resolve => setTimeout(resolve, 400)); span.end(); const gotSpansFromGlobal = globalExporter.getFinishedSpans(); @@ -250,7 +250,7 @@ describe('startTrace', () => { 'aSpan', {opts: {tracerProvider: overridingProvider}}, async span => { - await new Promise((resolve, reject) => setTimeout(resolve, 400)); + await new Promise(resolve => setTimeout(resolve, 400)); span.end(); const gotSpansFromGlobal = globalExporter.getFinishedSpans(); @@ -382,7 +382,6 @@ describe('setError', () => { it('a non-empty string should set the message', () => { startTrace('aSpan', {opts: {tracerProvider: provider}}, span => { - const status1 = span.status; const res = setSpanError(span, 'this one'); assert.strictEqual(res, true, 'value was set'); span.end(); @@ -438,7 +437,6 @@ describe('setErrorAndException', () => { it('a non-empty string should set the message', () => { startTrace('aSpan', {opts: {tracerProvider: provider}}, span => { - const status1 = span.status; const res = setSpanErrorAndException(span, 'this one'); assert.strictEqual(res, true, 'value was set'); span.end(); diff --git a/observability-test/session-pool.ts b/observability-test/session-pool.ts index e92b42b0a..f60553dc0 100644 --- a/observability-test/session-pool.ts +++ b/observability-test/session-pool.ts @@ -65,7 +65,7 @@ describe('SessionPool', () => { } as unknown as Database; const sandbox = sinon.createSandbox(); - const shouldNotBeCalled = sandbox.stub().throws('Should not be called.'); + sandbox.stub().throws('Should not be called.'); const createSession = (name = 'id', props?): Session => { props = props || {}; @@ -112,10 +112,8 @@ describe('SessionPool', () => { const OPTIONS = 3; it('on exception from Database.batchCreateSessions', async () => { const ourException = new Error('this fails intentionally'); - const stub = sandbox - .stub(DATABASE, 'batchCreateSessions') - .throws(ourException); - const releaseStub = sandbox.stub(sessionPool, 'release'); + sandbox.stub(DATABASE, 'batchCreateSessions').throws(ourException); + sandbox.stub(sessionPool, 'release'); assert.rejects(async () => { await sessionPool._createSessions(OPTIONS); @@ -168,10 +166,8 @@ describe('SessionPool', () => { it('without error', async () => { const RESPONSE = [[{}, {}, {}]]; - const stub = sandbox - .stub(DATABASE, 'batchCreateSessions') - .resolves(RESPONSE); - 
const releaseStub = sandbox.stub(sessionPool, 'release'); + sandbox.stub(DATABASE, 'batchCreateSessions').resolves(RESPONSE); + sandbox.stub(sessionPool, 'release'); await sessionPool._createSessions(OPTIONS); assert.strictEqual(sessionPool.size, 3); diff --git a/observability-test/spanner.ts b/observability-test/spanner.ts index c9ce60df2..c60549776 100644 --- a/observability-test/spanner.ts +++ b/observability-test/spanner.ts @@ -21,13 +21,11 @@ import {Database, Instance, Spanner} from '../src'; import {MutationSet} from '../src/transaction'; import protobuf = google.spanner.v1; import v1 = google.spanner.v1; -import PartialResultSet = google.spanner.v1.PartialResultSet; import * as mock from '../test/mockserver/mockspanner'; import * as mockInstanceAdmin from '../test/mockserver/mockinstanceadmin'; import * as mockDatabaseAdmin from '../test/mockserver/mockdatabaseadmin'; import * as sinon from 'sinon'; import {Row} from '../src/partial-result-stream'; -import {Json} from '../src/codec'; const { AlwaysOnSampler, NodeTracerProvider, @@ -40,14 +38,16 @@ const { disableContextAndManager, generateWithAllSpansHaveDBName, setGlobalContextManager, + verifySpansAndEvents, + batchCreateSessionsEvents, + waitingSessionsEvents, + cacheSessionEvents, } = require('./helper'); const { AsyncHooksContextManager, } = require('@opentelemetry/context-async-hooks'); const {ObservabilityOptions} = require('../src/instrument'); -import {SessionPool} from '../src/session-pool'; - const selectSql = 'SELECT 1'; const updateSql = 'UPDATE FOO SET BAR=1 WHERE BAZ=2'; @@ -165,7 +165,7 @@ describe('EndToEnd', async () => { // To deflake expectations of session creation, let's // issue out a warm-up request request that'll ensure // that the SessionPool is created deterministically. - const [rows] = await database.run('SELECT 1'); + await database.run('SELECT 1'); // Clear out any present traces to make a clean slate for testing. 
traceExporter.forceFlush(); traceExporter.reset(); @@ -173,187 +173,83 @@ describe('EndToEnd', async () => { describe('Database', () => { it('getSessions', async () => { - const [rows] = await database.getSessions(); - - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - assert.strictEqual(spans.length, 1, 'Exactly 1 span expected'); - withAllSpansHaveDBName(spans); - - const actualSpanNames: string[] = []; - const actualEventNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - + await database.getSessions(); const expectedSpanNames = ['CloudSpanner.Database.getSessions']; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = []; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); }); it('getSnapshot', done => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); - database.getSnapshot((err, transaction) => { assert.ifError(err); - transaction!.run('SELECT 1', async (err, rows) => { + transaction!.run('SELECT 1', async err => { assert.ifError(err); transaction!.end(); - - await tracerProvider.forceFlush(); - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - const actualSpanNames: string[] = []; - const actualEventNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = [ 'CloudSpanner.Snapshot.begin', 'CloudSpanner.Database.getSnapshot', 'CloudSpanner.Snapshot.runStream', 'CloudSpanner.Snapshot.run', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = [ 'Begin Transaction', 'Transaction Creation Done', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Starting stream', ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); - done(); }); }); }); it('getTransaction', done => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); database.getTransaction(async (err, transaction) => { assert.ifError(err); assert.ok(transaction); transaction!.end(); transaction!.commit(); - await tracerProvider.forceFlush(); - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = ['CloudSpanner.Database.getTransaction']; - assert.deepStrictEqual( - actualSpanNames, + const expectedEventNames = 
[...cacheSessionEvents, 'Using Session']; + await verifySpansAndEvents( + traceExporter, expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - - const expectedEventNames = [ - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', - 'Using Session', - ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + expectedEventNames ); - done(); }); }); it('runStream', done => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); database .runStream('SELECT 1') - .on('data', row => {}) + .on('data', () => {}) .once('error', assert.ifError) - .on('end', () => { - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - const actualSpanNames: string[] = []; - const actualEventNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - + .on('end', async () => { const expectedSpanNames = [ 'CloudSpanner.Snapshot.runStream', 'CloudSpanner.Database.runStream', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = [ 'Starting stream', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Using Session', ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); done(); @@ -361,384 +257,144 @@ describe('EndToEnd', async () => { }); it('run', async () => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); - const [rows] = await database.run('SELECT 1'); - - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - // Sort the spans by duration. - spans.sort((spanA, spanB) => { - spanA.duration < spanB.duration; - }); - - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - + await database.run('SELECT 1'); const expectedSpanNames = [ 'CloudSpanner.Snapshot.runStream', 'CloudSpanner.Database.runStream', 'CloudSpanner.Database.run', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - - // Ensure that RunStream is a child span of createQueryPartitions. 
- const spanRunStream = spans[0]; - const spanRun = spans[1]; - assert.ok( - spanRun.spanContext().traceId, - 'Expected that createQueryPartitions has a defined traceId' - ); - assert.ok( - spanRunStream.spanContext().traceId, - 'Expected that RunStream has a defined traceId' - ); - assert.ok( - spanRun.spanContext().spanId, - 'Expected that createQueryPartitions has a defined spanId' - ); - assert.ok( - spanRunStream.spanContext().spanId, - 'Expected that RunStream has a defined spanId' - ); - const expectedEventNames = [ 'Starting stream', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Using Session', ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); }); it('runTransaction', done => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); - database.runTransaction(async (err, transaction) => { assert.ifError(err); await transaction!.run('SELECT 1'); await transaction!.commit(); await transaction!.end(); - await traceExporter.forceFlush(); - - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = [ 'CloudSpanner.Snapshot.runStream', 'CloudSpanner.Snapshot.run', 'CloudSpanner.Transaction.commit', 'CloudSpanner.Database.runTransaction', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = [ 'Starting stream', 'Transaction Creation Done', 'Starting Commit', 'Commit Done', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); done(); }); }); it('runTransactionAsync', async () => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); await database.runTransactionAsync(async transaction => { await transaction!.run('SELECT 1'); }); - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = [ 'CloudSpanner.Snapshot.runStream', 'CloudSpanner.Snapshot.run', 'CloudSpanner.Database.runTransactionAsync', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = [ 'Starting stream', 'Transaction Creation Done', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Using Session', ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: 
${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); }); it('writeAtLeastOnce', done => { - const withAllSpansHaveDBName = generateWithAllSpansHaveDBName( - database.formattedName_ - ); const blankMutations = new MutationSet(); - database.writeAtLeastOnce(blankMutations, (err, response) => { + database.writeAtLeastOnce(blankMutations, async (err, response) => { assert.ifError(err); assert.ok(response); - - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - withAllSpansHaveDBName(spans); - - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = [ 'CloudSpanner.Transaction.commit', 'CloudSpanner.Database.writeAtLeastOnce', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = [ 'Starting Commit', 'Commit Done', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Using Session', ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); - done(); }); }); it('batchCreateSessions', done => { - database.batchCreateSessions(5, (err, sessions) => { + database.batchCreateSessions(5, async err => { assert.ifError(err); - - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = ['CloudSpanner.Database.batchCreateSessions']; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - const expectedEventNames = []; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); - done(); - }); - }); - it('runPartitionedUpdate', async () => { - const [rowCount] = await database.runPartitionedUpdate({ - sql: updateSql, - }); - - await tracerProvider.forceFlush(); - await traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); + it('runPartitionedUpdate', async () => { + await database.runPartitionedUpdate({ + sql: updateSql, + }); - const actualEventNames: string[] = []; - const actualSpanNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); + const expectedSpanNames = [ + 'CloudSpanner.Snapshot.begin', + 'CloudSpanner.Snapshot.runStream', + 'CloudSpanner.Snapshot.run', + 'CloudSpanner.Dml.runUpdate', + 'CloudSpanner.PartitionedDml.runUpdate', + 'CloudSpanner.Database.runPartitionedUpdate', + ]; + const expectedEventNames = [ + 'Begin Transaction', + 'Transaction Creation Done', + 'Starting stream', + 'Acquiring session', + 'Cache hit: has usable session', + 'Acquired 
session', + ]; + verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames + ); }); }); - - const expectedSpanNames = [ - 'CloudSpanner.Snapshot.begin', - 'CloudSpanner.Snapshot.runStream', - 'CloudSpanner.Snapshot.run', - 'CloudSpanner.Dml.runUpdate', - 'CloudSpanner.PartitionedDml.runUpdate', - 'CloudSpanner.Database.runPartitionedUpdate', - ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - - const expectedEventNames = [ - 'Begin Transaction', - 'Transaction Creation Done', - 'Starting stream', - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', - ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` - ); }); }); }); -describe('SessionPool', async () => { - const traceExporter = new InMemorySpanExporter(); - const sampler = new AlwaysOnSampler(); - const provider = new NodeTracerProvider({ - sampler: sampler, - exporter: traceExporter, - }); - provider.addSpanProcessor(new SimpleSpanProcessor(traceExporter)); - - const setupResult = await setup({ - tracerProvider: provider, - enableExtendedTracing: false, - }); - - const spanner = setupResult.spanner; - const server = setupResult.server; - const spannerMock = setupResult.spannerMock; - const instance = spanner.instance('instance'); - - after(async () => { - traceExporter.reset(); - await provider.shutdown(); - spannerMock.resetRequests(); - spanner.close(); - server.tryShutdown(() => {}); - }); - - it('_createSessions', async () => { - // The first invocation of new SessionPool shall implicitly happen in here. - const database = instance.database('database'); - await database.run('SELECT 1'); - - await provider.forceFlush(); - traceExporter.reset(); - - // Explicitly invoking new SessionPool. - const sessionPool = new SessionPool(database); - - const OPTIONS = 3; - await sessionPool._createSessions(OPTIONS); - - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - - const actualSpanNames: string[] = []; - const actualEventNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - - const expectedSpanNames = [ - 'CloudSpanner.Database.batchCreateSessions', - 'CloudSpanner.SessionPool.createSessions', - ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); - - const expectedEventNames = [ - 'Requesting 3 sessions', - 'Creating 3 sessions', - 'Requested for 3 sessions returned 3', - ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` - ); - }); -}); - describe('ObservabilityOptions injection and propagation', async () => { it('Passed into Spanner, Instance and Database', async () => { const traceExporter = new InMemorySpanExporter(); @@ -834,7 +490,7 @@ describe('ObservabilityOptions injection and propagation', async () => { // To deflake expectations of session creation, let's // issue out a warm-up request request that'll ensure // that the SessionPool is created deterministically. - const [rows] = await database.run('SELECT 1'); + await database.run('SELECT 1'); // Clear out any present traces to make a clean slate for testing. 
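// Note: the remaining hunks also spread batchCreateSessionsEvents and
// waitingSessionsEvents into the expected-event lists. Their definitions are
// not shown in this section either; judging by the event names those spreads
// replace in the hunks below, a plausible shape is the following (the count of
// 25 matches the session totals asserted in the removed lines; the real
// constants may be defined or parameterized differently).
const batchCreateSessionsEvents = [
  'Requesting 25 sessions',
  'Creating 25 sessions',
  'Requested for 25 sessions returned 25',
];

const waitingSessionsEvents = [
  'Acquiring session',
  'Waiting for a session to become available',
  'Acquired session',
  'Using Session',
];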
traceExporter.forceFlush(); traceExporter.reset(); @@ -853,7 +509,7 @@ describe('ObservabilityOptions injection and propagation', async () => { database.getTransaction((err, tx) => { assert.ifError(err); - tx!.run('SELECT 1', async (err, rows) => { + tx!.run('SELECT 1', async () => { tx!.end(); await tracerProvider.forceFlush(); @@ -883,9 +539,7 @@ describe('ObservabilityOptions injection and propagation', async () => { ); const expectedEventNames = [ - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Using Session', 'Starting stream', 'Transaction Creation Done', @@ -909,7 +563,7 @@ describe('ObservabilityOptions injection and propagation', async () => { traceExporter.reset(); tx!.begin(); - tx!.runUpdate(updateSql, async (err, rowCount) => { + tx!.runUpdate(updateSql, async err => { assert.ifError(err); tx!.end(); @@ -964,7 +618,7 @@ describe('ObservabilityOptions injection and propagation', async () => { .runStream(selectSql) .on('data', () => rowCount++) .on('error', assert.ifError) - .on('stats', _stats => {}) + .on('stats', () => {}) .on('end', async () => { tx!.end(); @@ -994,9 +648,7 @@ describe('ObservabilityOptions injection and propagation', async () => { ); const expectedEventNames = [ - 'Acquiring session', - 'Cache hit: has usable session', - 'Acquired session', + ...cacheSessionEvents, 'Using Session', 'Starting stream', ]; @@ -1020,9 +672,9 @@ describe('ObservabilityOptions injection and propagation', async () => { tx!.begin(); - tx!.runUpdate(updateSql, async (err, rowCount) => { + tx!.runUpdate(updateSql, async err => { assert.ifError(err); - tx!.rollback(async err => { + tx!.rollback(async () => { tx!.end(); await tracerProvider.forceFlush(); traceExporter.forceFlush(); @@ -1116,7 +768,7 @@ describe('ObservabilityOptions injection and propagation', async () => { database.formattedName_ ); - database.run('SELECT 1', (err, rows) => { + database.run('SELECT 1', err => { assert.ifError(err); injectedTraceExporter.forceFlush(); @@ -1162,14 +814,9 @@ describe('ObservabilityOptions injection and propagation', async () => { ); const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, ]; assert.deepStrictEqual( actualEventNames, @@ -1183,7 +830,6 @@ describe('ObservabilityOptions injection and propagation', async () => { describe('E2E traces with async/await', async () => { let server: grpc.Server; let spanner: Spanner; - let database: Database; let spannerMock: mock.MockSpanner; let traceExporter: typeof InMemorySpanExporter; let provider: typeof TracerProvider; @@ -1310,14 +956,9 @@ describe('E2E traces with async/await', async () => { // Finally check for the collective expected event names. 
const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, ]; assert.deepStrictEqual( actualEventNames, @@ -1339,7 +980,7 @@ describe('E2E traces with async/await', async () => { const [rows] = await database.run(query); rows.forEach(row => { - const json = row.toJSON(); + row.toJSON(); }); provider.forceFlush(); @@ -1360,7 +1001,7 @@ describe('E2E traces with async/await', async () => { database.run(query, (err, rows) => { rows.forEach(row => { - const json = row.toJSON(); + row.toJSON(); }); provider.forceFlush(); @@ -1378,7 +1019,6 @@ describe('E2E traces with async/await', async () => { describe('Negative cases', async () => { let server: grpc.Server; let spanner: Spanner; - let database: Database; let spannerMock: mock.MockSpanner; let traceExporter: typeof InMemorySpanExporter; let provider: typeof TracerProvider; @@ -1548,14 +1188,9 @@ SELECT 1p // Finally check for the collective expected event names. const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, ]; assert.deepStrictEqual( actualEventNames, @@ -1569,7 +1204,7 @@ SELECT 1p const database = instance.database('database'); try { - const [rows] = await database.run(selectSql1p); + await database.run(selectSql1p); } catch (e) { // This catch is meant to ensure that we // can assert on the generated spans. @@ -1584,7 +1219,7 @@ SELECT 1p const instance = spanner.instance('instance'); const database = instance.database('database'); - database.run(selectSql1p, (err, rows) => { + database.run(selectSql1p, err => { assert.ok(err); provider.forceFlush(); assertRunBadSyntaxExpectations(); @@ -1704,9 +1339,7 @@ SELECT 1p // Finally check for the collective expected event names. const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', 'Stream broken. Safe to retry', 'Begin Transaction', @@ -1715,10 +1348,7 @@ SELECT 1p 'Transaction Creation Done', 'Starting Commit', 'Commit Done', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, 'exception', ]; assert.deepStrictEqual( @@ -2101,14 +1731,9 @@ describe('Traces for ExecuteStream broken stream retries', () => { // Finally check for the collective expected event names. const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, 'Transaction Creation Done', ]; assert.deepStrictEqual( @@ -2172,19 +1797,14 @@ describe('Traces for ExecuteStream broken stream retries', () => { // Finally check for the collective expected event names. 
const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', 'Re-attempting start stream', 'Resuming stream', 'Resuming stream', 'Resuming stream', 'Resuming stream', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, ]; assert.deepStrictEqual( actualEventNames, @@ -2243,19 +1863,14 @@ describe('Traces for ExecuteStream broken stream retries', () => { // Finally check for the collective expected event names. const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', + ...batchCreateSessionsEvents, 'Starting stream', 'Re-attempting start stream', 'Begin Transaction', 'Transaction Creation Done', 'Starting Commit', 'Commit Done', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...waitingSessionsEvents, ]; assert.deepStrictEqual( actualEventNames, @@ -2298,43 +1913,20 @@ describe('Traces for ExecuteStream broken stream retries', () => { 1, 'runTransactionAsync.attempt must be 1' ); - - traceExporter.forceFlush(); - const spans = traceExporter.getFinishedSpans(); - - const actualSpanNames: string[] = []; - const actualEventNames: string[] = []; - spans.forEach(span => { - actualSpanNames.push(span.name); - span.events.forEach(event => { - actualEventNames.push(event.name); - }); - }); - const expectedSpanNames = [ 'CloudSpanner.Database.batchCreateSessions', 'CloudSpanner.SessionPool.createSessions', 'CloudSpanner.Database.runTransactionAsync', ]; - assert.deepStrictEqual( - actualSpanNames, - expectedSpanNames, - `span names mismatch:\n\tGot: ${actualSpanNames}\n\tWant: ${expectedSpanNames}` - ); const expectedEventNames = [ - 'Requesting 25 sessions', - 'Creating 25 sessions', - 'Requested for 25 sessions returned 25', - 'Acquiring session', - 'Waiting for a session to become available', - 'Acquired session', - 'Using Session', + ...batchCreateSessionsEvents, + ...waitingSessionsEvents, ]; - assert.deepStrictEqual( - actualEventNames, - expectedEventNames, - `Unexpected events:\n\tGot: ${actualEventNames}\n\tWant: ${expectedEventNames}` + await verifySpansAndEvents( + traceExporter, + expectedSpanNames, + expectedEventNames ); }); }); diff --git a/observability-test/transaction.ts b/observability-test/transaction.ts index 233728784..20e604d94 100644 --- a/observability-test/transaction.ts +++ b/observability-test/transaction.ts @@ -69,18 +69,9 @@ describe('Transaction', () => { const PARTIAL_RESULT_STREAM = sandbox.stub(); const PROMISIFY_ALL = sandbox.stub(); - // eslint-disable-next-line @typescript-eslint/no-explicit-any let Snapshot; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let Dml; - // eslint-disable-next-line @typescript-eslint/no-explicit-any let Transaction; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - let PartitionedDml; - // eslint-disable-next-line @typescript-eslint/no-explicit-any let transaction; - - // eslint-disable-next-line @typescript-eslint/no-explicit-any let snapshot; before(() => { @@ -91,9 +82,7 @@ describe('Transaction', () => { }); Snapshot = txns.Snapshot; - Dml = txns.Dml; Transaction = txns.Transaction; - PartitionedDml = txns.PartitionedDml; }); let traceExporter: typeof InMemorySpanExporter; @@ -246,11 +235,10 @@ describe('Transaction', () => { const TABLE = 'my-table-123'; let 
fakeStream; - let stub; beforeEach(() => { fakeStream = new EventEmitter(); - stub = sandbox.stub(snapshot, 'createReadStream').returns(fakeStream); + sandbox.stub(snapshot, 'createReadStream').returns(fakeStream); }); it('with error', done => { @@ -348,11 +336,10 @@ describe('Transaction', () => { const QUERY = 'SELET * FROM `MyTable`'; let fakeStream; - let stub; beforeEach(() => { fakeStream = new EventEmitter(); - stub = sandbox.stub(snapshot, 'runStream').returns(fakeStream); + sandbox.stub(snapshot, 'runStream').returns(fakeStream); }); it('without error', done => { diff --git a/src/instrument.ts b/src/instrument.ts index 15fe7b8e3..8ad123bf6 100644 --- a/src/instrument.ts +++ b/src/instrument.ts @@ -131,7 +131,6 @@ export function startTrace( config: traceConfig | undefined, cb: (span: Span) => T ): T { - const origConfig = config; if (!config) { config = {} as traceConfig; } diff --git a/src/table.ts b/src/table.ts index 8f87ba00b..227f8d107 100644 --- a/src/table.ts +++ b/src/table.ts @@ -35,7 +35,6 @@ import { ObservabilityOptions, startTrace, setSpanError, - setSpanErrorAndException, traceConfig, } from './instrument'; diff --git a/test/database.ts b/test/database.ts index c1fa54ea1..d5f5ec6db 100644 --- a/test/database.ts +++ b/test/database.ts @@ -46,7 +46,6 @@ import { CommitOptions, MutationSet, } from '../src/transaction'; -import {error} from 'is'; let promisified = false; const fakePfy = extend({}, pfy, { @@ -836,9 +835,8 @@ describe('Database', () => { }); it('should return an error when passing null mutation', done => { - const fakeError = new Error('err'); try { - database.writeAtLeastOnce(null, (err, res) => {}); + database.writeAtLeastOnce(null, () => {}); } catch (err) { const errorMessage = (err as grpc.ServiceError).message; assert.ok( diff --git a/test/partial-result-stream.ts b/test/partial-result-stream.ts index 799d29b00..0154f048c 100644 --- a/test/partial-result-stream.ts +++ b/test/partial-result-stream.ts @@ -335,7 +335,7 @@ describe('PartialResultStream', () => { }); partialResultStream(requestFnStub, {gaxOptions: {timeout: 0}}) - .on('data', row => {}) + .on('data', () => {}) .on('error', err => { assert.strictEqual(err.code, grpc.status.DEADLINE_EXCEEDED); assert.strictEqual(requestFnStub.callCount, 1); diff --git a/test/spanner.ts b/test/spanner.ts index d324ed911..0212f6b92 100644 --- a/test/spanner.ts +++ b/test/spanner.ts @@ -3556,7 +3556,7 @@ describe('Spanner with mock server', () => { requestOptions: {transactionTag: 'transaction-tag'}, }); const transaction = promise[0]; - await transaction.run('SELECT 1').then(results => { + await transaction.run('SELECT 1').then(() => { const request = spannerMock.getRequests().find(val => { return (val as v1.ExecuteSqlRequest).sql; }) as v1.ExecuteSqlRequest;