diff --git a/sdk/monitor/monitor-query/CHANGELOG.md b/sdk/monitor/monitor-query/CHANGELOG.md
index 9830173cc9c7..15337620adfa 100644
--- a/sdk/monitor/monitor-query/CHANGELOG.md
+++ b/sdk/monitor/monitor-query/CHANGELOG.md
@@ -1,14 +1,34 @@
 # Release History

-## 1.0.0-beta.6 (Unreleased)
+## 1.0.0-beta.6 (2021-10-05)

 ### Features Added

+- Added the `audience` property to `MetricsClientOptions`
+- Enabled browser support
+- Added distinct result types `LogsQueryPartialResult`, `LogsQuerySuccessfulResult`, and `LogsQueryError` to represent the possible outcomes of a logs query.
+
 ### Breaking Changes

+- Renamed `ErrorInfo` to `LogsErrorInfo`, which now extends the `Error` class and adds `code` as an additional property. All other properties have been removed.
+- Renamed the `query` method in `LogsQueryClient` to `queryWorkspace`
+- Renamed the `query` method in `MetricsQueryClient` to `queryResource`
+- Renamed the `credentialOptions.credentialScopes` property in `LogsQueryClientOptions` to `audience`
+- Renamed the status values in `LogsQueryResultStatus`: `Partial` to `PartialFailure` and `Failed` to `Failure`.
+- Renamed `timeGrain` in `MetricAvailability` to `granularity`
+- Renamed `TimeInterval` to `QueryTimeInterval`
+- Updated the constants in `Durations` to camel case.
+- Removed the `throwOnAnyFailure` flag from `LogsQueryOptions` and `LogsQueryBatchOptions`
+- Removed the error classes `BatchError` and `AggregateBatchError`
+- Updated the `LogsQueryBatchResult` object to be a list of objects with the following possible types:
+  - `LogsQueryPartialResult`
+  - `LogsQuerySuccessfulResult`
+  - `LogsQueryError`
+- Updated the `LogsQueryResult` type to be either a `LogsQuerySuccessfulResult` or a `LogsQueryPartialResult`
+
 ### Bugs Fixed

-### Other Changes
+- Updated the `listMetricNamespaces` signature to return a list of the appropriate `MetricsNamespaces` object type

 ## 1.0.0-beta.5 (2021-09-09)
diff --git a/sdk/monitor/monitor-query/README.md b/sdk/monitor/monitor-query/README.md
index a95f89c1dda5..3edc1919e0dd 100644
--- a/sdk/monitor/monitor-query/README.md
+++ b/sdk/monitor/monitor-query/README.md
@@ -102,15 +102,26 @@ async function run() {
   const result = await logsQueryClient.queryWorkspace(azureLogAnalyticsWorkspaceId, kustoQuery, {
     duration: Durations.twentyFourHours
   });
-  const tablesFromResult = result.tables;
-  if (tablesFromResult == null) {
-    console.log(`No results for query '${kustoQuery}'`);
-    return;
-  }
+  if (result.status === LogsQueryResultStatus.Success) {
+    const tablesFromResult: LogsTable[] = result.tables;
-  console.log(`Results for query '${kustoQuery}'`);
+    if (tablesFromResult.length === 0) {
+      console.log(`No results for query '${kustoQuery}'`);
+      return;
+    }
+    console.log(`This query has returned table(s) - `);
+    processTables(tablesFromResult);
+  } else {
+    console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`);
+    if (result.partialTables.length > 0) {
+      console.log(`This query has also returned partial data in the following table(s) - `);
+      processTables(result.partialTables);
+    }
+  }
+}
+async function processTables(tablesFromResult: LogsTable[]) {
   for (const table of tablesFromResult) {
     const columnHeaderString = table.columnDescriptors
       .map((column) => `${column.name}(${column.type}) `)
@@ -123,41 +134,57 @@ async function run() {
     }
   }
 }
+
 run().catch((err) => console.log("ERROR:", err));
 ```

 #### Handle logs query response

-The `queryWorkspace` function of `LogsQueryClient` returns the `LogsQueryResult`.
Here's a hierarchy of the response: +The `queryWorkspace` function of `LogsQueryClient` returns a `LogsQueryResult` object. The object type can be `LogsQuerySuccessfulResult` or `LogsQueryPartialResult`. Here's a hierarchy of the response: ``` -LogsQueryResult +LogsQuerySuccessfulResult |---statistics |---visualization -|---error -|---status ("PartialFailure" | "Success" | "Failure") +|---status ("Success") |---tables (list of `LogsTable` objects) |---name |---rows |---columnDescriptors (list of `LogsColumn` objects) |---name |---type + +LogsQueryPartialResult +|---statistics +|---visualization +|---status ("PartialFailure") +|---partialError + |--name + |--code + |--message + |--stack +|---partialTables (list of `LogsTable` objects) + |---name + |---rows + |---columnDescriptors (list of `LogsColumn` objects) + |---name + |---type ``` For example, to handle a response with tables: ```ts -const tablesFromResult = result.tables; - -for (const table of tablesFromResult) { - const columnHeaderString = table.columnDescriptors - .map((column) => `${column.name}(${column.type}) `) - .join("| "); - console.log("| " + columnHeaderString); +async function processTables(tablesFromResult: LogsTable[]) { + for (const table of tablesFromResult) { + const columnHeaderString = table.columnDescriptors + .map((column) => `${column.name}(${column.type}) `) + .join("| "); + console.log("| " + columnHeaderString); - for (const row of table.rows) { - const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); - console.log("| " + columnValuesString); + for (const row of table.rows) { + const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); + console.log("| " + columnValuesString); + } } } ``` @@ -210,89 +237,133 @@ export async function main() { } let i = 0; - for (const response of result.results) { + for (const response of result) { console.log(`Results for query with query: ${queriesBatch[i]}`); - - if (response.error) { - console.log(` Query had errors:`, response.error); + if (response.status === LogsQueryResultStatus.Success) { + console.log( + `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.tables); + } else if (response.status === LogsQueryResultStatus.PartialFailure) { + console.log( + `Printing partial results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.partialTables); + console.log( + ` Query had errors:${response.partialError.message} with code ${response.partialError.code}` + ); } else { - if (response.tables == null) { - console.log(`No results for query`); - } else { - console.log( - `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` - ); - - for (const table of response.tables) { - const columnHeaderString = table.columnDescriptors - .map((column) => `${column.name}(${column.type}) `) - .join("| "); - console.log(columnHeaderString); - - for (const row of table.rows) { - const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); - console.log(columnValuesString); - } - } - } + console.log(`Printing errors from query '${queriesBatch[i].query}'`); + console.log(` Query had errors:${response.message} with code ${response.code}`); } // next query i++; } } + +async function processTables(tablesFromResult: LogsTable[]) { + for (const table of tablesFromResult) { + const columnHeaderString = table.columnDescriptors + .map((column) => 
`${column.name}(${column.type}) `) + .join("| "); + console.log("| " + columnHeaderString); + + for (const row of table.rows) { + const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); + console.log("| " + columnValuesString); + } + } +} ``` #### Handle logs batch query response -The `queryBatch` function of `LogsQueryClient` returns a `LogsQueryBatchResult` object. Here's a hierarchy of the response: +The `queryBatch` function of `LogsQueryClient` returns a `LogsQueryBatchResult` object. `LogsQueryBatchResult` contains a list of objects with the following possible types: + +- `LogsQueryPartialResult` +- `LogsQuerySuccessfulResult` +- `LogsQueryError` + +Here's a hierarchy of the response: ``` -LogsQueryBatchResult -|---results (list of following objects) - |---statistics - |---visualization - |---error - |---status ("PartialFailure" | "Success" | "Failure") - |---tables (list of `LogsTable` objects) + +LogsQuerySuccessfulResult +|---statistics +|---visualization +|---status ("Success") +|---tables (list of `LogsTable` objects) + |---name + |---rows + |---columnDescriptors (list of `LogsColumn` objects) |---name - |---rows - |---columnDescriptors (list of `LogsColumn` objects) - |---name - |---type + |---type + +LogsQueryPartialResult +|---statistics +|---visualization +|---status ("PartialFailure") +|---partialError + |--name + |--code + |--message + |--stack +|---partialTables (list of `LogsTable` objects) + |---name + |---rows + |---columnDescriptors (list of `LogsColumn` objects) + |---name + |---type + +LogsQueryError +|--name +|--code +|--message +|--stack +|--status ("Failure") ``` -For example, to handle a batch logs query response: +For example, the following code handles a batch logs query response: ```ts -let i = 0; -for (const response of result.results) { - console.log(`Results for query with query: ${queriesBatch[i]}`); - - if (response.error) { - console.log(` Query had errors:`, response.error); - } else { - if (response.tables == null) { - console.log(`No results for query`); - } else { +async function processBatchResult(result: LogsQueryBatchResult) { + let i = 0; + for (const response of result) { + console.log(`Results for query with query: ${queriesBatch[i]}`); + if (response.status === LogsQueryResultStatus.Success) { console.log( `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` ); + processTables(response.tables); + } else if (response.status === LogsQueryResultStatus.PartialFailure) { + console.log( + `Printing partial results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.partialTables); + console.log( + ` Query had errors:${response.partialError.message} with code ${response.partialError.code}` + ); + } else { + console.log(`Printing errors from query '${queriesBatch[i].query}'`); + console.log(` Query had errors:${response.message} with code ${response.code}`); + } + // next query + i++; + } +} - for (const table of response.tables) { - const columnHeaderString = table.columnDescriptors - .map((column) => `${column.name}(${column.type}) `) - .join("| "); - console.log(columnHeaderString); +async function processTables(tablesFromResult: LogsTable[]) { + for (const table of tablesFromResult) { + const columnHeaderString = table.columnDescriptors + .map((column) => `${column.name}(${column.type}) `) + .join("| "); + console.log("| " + columnHeaderString); - for (const row of table.rows) { - const columnValuesString = row.map((columnValue) 
=> `'${columnValue}' `).join("| "); - console.log(columnValuesString); - } - } + for (const row of table.rows) { + const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); + console.log("| " + columnValuesString); } } - // next query - i++; } ``` diff --git a/sdk/monitor/monitor-query/review/monitor-query.api.md b/sdk/monitor/monitor-query/review/monitor-query.api.md index c13ec3a0b477..398400cd0d81 100644 --- a/sdk/monitor/monitor-query/review/monitor-query.api.md +++ b/sdk/monitor/monitor-query/review/monitor-query.api.md @@ -9,21 +9,9 @@ import { OperationOptions } from '@azure/core-client'; import { PagedAsyncIterableIterator } from '@azure/core-paging'; import { TokenCredential } from '@azure/core-auth'; -// @public -export class AggregateBatchError extends Error { - constructor(errors: LogsErrorInfo[]); - errors: BatchError[]; -} - // @public export type AggregationType = "None" | "Average" | "Count" | "Minimum" | "Maximum" | "Total"; -// @public -export class BatchError extends Error implements LogsErrorInfo { - constructor(errorInfo: LogsErrorInfo); - code: string; -} - // @public export const Durations: { readonly sevenDays: "P7D"; @@ -64,19 +52,10 @@ export interface LogsErrorInfo extends Error { // @public export interface LogsQueryBatchOptions extends OperationOptions { - throwOnAnyFailure?: boolean; } // @public -export interface LogsQueryBatchResult { - results: { - tables?: LogsTable[]; - error?: LogsErrorInfo; - status?: LogsQueryResultStatus; - statistics?: Record; - visualization?: Record; - }[]; -} +export type LogsQueryBatchResult = Array; // @public export class LogsQueryClient { @@ -91,26 +70,46 @@ export interface LogsQueryClientOptions extends CommonClientOptions { endpoint?: string; } +// @public +export interface LogsQueryError extends Error { + code: string; + status: LogsQueryResultStatus.Failure; +} + // @public export interface LogsQueryOptions extends OperationOptions { additionalWorkspaces?: string[]; includeQueryStatistics?: boolean; includeVisualization?: boolean; serverTimeoutInSeconds?: number; - throwOnAnyFailure?: boolean; } // @public -export interface LogsQueryResult { - error?: LogsErrorInfo; +export interface LogsQueryPartialResult { + partialError: LogsErrorInfo; + partialTables: LogsTable[]; statistics?: Record; - status: LogsQueryResultStatus; - tables: LogsTable[]; + status: LogsQueryResultStatus.PartialFailure; visualization?: Record; } // @public -export type LogsQueryResultStatus = "PartialFailure" | "Success" | "Failure"; +export type LogsQueryResult = LogsQuerySuccessfulResult | LogsQueryPartialResult; + +// @public +export enum LogsQueryResultStatus { + Failure = "Failure", + PartialFailure = "PartialFailure", + Success = "Success" +} + +// @public +export interface LogsQuerySuccessfulResult { + statistics?: Record; + status: LogsQueryResultStatus.Success; + tables: LogsTable[]; + visualization?: Record; +} // @public export interface LogsTable { diff --git a/sdk/monitor/monitor-query/samples-dev/logsQuery.ts b/sdk/monitor/monitor-query/samples-dev/logsQuery.ts index 3b1b3c98e262..611dcb0ae4a5 100644 --- a/sdk/monitor/monitor-query/samples-dev/logsQuery.ts +++ b/sdk/monitor/monitor-query/samples-dev/logsQuery.ts @@ -6,7 +6,13 @@ */ import { DefaultAzureCredential } from "@azure/identity"; -import { Durations, LogsQueryClient, LogsTable, LogsQueryOptions } from "@azure/monitor-query"; +import { + Durations, + LogsQueryClient, + LogsTable, + LogsQueryOptions, + LogsQueryResultStatus +} from 
"@azure/monitor-query"; import * as dotenv from "dotenv"; dotenv.config(); @@ -41,14 +47,6 @@ export async function main() { { duration: Durations.oneHour }, queryLogsOptions ); - - const tablesFromResult: LogsTable[] | undefined = result.tables; - - if (tablesFromResult == null) { - console.log(`No results for query '${kustoQuery}'`); - return; - } - const executionTime = result.statistics && result.statistics.query && (result.statistics.query as any).executionTime; @@ -58,6 +56,25 @@ export async function main() { }` ); + if (result.status === LogsQueryResultStatus.Success) { + const tablesFromResult: LogsTable[] = result.tables; + + if (tablesFromResult.length === 0) { + console.log(`No results for query '${kustoQuery}'`); + return; + } + console.log(`This query has returned table(s) - `); + processTables(tablesFromResult); + } else { + console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`); + if (result.partialTables.length > 0) { + console.log(`This query has also returned partial data in the following table(s) - `); + processTables(result.partialTables); + } + } +} + +async function processTables(tablesFromResult: LogsTable[]) { for (const table of tablesFromResult) { const columnHeaderString = table.columnDescriptors .map((column) => `${column.name}(${column.type}) `) diff --git a/sdk/monitor/monitor-query/samples-dev/logsQueryBatch.ts b/sdk/monitor/monitor-query/samples-dev/logsQueryBatch.ts index bf3dcbab31cb..f14f05325c25 100644 --- a/sdk/monitor/monitor-query/samples-dev/logsQueryBatch.ts +++ b/sdk/monitor/monitor-query/samples-dev/logsQueryBatch.ts @@ -6,7 +6,7 @@ */ import { DefaultAzureCredential } from "@azure/identity"; -import { LogsQueryClient } from "@azure/monitor-query"; +import { LogsQueryClient, LogsQueryResultStatus, LogsTable } from "@azure/monitor-query"; import * as dotenv from "dotenv"; dotenv.config(); @@ -47,43 +47,49 @@ export async function main() { ]; const result = await logsQueryClient.queryBatch(queriesBatch); - - if (result.results == null) { + if (result === null) { throw new Error("No response for query"); } let i = 0; - for (const response of result.results) { + for (const response of result) { console.log(`Results for query with query: ${queriesBatch[i]}`); - - if (response.error) { - console.log(` Query had errors:`, response.error); + if (response.status === LogsQueryResultStatus.Success) { + console.log( + `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.tables); + } else if (response.status === LogsQueryResultStatus.PartialFailure) { + console.log( + `Printing partial results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.partialTables); + console.log( + ` Query had errors:${response.partialError.message} with code ${response.partialError.code}` + ); } else { - if (response.tables == null) { - console.log(`No results for query`); - } else { - console.log( - `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` - ); - - for (const table of response.tables) { - const columnHeaderString = table.columnDescriptors - .map((column) => `${column.name}(${column.type}) `) - .join("| "); - console.log(columnHeaderString); - - for (const row of table.rows) { - const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); - console.log(columnValuesString); - } - } - } + console.log(`Printing errors from query '${queriesBatch[i].query}'`); + 
console.log(` Query had errors:${response.message} with code ${response.code}`); } // next query i++; } } +async function processTables(tablesFromResult: LogsTable[]) { + for (const table of tablesFromResult) { + const columnHeaderString = table.columnDescriptors + .map((column) => `${column.name}(${column.type}) `) + .join("| "); + console.log("| " + columnHeaderString); + + for (const row of table.rows) { + const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); + console.log("| " + columnValuesString); + } + } +} + main().catch((err) => { console.error("The sample encountered an error:", err); process.exit(1); diff --git a/sdk/monitor/monitor-query/samples-dev/logsQueryMultipleWorkspaces.ts b/sdk/monitor/monitor-query/samples-dev/logsQueryMultipleWorkspaces.ts index e47c45f830f6..d531c41de961 100644 --- a/sdk/monitor/monitor-query/samples-dev/logsQueryMultipleWorkspaces.ts +++ b/sdk/monitor/monitor-query/samples-dev/logsQueryMultipleWorkspaces.ts @@ -6,7 +6,13 @@ */ import { DefaultAzureCredential } from "@azure/identity"; -import { Durations, LogsQueryClient, LogsTable, LogsQueryOptions } from "@azure/monitor-query"; +import { + Durations, + LogsQueryClient, + LogsTable, + LogsQueryOptions, + LogsQueryResultStatus +} from "@azure/monitor-query"; import * as dotenv from "dotenv"; dotenv.config(); @@ -44,14 +50,6 @@ export async function main() { { duration: Durations.oneHour }, queryLogsOptions ); - - const tablesFromResult: LogsTable[] | undefined = result.tables; - - if (tablesFromResult == null) { - console.log(`No results for query '${kustoQuery}'`); - return; - } - const executionTime = result.statistics && result.statistics.query && (result.statistics.query as any).executionTime; @@ -61,6 +59,23 @@ export async function main() { }` ); + if (result.status === LogsQueryResultStatus.Success) { + const tablesFromResult: LogsTable[] = result.tables; + if (tablesFromResult == null) { + console.log(`No results for query '${kustoQuery}'`); + return; + } + processTables(tablesFromResult); + } else { + console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`); + if (result.partialTables.length > 0) { + console.log(`This query has also returned partial data in the following table(s) - `); + processTables(result.partialTables); + } + } +} + +async function processTables(tablesFromResult: LogsTable[]) { for (const table of tablesFromResult) { const columnHeaderString = table.columnDescriptors .map((column) => `${column.name}(${column.type}) `) diff --git a/sdk/monitor/monitor-query/samples/v1/javascript/logsQuery.js b/sdk/monitor/monitor-query/samples/v1/javascript/logsQuery.js index 898026e9dc3f..02d1cc91bcb5 100644 --- a/sdk/monitor/monitor-query/samples/v1/javascript/logsQuery.js +++ b/sdk/monitor/monitor-query/samples/v1/javascript/logsQuery.js @@ -6,7 +6,7 @@ */ const { DefaultAzureCredential } = require("@azure/identity"); -const { Durations, LogsQueryClient } = require("@azure/monitor-query"); +const { Durations, LogsQueryClient, LogsQueryResultStatus } = require("@azure/monitor-query"); const dotenv = require("dotenv"); dotenv.config(); @@ -32,23 +32,15 @@ async function main() { includeQueryStatistics: true }; - const result = await logsQueryClient.query( + const result = await logsQueryClient.queryWorkspace( monitorWorkspaceId, kustoQuery, // The timespan is an ISO8601 formatted time (or interval). Some common aliases // are available (like OneDay, OneHour, FoutyEightHours, etc..) 
but any properly formatted ISO8601 // value is valid. - { duration: Durations.OneHour }, + { duration: Durations.oneHour }, queryLogsOptions ); - - const tablesFromResult = result.tables; - - if (tablesFromResult == null) { - console.log(`No results for query '${kustoQuery}'`); - return; - } - const executionTime = result.statistics && result.statistics.query && result.statistics.query.executionTime; @@ -58,6 +50,25 @@ async function main() { }` ); + if (result.status === LogsQueryResultStatus.Success) { + const tablesFromResult = result.tables; + + if (tablesFromResult.length === 0) { + console.log(`No results for query '${kustoQuery}'`); + return; + } + console.log(`This query has returned table(s) - `); + processTables(tablesFromResult); + } else { + console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`); + if (result.partialTables.length > 0) { + console.log(`This query has also returned partial data in the following table(s) - `); + processTables(result.partialTables); + } + } +} + +async function processTables(tablesFromResult) { for (const table of tablesFromResult) { const columnHeaderString = table.columnDescriptors .map((column) => `${column.name}(${column.type}) `) diff --git a/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryBatch.js b/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryBatch.js index 7ea5e986a80c..5b1ba208e3b8 100644 --- a/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryBatch.js +++ b/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryBatch.js @@ -6,7 +6,7 @@ */ const { DefaultAzureCredential } = require("@azure/identity"); -const { LogsQueryClient } = require("@azure/monitor-query"); +const { LogsQueryClient, LogsQueryResultStatus } = require("@azure/monitor-query"); const dotenv = require("dotenv"); dotenv.config(); @@ -47,43 +47,49 @@ async function main() { ]; const result = await logsQueryClient.queryBatch(queriesBatch); - - if (result.results == null) { + if (result === null) { throw new Error("No response for query"); } let i = 0; - for (const response of result.results) { + for (const response of result) { console.log(`Results for query with query: ${queriesBatch[i]}`); - - if (response.error) { - console.log(` Query had errors:`, response.error); + if (response.status === LogsQueryResultStatus.Success) { + console.log( + `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.tables); + } else if (response.status === LogsQueryResultStatus.PartialFailure) { + console.log( + `Printing partial results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.partialTables); + console.log( + ` Query had errors:${response.partialError.message} with code ${response.partialError.code}` + ); } else { - if (response.tables == null) { - console.log(`No results for query`); - } else { - console.log( - `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` - ); - - for (const table of response.tables) { - const columnHeaderString = table.columnDescriptors - .map((column) => `${column.name}(${column.type}) `) - .join("| "); - console.log(columnHeaderString); - - for (const row of table.rows) { - const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); - console.log(columnValuesString); - } - } - } + console.log(`Printing errors from query '${queriesBatch[i].query}'`); + console.log(` Query had errors:${response.message} with code 
${response.code}`); } // next query i++; } } +async function processTables(tablesFromResult) { + for (const table of tablesFromResult) { + const columnHeaderString = table.columnDescriptors + .map((column) => `${column.name}(${column.type}) `) + .join("| "); + console.log("| " + columnHeaderString); + + for (const row of table.rows) { + const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); + console.log("| " + columnValuesString); + } + } +} + main().catch((err) => { console.error("The sample encountered an error:", err); process.exit(1); diff --git a/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryMultipleWorkspaces.js b/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryMultipleWorkspaces.js index 0303b2f9bce0..05dcbcbddbd9 100644 --- a/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryMultipleWorkspaces.js +++ b/sdk/monitor/monitor-query/samples/v1/javascript/logsQueryMultipleWorkspaces.js @@ -6,7 +6,7 @@ */ const { DefaultAzureCredential } = require("@azure/identity"); -const { Durations, LogsQueryClient } = require("@azure/monitor-query"); +const { Durations, LogsQueryClient, LogsQueryResultStatus } = require("@azure/monitor-query"); const dotenv = require("dotenv"); dotenv.config(); @@ -35,23 +35,15 @@ async function main() { additionalWorkspaces: [additionalWorkspaces1, additionalWorkspaces2] }; - const result = await logsQueryClient.query( + const result = await logsQueryClient.queryWorkspace( monitorWorkspaceId, kustoQuery, // The timespan is an ISO8601 formatted time (or interval). Some common aliases // are available (like durationOf1Day, durationOf1Hour, durationOf48Hours, etc..) but any properly formatted ISO8601 // value is valid. - { duration: Durations.OneHour }, + { duration: Durations.oneHour }, queryLogsOptions ); - - const tablesFromResult = result.tables; - - if (tablesFromResult == null) { - console.log(`No results for query '${kustoQuery}'`); - return; - } - const executionTime = result.statistics && result.statistics.query && result.statistics.query.executionTime; @@ -61,6 +53,23 @@ async function main() { }` ); + if (result.status === LogsQueryResultStatus.Success) { + const tablesFromResult = result.tables; + if (tablesFromResult == null) { + console.log(`No results for query '${kustoQuery}'`); + return; + } + processTables(tablesFromResult); + } else { + console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`); + if (result.partialTables.length > 0) { + console.log(`This query has also returned partial data in the following table(s) - `); + processTables(result.partialTables); + } + } +} + +async function processTables(tablesFromResult) { for (const table of tablesFromResult) { const columnHeaderString = table.columnDescriptors .map((column) => `${column.name}(${column.type}) `) diff --git a/sdk/monitor/monitor-query/samples/v1/javascript/metricsQuery.js b/sdk/monitor/monitor-query/samples/v1/javascript/metricsQuery.js index ecf7ea903151..df1bf90de4e5 100644 --- a/sdk/monitor/monitor-query/samples/v1/javascript/metricsQuery.js +++ b/sdk/monitor/monitor-query/samples/v1/javascript/metricsQuery.js @@ -32,9 +32,9 @@ async function main() { if (metricNames.length > 0) { console.log(`Picking an example list of metrics to query: ${metricNames}`); - const metricsResponse = await metricsQueryClient.query(metricsResourceId, metricNames, { + const metricsResponse = await metricsQueryClient.queryResource(metricsResourceId, metricNames, { granularity: "PT1M", - timespan: { duration: 
Durations.FiveMinutes } + timespan: { duration: Durations.fiveMinutes } }); console.log( diff --git a/sdk/monitor/monitor-query/samples/v1/javascript/package.json b/sdk/monitor/monitor-query/samples/v1/javascript/package.json index f39d84df49d7..1746269369ca 100644 --- a/sdk/monitor/monitor-query/samples/v1/javascript/package.json +++ b/sdk/monitor/monitor-query/samples/v1/javascript/package.json @@ -25,6 +25,6 @@ "dependencies": { "@azure/monitor-query": "next", "dotenv": "latest", - "@azure/identity": "^1.1.0" + "@azure/identity": "2.0.0-beta.7" } } diff --git a/sdk/monitor/monitor-query/samples/v1/typescript/package.json b/sdk/monitor/monitor-query/samples/v1/typescript/package.json index 0689a42f9ed6..ad0ef023dbd4 100644 --- a/sdk/monitor/monitor-query/samples/v1/typescript/package.json +++ b/sdk/monitor/monitor-query/samples/v1/typescript/package.json @@ -29,7 +29,7 @@ "dependencies": { "@azure/monitor-query": "next", "dotenv": "latest", - "@azure/identity": "^1.1.0" + "@azure/identity": "2.0.0-beta.7" }, "devDependencies": { "typescript": "~4.2.0", diff --git a/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQuery.ts b/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQuery.ts index dff6a944da6a..611dcb0ae4a5 100644 --- a/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQuery.ts +++ b/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQuery.ts @@ -6,7 +6,13 @@ */ import { DefaultAzureCredential } from "@azure/identity"; -import { Durations, LogsQueryClient, LogsTable, LogsQueryOptions } from "@azure/monitor-query"; +import { + Durations, + LogsQueryClient, + LogsTable, + LogsQueryOptions, + LogsQueryResultStatus +} from "@azure/monitor-query"; import * as dotenv from "dotenv"; dotenv.config(); @@ -32,23 +38,15 @@ export async function main() { includeQueryStatistics: true }; - const result = await logsQueryClient.query( + const result = await logsQueryClient.queryWorkspace( monitorWorkspaceId, kustoQuery, // The timespan is an ISO8601 formatted time (or interval). Some common aliases // are available (like OneDay, OneHour, FoutyEightHours, etc..) but any properly formatted ISO8601 // value is valid. 
- { duration: Durations.OneHour }, + { duration: Durations.oneHour }, queryLogsOptions ); - - const tablesFromResult: LogsTable[] | undefined = result.tables; - - if (tablesFromResult == null) { - console.log(`No results for query '${kustoQuery}'`); - return; - } - const executionTime = result.statistics && result.statistics.query && (result.statistics.query as any).executionTime; @@ -58,6 +56,25 @@ export async function main() { }` ); + if (result.status === LogsQueryResultStatus.Success) { + const tablesFromResult: LogsTable[] = result.tables; + + if (tablesFromResult.length === 0) { + console.log(`No results for query '${kustoQuery}'`); + return; + } + console.log(`This query has returned table(s) - `); + processTables(tablesFromResult); + } else { + console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`); + if (result.partialTables.length > 0) { + console.log(`This query has also returned partial data in the following table(s) - `); + processTables(result.partialTables); + } + } +} + +async function processTables(tablesFromResult: LogsTable[]) { for (const table of tablesFromResult) { const columnHeaderString = table.columnDescriptors .map((column) => `${column.name}(${column.type}) `) diff --git a/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryBatch.ts b/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryBatch.ts index bf3dcbab31cb..f14f05325c25 100644 --- a/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryBatch.ts +++ b/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryBatch.ts @@ -6,7 +6,7 @@ */ import { DefaultAzureCredential } from "@azure/identity"; -import { LogsQueryClient } from "@azure/monitor-query"; +import { LogsQueryClient, LogsQueryResultStatus, LogsTable } from "@azure/monitor-query"; import * as dotenv from "dotenv"; dotenv.config(); @@ -47,43 +47,49 @@ export async function main() { ]; const result = await logsQueryClient.queryBatch(queriesBatch); - - if (result.results == null) { + if (result === null) { throw new Error("No response for query"); } let i = 0; - for (const response of result.results) { + for (const response of result) { console.log(`Results for query with query: ${queriesBatch[i]}`); - - if (response.error) { - console.log(` Query had errors:`, response.error); + if (response.status === LogsQueryResultStatus.Success) { + console.log( + `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.tables); + } else if (response.status === LogsQueryResultStatus.PartialFailure) { + console.log( + `Printing partial results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` + ); + processTables(response.partialTables); + console.log( + ` Query had errors:${response.partialError.message} with code ${response.partialError.code}` + ); } else { - if (response.tables == null) { - console.log(`No results for query`); - } else { - console.log( - `Printing results from query '${queriesBatch[i].query}' for '${queriesBatch[i].timespan}'` - ); - - for (const table of response.tables) { - const columnHeaderString = table.columnDescriptors - .map((column) => `${column.name}(${column.type}) `) - .join("| "); - console.log(columnHeaderString); - - for (const row of table.rows) { - const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); - console.log(columnValuesString); - } - } - } + console.log(`Printing errors from query '${queriesBatch[i].query}'`); + console.log(` Query had 
errors:${response.message} with code ${response.code}`); } // next query i++; } } +async function processTables(tablesFromResult: LogsTable[]) { + for (const table of tablesFromResult) { + const columnHeaderString = table.columnDescriptors + .map((column) => `${column.name}(${column.type}) `) + .join("| "); + console.log("| " + columnHeaderString); + + for (const row of table.rows) { + const columnValuesString = row.map((columnValue) => `'${columnValue}' `).join("| "); + console.log("| " + columnValuesString); + } + } +} + main().catch((err) => { console.error("The sample encountered an error:", err); process.exit(1); diff --git a/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryMultipleWorkspaces.ts b/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryMultipleWorkspaces.ts index 73102d386d62..d531c41de961 100644 --- a/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryMultipleWorkspaces.ts +++ b/sdk/monitor/monitor-query/samples/v1/typescript/src/logsQueryMultipleWorkspaces.ts @@ -6,7 +6,13 @@ */ import { DefaultAzureCredential } from "@azure/identity"; -import { Durations, LogsQueryClient, LogsTable, LogsQueryOptions } from "@azure/monitor-query"; +import { + Durations, + LogsQueryClient, + LogsTable, + LogsQueryOptions, + LogsQueryResultStatus +} from "@azure/monitor-query"; import * as dotenv from "dotenv"; dotenv.config(); @@ -35,23 +41,15 @@ export async function main() { additionalWorkspaces: [additionalWorkspaces1, additionalWorkspaces2] }; - const result = await logsQueryClient.query( + const result = await logsQueryClient.queryWorkspace( monitorWorkspaceId, kustoQuery, // The timespan is an ISO8601 formatted time (or interval). Some common aliases // are available (like durationOf1Day, durationOf1Hour, durationOf48Hours, etc..) but any properly formatted ISO8601 // value is valid. 
- { duration: Durations.OneHour }, + { duration: Durations.oneHour }, queryLogsOptions ); - - const tablesFromResult: LogsTable[] | undefined = result.tables; - - if (tablesFromResult == null) { - console.log(`No results for query '${kustoQuery}'`); - return; - } - const executionTime = result.statistics && result.statistics.query && (result.statistics.query as any).executionTime; @@ -61,6 +59,23 @@ export async function main() { }` ); + if (result.status === LogsQueryResultStatus.Success) { + const tablesFromResult: LogsTable[] = result.tables; + if (tablesFromResult == null) { + console.log(`No results for query '${kustoQuery}'`); + return; + } + processTables(tablesFromResult); + } else { + console.log(`Error processing the query '${kustoQuery}' - ${result.partialError}`); + if (result.partialTables.length > 0) { + console.log(`This query has also returned partial data in the following table(s) - `); + processTables(result.partialTables); + } + } +} + +async function processTables(tablesFromResult: LogsTable[]) { for (const table of tablesFromResult) { const columnHeaderString = table.columnDescriptors .map((column) => `${column.name}(${column.type}) `) diff --git a/sdk/monitor/monitor-query/samples/v1/typescript/src/metricsQuery.ts b/sdk/monitor/monitor-query/samples/v1/typescript/src/metricsQuery.ts index 55c367036fb4..297aa14d19f7 100644 --- a/sdk/monitor/monitor-query/samples/v1/typescript/src/metricsQuery.ts +++ b/sdk/monitor/monitor-query/samples/v1/typescript/src/metricsQuery.ts @@ -32,9 +32,9 @@ export async function main() { if (metricNames.length > 0) { console.log(`Picking an example list of metrics to query: ${metricNames}`); - const metricsResponse = await metricsQueryClient.query(metricsResourceId, metricNames, { + const metricsResponse = await metricsQueryClient.queryResource(metricsResourceId, metricNames, { granularity: "PT1M", - timespan: { duration: Durations.FiveMinutes } + timespan: { duration: Durations.fiveMinutes } }); console.log( diff --git a/sdk/monitor/monitor-query/src/index.ts b/sdk/monitor/monitor-query/src/index.ts index 9e29250b8108..6b557f3e6da4 100644 --- a/sdk/monitor/monitor-query/src/index.ts +++ b/sdk/monitor/monitor-query/src/index.ts @@ -11,14 +11,13 @@ export { LogsQueryBatchResult, LogsQueryOptions, LogsQueryResult, - // TODO: design issues around this still pending. 
- // QueryStatistics, + LogsQueryError, + LogsQueryPartialResult, + LogsQuerySuccessfulResult, LogsTable, LogsColumn, LogsQueryResultStatus, - LogsErrorInfo, - BatchError, - AggregateBatchError + LogsErrorInfo } from "./models/publicLogsModels"; export { MetricsQueryClient, diff --git a/sdk/monitor/monitor-query/src/internal/modelConverters.ts b/sdk/monitor/monitor-query/src/internal/modelConverters.ts index 17c6f81948d9..1db9685a5248 100644 --- a/sdk/monitor/monitor-query/src/internal/modelConverters.ts +++ b/sdk/monitor/monitor-query/src/internal/modelConverters.ts @@ -48,7 +48,13 @@ import { convertIntervalToTimeIntervalObject, convertTimespanToInterval } from "../timespanConversion"; -import { LogsErrorInfo, LogsQueryResult } from "../models/publicLogsModels"; +import { + LogsErrorInfo, + LogsQueryError, + LogsQueryPartialResult, + LogsQueryResultStatus, + LogsQuerySuccessfulResult +} from "../models/publicLogsModels"; /** * @internal @@ -115,36 +121,22 @@ export function convertResponseForQueryBatch( */ const responseList = generatedResponse.responses || []; - const newResponse: LogsQueryBatchResult = { - results: responseList - ?.sort((a, b) => { - let left = 0; - if (a.id != null) { - left = parseInt(a.id, 10); - } + const newResponse: LogsQueryBatchResult = responseList + ?.sort((a, b) => { + let left = 0; + if (a.id != null) { + left = parseInt(a.id, 10); + } - let right = 0; - if (b.id != null) { - right = parseInt(b.id, 10); - } + let right = 0; + if (b.id != null) { + right = parseInt(b.id, 10); + } + + return left - right; + }) + ?.map((response: GeneratedBatchQueryResponse) => convertBatchQueryResponseHelper(response)); - return left - right; - }) - ?.map((response: GeneratedBatchQueryResponse) => convertBatchQueryResponseHelper(response)) - }; - // compute status for failed or succeed or partial results - - const resultsCount = newResponse.results?.length ?? 0; - for (let i = 0; i < resultsCount; i++) { - const result = newResponse.results[i]; - if (result.error && result.tables) { - result.status = "PartialFailure"; - } else if (result.tables) { - result.status = "Success"; - } else { - result.status = "Failure"; - } - } (newResponse as any)["__fixApplied"] = fixApplied; return newResponse; } @@ -410,43 +402,63 @@ export function convertGeneratedTable(table: GeneratedTable): LogsTable { */ export function convertBatchQueryResponseHelper( response: GeneratedBatchQueryResponse -): Partial { +): LogsQueryPartialResult | LogsQuerySuccessfulResult | LogsQueryError { try { const parsedResponseBody: GeneratedBatchQueryResults = JSON.parse( response.body as any ) as GeneratedBatchQueryResults; - return { - visualization: parsedResponseBody.render, - status: "Success", // Assume success until shown otherwise. - statistics: parsedResponseBody.statistics, - error: mapError(parsedResponseBody.error), // ? { ...parsedResponseBody.error, name: "Error" } : undefined, - tables: parsedResponseBody.tables?.map((table: GeneratedTable) => - convertGeneratedTable(table) - ) - }; + + return computeResultType(parsedResponseBody); } catch (e) { - return { - visualization: response.body?.render, - status: "Success", // Assume success until shown otherwise. 
- statistics: response.body?.statistics, - error: mapError(response.body?.error), - tables: response.body?.tables?.map((table: GeneratedTable) => convertGeneratedTable(table)) - }; + if (response.body) return computeResultType(response.body); + else return {} as LogsQuerySuccessfulResult; } } -export function mapError(error?: GeneratedErrorInfo): LogsErrorInfo | undefined { - if (error) { - let innermostError = error; - while (innermostError.innerError) { - innermostError = innermostError.innerError; +export function computeResultType( + generatedResponse: GeneratedBatchQueryResults +): LogsQueryPartialResult | LogsQuerySuccessfulResult | LogsQueryError { + if (!generatedResponse.error) { + const result: LogsQuerySuccessfulResult = { + visualization: generatedResponse.render, + status: LogsQueryResultStatus.Success, + statistics: generatedResponse.statistics, + tables: + generatedResponse.tables?.map((table: GeneratedTable) => convertGeneratedTable(table)) || [] + }; + return result; + } else { + if (generatedResponse.tables) { + const result: LogsQueryPartialResult = { + visualization: generatedResponse.render, + status: LogsQueryResultStatus.PartialFailure, + statistics: generatedResponse.statistics, + partialTables: generatedResponse.tables?.map((table: GeneratedTable) => + convertGeneratedTable(table) + ), + partialError: mapError(generatedResponse.error) + }; + return result; + } else { + const errorInfo: LogsErrorInfo = mapError(generatedResponse.error); + const result: LogsQueryError = { + status: LogsQueryResultStatus.Failure, + ...errorInfo + }; + return result; } + } +} - return { - name: "Error", - code: error.code, - message: `${error.message}. ${innermostError.message}` - }; +export function mapError(error: GeneratedErrorInfo): LogsErrorInfo { + let innermostError = error; + while (innermostError.innerError) { + innermostError = innermostError.innerError; } - return undefined; + + return { + name: "Error", + code: error.code, + message: `${error.message}. ${innermostError.message}` + }; } diff --git a/sdk/monitor/monitor-query/src/logsQueryClient.ts b/sdk/monitor/monitor-query/src/logsQueryClient.ts index f8ba5f34fa2d..05218888db02 100644 --- a/sdk/monitor/monitor-query/src/logsQueryClient.ts +++ b/sdk/monitor/monitor-query/src/logsQueryClient.ts @@ -10,9 +10,9 @@ import { LogsQueryBatchResult, LogsQueryOptions, LogsQueryResult, - AggregateBatchError, - BatchError, - LogsErrorInfo + LogsQueryResultStatus, + LogsQuerySuccessfulResult, + LogsQueryPartialResult } from "./models/publicLogsModels"; import { @@ -25,6 +25,7 @@ import { formatPreferHeader } from "./internal/util"; import { CommonClientOptions, FullOperationResponse, OperationOptions } from "@azure/core-client"; import { QueryTimeInterval } from "./models/timeInterval"; import { convertTimespanToInterval } from "./timespanConversion"; +import { SDK_VERSION } from "./constants"; const defaultMonitorScope = "https://api.loganalytics.io/.default"; @@ -63,12 +64,20 @@ export class LogsQueryClient { const credentialOptions = { credentialScopes: options?.audience }; + const packageDetails = `azsdk-js-monitor-query/${SDK_VERSION}`; + const userAgentPrefix = + options?.userAgentOptions && options?.userAgentOptions.userAgentPrefix + ? `${options?.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; this._logAnalytics = new AzureLogAnalytics({ ...options, $host: options?.endpoint, endpoint: options?.endpoint, credentialScopes: credentialOptions?.credentialScopes ?? 
defaultMonitorScope, - credential: tokenCredential + credential: tokenCredential, + userAgentOptions: { + userAgentPrefix + } }); } @@ -116,28 +125,32 @@ export class LogsQueryClient { const parsedBody = JSON.parse(rawResponse.bodyAsText!); flatResponse.tables = parsedBody.tables; - const result: LogsQueryResult = { + + const res = { tables: flatResponse.tables.map(convertGeneratedTable), statistics: flatResponse.statistics, - visualization: flatResponse.render, - error: mapError(flatResponse.error), - status: "Success" // Assume success until shown otherwise. + visualization: flatResponse.render }; - if (!result.error) { + + if (!flatResponse.error) { // if there is no error field, it is success - result.status = "Success"; + const result: LogsQuerySuccessfulResult = { + tables: res.tables, + statistics: res.statistics, + visualization: res.visualization, + status: LogsQueryResultStatus.Success + }; + return result; } else { - // result.tables is always present in single query response, even is there is error - if (result.tables.length === 0) { - result.status = "Failure"; - } else { - result.status = "PartialFailure"; - } + const result: LogsQueryPartialResult = { + partialTables: res.tables, + status: LogsQueryResultStatus.PartialFailure, + partialError: mapError(flatResponse.error), + statistics: res.statistics, + visualization: res.visualization + }; + return result; } - if (options?.throwOnAnyFailure && result.status !== "Success") { - throw new BatchError(result.error as LogsErrorInfo); - } - return result; } /** @@ -156,14 +169,6 @@ export class LogsQueryClient { options || {} ); const result: LogsQueryBatchResult = convertResponseForQueryBatch(flatResponse, rawResponse); - - if (options?.throwOnAnyFailure && result.results.some((it) => it.status !== "Success")) { - const errorResults = result.results - .filter((it) => it.status !== "Success") - .map((x) => x.error); - const batchErrorList = errorResults.map((x) => new BatchError(x as LogsErrorInfo)); - throw new AggregateBatchError(batchErrorList); - } return result; } } diff --git a/sdk/monitor/monitor-query/src/metricsQueryClient.ts b/sdk/monitor/monitor-query/src/metricsQueryClient.ts index b5e6fc1a78b0..bf2077656b40 100644 --- a/sdk/monitor/monitor-query/src/metricsQueryClient.ts +++ b/sdk/monitor/monitor-query/src/metricsQueryClient.ts @@ -32,7 +32,7 @@ import { convertResponseForMetrics, convertResponseForMetricsDefinitions } from "./internal/modelConverters"; - +import { SDK_VERSION } from "./constants"; const defaultMetricsScope = "https://management.azure.com/.default"; /** @@ -66,13 +66,20 @@ export class MetricsQueryClient { const credentialOptions = { credentialScopes: options?.audience }; - + const packageDetails = `azsdk-js-monitor-query/${SDK_VERSION}`; + const userAgentPrefix = + options?.userAgentOptions && options?.userAgentOptions.userAgentPrefix + ? `${options?.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; const serviceClientOptions = { ...options, $host: options?.endpoint, endpoint: options?.endpoint, credentialScopes: credentialOptions?.credentialScopes ?? 
defaultMetricsScope, - credential: tokenCredential + credential: tokenCredential, + userAgentOptions: { + userAgentPrefix + } }; this._metricsClient = new GeneratedMetricsClient( diff --git a/sdk/monitor/monitor-query/src/models/publicLogsModels.ts b/sdk/monitor/monitor-query/src/models/publicLogsModels.ts index cc01e1392873..cf90b73c3248 100644 --- a/sdk/monitor/monitor-query/src/models/publicLogsModels.ts +++ b/sdk/monitor/monitor-query/src/models/publicLogsModels.ts @@ -35,10 +35,6 @@ export interface LogsQueryOptions extends OperationOptions { * Results will also include visualization information, in JSON format. */ includeVisualization?: boolean; - /** - * If true, will cause this operation to throw if query operation did not succeed. - */ - throwOnAnyFailure?: boolean; } /** @@ -58,56 +54,62 @@ export interface LogsErrorInfo extends Error { code: string; } -/** Batch Error class for type of each error item in the {@link AggregateBatchError} list returned in logs query batch API */ -export class BatchError extends Error implements LogsErrorInfo { - /** A machine readable error code. */ - code: string; - - constructor(errorInfo: LogsErrorInfo) { - super(); - this.name = "Error"; - this.code = errorInfo.code; - this.message = errorInfo.message; - } -} -/** AggregateBatchError type for errors returned in logs query batch API*/ -export class AggregateBatchError extends Error { - /** Represents list of errors if thrown for the queries executed in the queryBatch operation */ - errors: BatchError[]; - constructor(errors: LogsErrorInfo[]) { - super(); - this.errors = errors.map((x) => new BatchError(x)); - } -} /** * Tables and statistic results from a logs query. */ -export interface LogsQueryResult { +export type LogsQueryResult = LogsQuerySuccessfulResult | LogsQueryPartialResult; + +/** Indicates if a query succeeded or failed or partially failed. + * Represented by PartialFailure" | "Success" | "Failure". + */ +export enum LogsQueryResultStatus { + /** Represents Partial Failure scenario where partial data and errors of type {@link LogsQueryPartialResult} is returned for query */ + PartialFailure = "PartialFailure", + /** Represents Failure scenario where only error of type {@link LogsQueryError} is returned for query */ + Failure = "Failure", + /** Represents Success scenario where all data of type {@link LogsQuerySuccessfulResult} is returned for query */ + Success = "Success" +} + +/** Result type for Success Scenario for logs query workspace and query batch operations. */ +export interface LogsQuerySuccessfulResult { /** Populated results from the query. */ tables: LogsTable[]; + /** Indicates that the query succeeded */ + status: LogsQueryResultStatus.Success; + /** Statistics represented in JSON format. */ + statistics?: Record; + /** Visualization data in JSON format. */ + visualization?: Record; +} + +/** Result type for Partial Failure Scenario for logs queryWorkspace and queryBatch operations. */ +export interface LogsQueryPartialResult { + /** Populated results from the query. */ + partialTables: LogsTable[]; /** error information for partial errors or failed queries */ - error?: LogsErrorInfo; - /** Indicates if a query succeeded or failed or partially failed. - * Represented by "Partial" | "Success" | "Failed". 
- * For partially failed queries, users can find data in "tables" attribute - * and error information in "error" attribute */ - status: LogsQueryResultStatus; + partialError: LogsErrorInfo; + /** Indicates that the query partially failed.*/ + status: LogsQueryResultStatus.PartialFailure; /** Statistics represented in JSON format. */ statistics?: Record; /** Visualization data in JSON format. */ visualization?: Record; } -/** Configurable HTTP request settings and `throwOnAnyFailure` setting for the Logs query batch operation. */ -export interface LogsQueryBatchOptions extends OperationOptions { - /** - * If true, will cause the batch operation to throw if any query operations in the batch did not succeed. - */ - throwOnAnyFailure?: boolean; +/** Result type for Failure Scenario representing error for logs queryWorkspace and queryBatch operations. */ +export interface LogsQueryError extends Error { + /** A machine readable error code. */ + code: string; + /** Indicates that the query failed */ + status: LogsQueryResultStatus.Failure; } -/** The Analytics query. Learn more about the [Analytics query syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/) */ +/** Configurable HTTP request settings for the Logs query batch operation. */ +export interface LogsQueryBatchOptions extends OperationOptions {} + +/** The Kusto query. For more information about Kusto, see [Kusto query overview](https://docs.microsoft.com/azure/data-explorer/kusto/query). */ // NOTE: 'id' is added automatically by our LogsQueryClient. export interface QueryBatch { /** The workspace for this query. */ @@ -145,31 +147,12 @@ export interface QueryBatch { includeVisualization?: boolean; } -/** Results for a batch query. */ -export interface LogsQueryBatchResult { - /** An array of responses corresponding to each individual request in a batch. */ - results: { - /** Populated results from the query */ - tables?: LogsTable[]; - /** error information for partial errors or failed queries */ - error?: LogsErrorInfo; - /** Indicates if a query succeeded or failed or partially failed. - * Represented by "Partial" | "Success" | "Failed". - * For partially failed queries, users can find data in "tables" attribute - * and error information in "error" attribute */ - status?: LogsQueryResultStatus; - /** Statistics represented in JSON format. */ - statistics?: Record; - /** Visualization data in JSON format. */ - visualization?: Record; - }[]; -} - -/** Indicates if a query succeeded or failed or partially failed. - * Represented by "Partial" | "Success" | "Failed". - * For partially failed queries, users can find data in "tables" attribute - * and error information in "error" attribute */ -export type LogsQueryResultStatus = "PartialFailure" | "Success" | "Failure"; +/** Results for a batch query. Each result in the array is either of type + * {@link LogsQueryError} or {@link LogsQueryPartialResult} or {@link LogsQuerySuccessfulResult} + */ +export type LogsQueryBatchResult = Array< + LogsQueryPartialResult | LogsQuerySuccessfulResult | LogsQueryError +>; /** Contains the columns and rows for one table in a query response. 
*/ export interface LogsTable { diff --git a/sdk/monitor/monitor-query/test/public/logsQueryClient.spec.ts b/sdk/monitor/monitor-query/test/public/logsQueryClient.spec.ts index 6fe8c1eba7d6..959183f20fc2 100644 --- a/sdk/monitor/monitor-query/test/public/logsQueryClient.spec.ts +++ b/sdk/monitor/monitor-query/test/public/logsQueryClient.spec.ts @@ -6,7 +6,7 @@ import { Context } from "mocha"; import { env } from "process"; import { createRecorderAndLogsClient, RecorderAndLogsClient } from "./shared/testShared"; import { Recorder } from "@azure-tools/test-recorder"; -import { Durations, LogsQueryClient, QueryBatch } from "../../src"; +import { Durations, LogsQueryClient, LogsQueryResultStatus, QueryBatch } from "../../src"; // import { runWithTelemetry } from "../setupOpenTelemetry"; import { assertQueryTable, getMonitorWorkspaceId, loggerForTest } from "./shared/testShared"; @@ -135,71 +135,72 @@ describe("LogsQueryClient live tests", function() { const results = await logsClient.queryWorkspace(monitorWorkspaceId, constantsQuery, { duration: Durations.fiveMinutes }); + if (results.status === LogsQueryResultStatus.Success) { + const table = results.tables[0]; + + // check the column types all match what we expect. + assert.deepEqual( + [ + { + name: "stringcolumn", + type: "string" + }, + { + name: "boolcolumn", + type: "bool" + }, + { + name: "datecolumn", + type: "datetime" + }, + { + name: "intcolumn", + type: "int" + }, + { + name: "longcolumn", + type: "long" + }, + { + name: "realcolumn", + type: "real" + }, + { + name: "dynamiccolumn", + type: "dynamic" + } + ], + table.columnDescriptors + ); - const table = results.tables[0]; - - // check the column types all match what we expect. - assert.deepEqual( - [ - { - name: "stringcolumn", - type: "string" - }, - { - name: "boolcolumn", - type: "bool" - }, - { - name: "datecolumn", - type: "datetime" - }, - { - name: "intcolumn", - type: "int" - }, - { - name: "longcolumn", - type: "long" - }, - { - name: "realcolumn", - type: "real" - }, - { - name: "dynamiccolumn", - type: "dynamic" - } - ], - table.columnDescriptors - ); + table.rows.map((rowValues) => { + const [ + stringColumn, + boolColumn, + dateColumn, + intColumn, + longColumn, + realColumn, + dynamicColumn, + ...rest + ] = rowValues; + + assert.strictEqual(stringColumn, "hello"); + assert.equal((dateColumn as Date).valueOf(), new Date("2000-01-02 03:04:05Z").valueOf()); + assert.strictEqual(boolColumn, true); + + // all the number types (real, int, long) are all represented using `number` + assert.strictEqual(intColumn, 100); + assert.strictEqual(longColumn, 101); + assert.strictEqual(realColumn, 102.1); + + assert.deepEqual(dynamicColumn, { + hello: "world" + }); - table.rows.map((rowValues) => { - const [ - stringColumn, - boolColumn, - dateColumn, - intColumn, - longColumn, - realColumn, - dynamicColumn, - ...rest - ] = rowValues; - - assert.strictEqual(stringColumn, "hello"); - assert.equal((dateColumn as Date).valueOf(), new Date("2000-01-02 03:04:05Z").valueOf()); - assert.strictEqual(boolColumn, true); - - // all the number types (real, int, long) are all represented using `number` - assert.strictEqual(intColumn, 100); - assert.strictEqual(longColumn, 101); - assert.strictEqual(realColumn, 102.1); - - assert.deepEqual(dynamicColumn, { - hello: "world" + assert.isEmpty(rest); }); - - assert.isEmpty(rest); - }); + } }); it("queryLogsBatch with types", async () => { @@ -225,82 +226,79 @@ describe("LogsQueryClient live tests", function() { if ((result as any)["__fixApplied"]) { 
console.log(`TODO: Fix was required to pass`); } - - const table = result.results?.[0].tables?.[0]; - console.log(JSON.stringify(result.results?.[0].tables)); - - if (table == null) { - throw new Error(JSON.stringify(result.results?.[0].error)); - } - - if (result.results?.[0].status === "PartialFailure") { - throw new Error( - JSON.stringify({ ...result.results?.[0].error, ...result.results?.[0].tables }) + if (result[0].status === LogsQueryResultStatus.Success) { + const table = result[0].tables[0]; + console.log(JSON.stringify(result[0].tables)); + + // check the column types all match what we expect. + assert.deepEqual( + [ + { + name: "stringcolumn", + type: "string" + }, + { + name: "boolcolumn", + type: "bool" + }, + { + name: "datecolumn", + type: "datetime" + }, + { + name: "intcolumn", + type: "int" + }, + { + name: "longcolumn", + type: "long" + }, + { + name: "realcolumn", + type: "real" + }, + { + name: "dynamiccolumn", + type: "dynamic" + } + ], + table.columnDescriptors ); - } - // check the column types all match what we expect. - assert.deepEqual( - [ - { - name: "stringcolumn", - type: "string" - }, - { - name: "boolcolumn", - type: "bool" - }, - { - name: "datecolumn", - type: "datetime" - }, - { - name: "intcolumn", - type: "int" - }, - { - name: "longcolumn", - type: "long" - }, - { - name: "realcolumn", - type: "real" - }, - { - name: "dynamiccolumn", - type: "dynamic" - } - ], - table.columnDescriptors - ); + table.rows.map((rowValues) => { + const [ + stringColumn, + boolColumn, + dateColumn, + intColumn, + longColumn, + realColumn, + dynamicColumn, + ...rest + ] = rowValues; + + assert.strictEqual(stringColumn, "hello"); + assert.equal((dateColumn as Date).valueOf(), new Date("2000-01-02 03:04:05Z").valueOf()); + assert.strictEqual(boolColumn, true); + + // all the number types (real, int, long) are all represented using `number` + assert.strictEqual(intColumn, 100); + assert.strictEqual(longColumn, 101); + assert.strictEqual(realColumn, 102.1); + + assert.deepEqual(dynamicColumn, { + hello: "world" + }); - table.rows.map((rowValues) => { - const [ - stringColumn, - boolColumn, - dateColumn, - intColumn, - longColumn, - realColumn, - dynamicColumn, - ...rest - ] = rowValues; - - assert.strictEqual(stringColumn, "hello"); - assert.equal((dateColumn as Date).valueOf(), new Date("2000-01-02 03:04:05Z").valueOf()); - assert.strictEqual(boolColumn, true); - - // all the number types (real, int, long) are all represented using `number` - assert.strictEqual(intColumn, 100); - assert.strictEqual(longColumn, 101); - assert.strictEqual(realColumn, 102.1); - - assert.deepEqual(dynamicColumn, { - hello: "world" + assert.isEmpty(rest); }); - - assert.isEmpty(rest); - }); + } + if (result[0].status === LogsQueryResultStatus.PartialFailure) { + throw new Error(JSON.stringify({ ...result[0].partialError, ...result[0].partialTables })); + } + if (result[0].status === LogsQueryResultStatus.Failure) { + throw new Error(JSON.stringify({ ...result[0] })); + } }); describe.skip("Ingested data tests (can be slow due to loading times)", () => { @@ -347,16 +345,17 @@ describe("LogsQueryClient live tests", function() { // TODO: the actual types aren't being deserialized (everything is coming back as 'string') // this is incorrect, it'll be updated. 
- - assertQueryTable( - singleQueryLogsResult.tables?.[0], - { - name: "PrimaryResult", - columns: ["Kind", "Name", "Target", "TestRunId"], - rows: [["now", "testSpan", "testSpan", testRunId.toString()]] - }, - "Query for the last day" - ); + if (singleQueryLogsResult.status === LogsQueryResultStatus.Success) { + assertQueryTable( + singleQueryLogsResult.tables?.[0], + { + name: "PrimaryResult", + columns: ["Kind", "Name", "Target", "TestRunId"], + rows: [["now", "testSpan", "testSpan", testRunId.toString()]] + }, + "Query for the last day" + ); + } }); it("queryLogsBatch", async () => { @@ -380,26 +379,28 @@ describe("LogsQueryClient live tests", function() { if ((result as any)["__fixApplied"]) { console.log(`TODO: Fix was required to pass`); } - - assertQueryTable( - result.results?.[0].tables?.[0], - { - name: "PrimaryResult", - columns: ["Kind", "Name", "Target", "TestRunId"], - rows: [["now", "testSpan", "testSpan", testRunId.toString()]] - }, - "Standard results" - ); - - assertQueryTable( - result.results?.[1].tables?.[0], - { - name: "PrimaryResult", - columns: ["Count"], - rows: [["1"]] - }, - "count table" - ); + if (result[0].status === LogsQueryResultStatus.Success) { + assertQueryTable( + result[0].tables?.[0], + { + name: "PrimaryResult", + columns: ["Kind", "Name", "Target", "TestRunId"], + rows: [["now", "testSpan", "testSpan", testRunId.toString()]] + }, + "Standard results" + ); + } + if (result[1].status === LogsQueryResultStatus.Success) { + assertQueryTable( + result[1].tables?.[0], + { + name: "PrimaryResult", + columns: ["Count"], + rows: [["1"]] + }, + "count table" + ); + } }); async function checkLogsHaveBeenIngested(args: { @@ -419,13 +420,24 @@ describe("LogsQueryClient live tests", function() { duration: Durations.twentyFourHours }); - const numRows = result.tables?.[0].rows?.length; - - if (numRows != null && numRows > 0) { - loggerForTest.verbose( - `[Attempt: ${i}/${args.maxTries}] Results came back, done waiting.` - ); - return; + if (result.status === LogsQueryResultStatus.Success) { + const numRows = result.tables?.[0].rows?.length; + + if (numRows != null && numRows > 0) { + loggerForTest.verbose( + `[Attempt: ${i}/${args.maxTries}] Results came back, done waiting.` + ); + return; + } + } else if (result.status === LogsQueryResultStatus.PartialFailure) { + const numRows = result.partialTables?.[0].rows?.length; + + if (numRows != null && numRows > 0) { + loggerForTest.verbose( + `[Attempt: ${i}/${args.maxTries}] Partial Results came back, done waiting.` + ); + return; + } } loggerForTest.verbose(