diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.irequesttypesmap.es.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.irequesttypesmap.es.md deleted file mode 100644 index 9cebff05dc9db3..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.irequesttypesmap.es.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [IRequestTypesMap](./kibana-plugin-plugins-data-server.irequesttypesmap.md) > [es](./kibana-plugin-plugins-data-server.irequesttypesmap.es.md) - -## IRequestTypesMap.es property - -Signature: - -```typescript -[ES_SEARCH_STRATEGY]: IEsSearchRequest; -``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.irequesttypesmap.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.irequesttypesmap.md deleted file mode 100644 index 3f5e4ba0f77999..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.irequesttypesmap.md +++ /dev/null @@ -1,20 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [IRequestTypesMap](./kibana-plugin-plugins-data-server.irequesttypesmap.md) - -## IRequestTypesMap interface - -The map of search strategy IDs to the corresponding request type definitions. - -Signature: - -```typescript -export interface IRequestTypesMap -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [es](./kibana-plugin-plugins-data-server.irequesttypesmap.es.md) | IEsSearchRequest | | - diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.iresponsetypesmap.es.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.iresponsetypesmap.es.md deleted file mode 100644 index 1154fc141d6c7d..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.iresponsetypesmap.es.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [IResponseTypesMap](./kibana-plugin-plugins-data-server.iresponsetypesmap.md) > [es](./kibana-plugin-plugins-data-server.iresponsetypesmap.es.md) - -## IResponseTypesMap.es property - -Signature: - -```typescript -[ES_SEARCH_STRATEGY]: IEsSearchResponse; -``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.iresponsetypesmap.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.iresponsetypesmap.md deleted file mode 100644 index 629ab4347eda80..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.iresponsetypesmap.md +++ /dev/null @@ -1,20 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [IResponseTypesMap](./kibana-plugin-plugins-data-server.iresponsetypesmap.md) - -## IResponseTypesMap interface - -The map of search strategy IDs to the corresponding response type definitions. 
- -Signature: - -```typescript -export interface IResponseTypesMap -``` - -## Properties - -| Property | Type | Description | -| --- | --- | --- | -| [es](./kibana-plugin-plugins-data-server.iresponsetypesmap.es.md) | IEsSearchResponse | | - diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearch.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearch.md deleted file mode 100644 index 96991579c17169..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearch.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearch](./kibana-plugin-plugins-data-server.isearch.md) - -## ISearch type - -Signature: - -```typescript -export declare type ISearch = (context: RequestHandlerContext, request: IRequestTypesMap[T], options?: ISearchOptions) => Promise; -``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchcancel.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchcancel.md deleted file mode 100644 index b5a687d1b19d84..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchcancel.md +++ /dev/null @@ -1,11 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearchCancel](./kibana-plugin-plugins-data-server.isearchcancel.md) - -## ISearchCancel type - -Signature: - -```typescript -export declare type ISearchCancel = (context: RequestHandlerContext, id: string) => Promise; -``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md index 49412fc42d3b5f..002ce864a1aa41 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.md @@ -15,4 +15,5 @@ export interface ISearchOptions | Property | Type | Description | | --- | --- | --- | | [signal](./kibana-plugin-plugins-data-server.isearchoptions.signal.md) | AbortSignal | An AbortSignal that allows the caller of search to abort a search request. 
| +| [strategy](./kibana-plugin-plugins-data-server.isearchoptions.strategy.md) | string | | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md new file mode 100644 index 00000000000000..6df72d023e2c08 --- /dev/null +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchoptions.strategy.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearchOptions](./kibana-plugin-plugins-data-server.isearchoptions.md) > [strategy](./kibana-plugin-plugins-data-server.isearchoptions.strategy.md) + +## ISearchOptions.strategy property + +Signature: + +```typescript +strategy?: string; +``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.md index 93e253b2e98a3f..ca8ad8fdc06eac 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.md @@ -14,5 +14,5 @@ export interface ISearchSetup | Property | Type | Description | | --- | --- | --- | -| [registerSearchStrategy](./kibana-plugin-plugins-data-server.isearchsetup.registersearchstrategy.md) | TRegisterSearchStrategy | Extension point exposed for other plugins to register their own search strategies. | +| [registerSearchStrategy](./kibana-plugin-plugins-data-server.isearchsetup.registersearchstrategy.md) | (name: string, strategy: ISearchStrategy) => void | Extension point exposed for other plugins to register their own search strategies. | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.registersearchstrategy.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.registersearchstrategy.md index c06b8b00806bfe..73c575e7095ed2 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.registersearchstrategy.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchsetup.registersearchstrategy.md @@ -9,5 +9,5 @@ Extension point exposed for other plugins to register their own search strategie Signature: ```typescript -registerSearchStrategy: TRegisterSearchStrategy; +registerSearchStrategy: (name: string, strategy: ISearchStrategy) => void; ``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md index 0ba4bf578d6cc9..970b2811a574b0 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md @@ -9,5 +9,5 @@ Get other registered search strategies. 
For example, if a new strategy needs to Signature: ```typescript -getSearchStrategy: TGetSearchStrategy; +getSearchStrategy: (name: string) => ISearchStrategy; ``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md index abe72396f61e18..308ce3cb568bc7 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.md @@ -14,5 +14,6 @@ export interface ISearchStart | Property | Type | Description | | --- | --- | --- | -| [getSearchStrategy](./kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md) | TGetSearchStrategy | Get other registered search strategies. For example, if a new strategy needs to use the already-registered ES search strategy, it can use this function to accomplish that. | +| [getSearchStrategy](./kibana-plugin-plugins-data-server.isearchstart.getsearchstrategy.md) | (name: string) => ISearchStrategy | Get other registered search strategies. For example, if a new strategy needs to use the already-registered ES search strategy, it can use this function to accomplish that. | +| [search](./kibana-plugin-plugins-data-server.isearchstart.search.md) | (context: RequestHandlerContext, request: IKibanaSearchRequest, options: ISearchOptions) => Promise<IKibanaSearchResponse> | | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md new file mode 100644 index 00000000000000..1c2ae916995591 --- /dev/null +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstart.search.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [ISearchStart](./kibana-plugin-plugins-data-server.isearchstart.md) > [search](./kibana-plugin-plugins-data-server.isearchstart.search.md) + +## ISearchStart.search property + +Signature: + +```typescript +search: (context: RequestHandlerContext, request: IKibanaSearchRequest, options: ISearchOptions) => Promise; +``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.cancel.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.cancel.md index c1e0c3d9f23300..34903697090ead 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.cancel.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.cancel.md @@ -7,5 +7,5 @@ Signature: ```typescript -cancel?: ISearchCancel; +cancel?: (context: RequestHandlerContext, id: string) => Promise; ``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.md index 167c6ab6e5a16f..d54e027c4b847e 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.md @@ -9,13 +9,13 @@ Search strategy interface contains a search method that takes in a request and r Signature: ```typescript -export interface ISearchStrategy +export interface ISearchStrategy ``` ## Properties | Property | Type 
| Description | | --- | --- | --- | -| [cancel](./kibana-plugin-plugins-data-server.isearchstrategy.cancel.md) | ISearchCancel<T> | | -| [search](./kibana-plugin-plugins-data-server.isearchstrategy.search.md) | ISearch<T> | | +| [cancel](./kibana-plugin-plugins-data-server.isearchstrategy.cancel.md) | (context: RequestHandlerContext, id: string) => Promise<void> | | +| [search](./kibana-plugin-plugins-data-server.isearchstrategy.search.md) | (context: RequestHandlerContext, request: IEsSearchRequest, options?: ISearchOptions) => Promise<IEsSearchResponse> | | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.search.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.search.md index 34a17ca87807a4..1a225d0c9aeabf 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.search.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.isearchstrategy.search.md @@ -7,5 +7,5 @@ Signature: ```typescript -search: ISearch; +search: (context: RequestHandlerContext, request: IEsSearchRequest, options?: ISearchOptions) => Promise; ``` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md index c80112fb17dde5..9adefda7183388 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.md @@ -40,8 +40,6 @@ | [IIndexPattern](./kibana-plugin-plugins-data-server.iindexpattern.md) | | | [IndexPatternAttributes](./kibana-plugin-plugins-data-server.indexpatternattributes.md) | Use data plugin interface instead | | [IndexPatternFieldDescriptor](./kibana-plugin-plugins-data-server.indexpatternfielddescriptor.md) | | -| [IRequestTypesMap](./kibana-plugin-plugins-data-server.irequesttypesmap.md) | The map of search strategy IDs to the corresponding request type definitions. | -| [IResponseTypesMap](./kibana-plugin-plugins-data-server.iresponsetypesmap.md) | The map of search strategy IDs to the corresponding response type definitions. | | [ISearchOptions](./kibana-plugin-plugins-data-server.isearchoptions.md) | | | [ISearchSetup](./kibana-plugin-plugins-data-server.isearchsetup.md) | | | [ISearchStart](./kibana-plugin-plugins-data-server.isearchstart.md) | | @@ -73,8 +71,5 @@ | --- | --- | | [FieldFormatsGetConfigFn](./kibana-plugin-plugins-data-server.fieldformatsgetconfigfn.md) | | | [IFieldFormatsRegistry](./kibana-plugin-plugins-data-server.ifieldformatsregistry.md) | | -| [ISearch](./kibana-plugin-plugins-data-server.isearch.md) | | -| [ISearchCancel](./kibana-plugin-plugins-data-server.isearchcancel.md) | | | [ParsedInterval](./kibana-plugin-plugins-data-server.parsedinterval.md) | | -| [TStrategyTypes](./kibana-plugin-plugins-data-server.tstrategytypes.md) | Contains all known strategy type identifiers that will be used to map to request and response shapes. 
Plugins that wish to add their own custom search strategies should extend this type via:const MY\_STRATEGY = 'MY\_STRATEGY';declare module 'src/plugins/search/server' { export interface IRequestTypesMap { \[MY\_STRATEGY\]: IMySearchRequest; }export interface IResponseTypesMap { \[MY\_STRATEGY\]: IMySearchResponse } } | diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.tstrategytypes.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.tstrategytypes.md deleted file mode 100644 index 443d8d1b424d0b..00000000000000 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.tstrategytypes.md +++ /dev/null @@ -1,19 +0,0 @@ - - -[Home](./index.md) > [kibana-plugin-plugins-data-server](./kibana-plugin-plugins-data-server.md) > [TStrategyTypes](./kibana-plugin-plugins-data-server.tstrategytypes.md) - -## TStrategyTypes type - -Contains all known strategy type identifiers that will be used to map to request and response shapes. Plugins that wish to add their own custom search strategies should extend this type via: - -const MY\_STRATEGY = 'MY\_STRATEGY'; - -declare module 'src/plugins/search/server' { export interface IRequestTypesMap { \[MY\_STRATEGY\]: IMySearchRequest; } - -export interface IResponseTypesMap { \[MY\_STRATEGY\]: IMySearchResponse } } - -Signature: - -```typescript -export declare type TStrategyTypes = typeof ES_SEARCH_STRATEGY | string; -``` diff --git a/packages/kbn-config-schema/src/types/string_type.ts b/packages/kbn-config-schema/src/types/string_type.ts index cb780bcbbc6bde..c7d386df7c3bad 100644 --- a/packages/kbn-config-schema/src/types/string_type.ts +++ b/packages/kbn-config-schema/src/types/string_type.ts @@ -29,8 +29,8 @@ export type StringOptions = TypeOptions & { export class StringType extends Type { constructor(options: StringOptions = {}) { - // We want to allow empty strings, however calling `allow('')` casues - // Joi to whitelist the value and skip any additional validation. + // We want to allow empty strings, however calling `allow('')` causes + // Joi to allow the value and skip any additional validation. // Instead, we reimplement the string validator manually except in the // hostname case where empty strings aren't allowed anyways. let schema = diff --git a/src/core/MIGRATION.md b/src/core/MIGRATION.md index 90b1bb4fd5320d..5757b6dff8d3f7 100644 --- a/src/core/MIGRATION.md +++ b/src/core/MIGRATION.md @@ -1309,7 +1309,7 @@ This table shows where these uiExports have moved to in the New Platform. In mos | `hacks` | n/a | Just run the code in your plugin's `start` method. | | `home` | [`plugins.home.featureCatalogue.register`](./src/plugins/home/public/feature_catalogue) | Must add `home` as a dependency in your kibana.json. | | `indexManagement` | | Should be an API on the indexManagement plugin. | -| `injectDefaultVars` | n/a | Plugins will only be able to "whitelist" config values for the frontend. See [#41990](https://github.com/elastic/kibana/issues/41990) | +| `injectDefaultVars` | n/a | Plugins will only be able to allow config values for the frontend. See [#41990](https://github.com/elastic/kibana/issues/41990) | | `inspectorViews` | | Should be an API on the data (?) plugin. | | `interpreter` | | Should be an API on the interpreter plugin. 
| | `links` | n/a | Not necessary, just register your app via `core.application.register` | @@ -1389,7 +1389,7 @@ class MyPlugin { } ``` -If your plugin also have a client-side part, you can also expose configuration properties to it using a whitelisting mechanism with the configuration `exposeToBrowser` property. +If your plugin also have a client-side part, you can also expose configuration properties to it using the configuration `exposeToBrowser` allow-list property. ```typescript // my_plugin/server/index.ts diff --git a/src/core/server/elasticsearch/client/mocks.ts b/src/core/server/elasticsearch/client/mocks.ts index 75644435a7f2a7..34e83922d4d86c 100644 --- a/src/core/server/elasticsearch/client/mocks.ts +++ b/src/core/server/elasticsearch/client/mocks.ts @@ -28,7 +28,7 @@ const createInternalClientMock = (): DeeplyMockedKeys => { node: 'http://localhost', }) as any; - const blackListedProps = [ + const omittedProps = [ '_events', '_eventsCount', '_maxListeners', @@ -39,9 +39,9 @@ const createInternalClientMock = (): DeeplyMockedKeys => { 'helpers', ]; - const mockify = (obj: Record, blacklist: string[] = []) => { + const mockify = (obj: Record, omitted: string[] = []) => { Object.keys(obj) - .filter((key) => !blacklist.includes(key)) + .filter((key) => !omitted.includes(key)) .forEach((key) => { const propType = typeof obj[key]; if (propType === 'function') { @@ -52,7 +52,7 @@ const createInternalClientMock = (): DeeplyMockedKeys => { }); }; - mockify(client, blackListedProps); + mockify(client, omittedProps); client.transport = { request: jest.fn(), diff --git a/src/core/server/saved_objects/mappings/lib/get_root_properties_objects.ts b/src/core/server/saved_objects/mappings/lib/get_root_properties_objects.ts index 81ba1d8235561f..a998dbee0259e2 100644 --- a/src/core/server/saved_objects/mappings/lib/get_root_properties_objects.ts +++ b/src/core/server/saved_objects/mappings/lib/get_root_properties_objects.ts @@ -39,14 +39,14 @@ import { getRootProperties } from './get_root_properties'; * @return {EsPropertyMappings} */ -const blacklist = ['migrationVersion', 'references']; +const omittedRootProps = ['migrationVersion', 'references']; export function getRootPropertiesObjects(mappings: IndexMapping) { const rootProperties = getRootProperties(mappings); return Object.entries(rootProperties).reduce((acc, [key, value]) => { // we consider the existence of the properties or type of object to designate that this is an object datatype if ( - !blacklist.includes(key) && + !omittedRootProps.includes(key) && ((value as SavedObjectsComplexFieldMapping).properties || value.type === 'object') ) { acc[key] = value; diff --git a/src/plugins/data/public/search/aggs/metrics/top_hit.test.ts b/src/plugins/data/public/search/aggs/metrics/top_hit.test.ts index cd5b4a2f724bd9..c2434df3ae53c1 100644 --- a/src/plugins/data/public/search/aggs/metrics/top_hit.test.ts +++ b/src/plugins/data/public/search/aggs/metrics/top_hit.test.ts @@ -111,9 +111,7 @@ describe('Top hit metric', () => { it('requests both source and docvalues_fields for non-text aggregatable fields', () => { init({ fieldName: 'bytes', readFromDocValues: true }); expect(aggDsl.top_hits._source).toBe('bytes'); - expect(aggDsl.top_hits.docvalue_fields).toEqual([ - { field: 'bytes', format: 'use_field_mapping' }, - ]); + expect(aggDsl.top_hits.docvalue_fields).toEqual([{ field: 'bytes' }]); }); it('requests both source and docvalues_fields for date aggregatable fields', () => { diff --git a/src/plugins/data/public/search/aggs/metrics/top_hit.ts 
b/src/plugins/data/public/search/aggs/metrics/top_hit.ts index 5ca883e60afd3b..bee731dcc2e0d6 100644 --- a/src/plugins/data/public/search/aggs/metrics/top_hit.ts +++ b/src/plugins/data/public/search/aggs/metrics/top_hit.ts @@ -88,12 +88,15 @@ export const getTopHitMetricAgg = () => { }; } else { if (field.readFromDocValues) { - // always format date fields as date_time to avoid - // displaying unformatted dates like epoch_millis - // or other not-accepted momentjs formats - const format = - field.type === KBN_FIELD_TYPES.DATE ? 'date_time' : 'use_field_mapping'; - output.params.docvalue_fields = [{ field: field.name, format }]; + output.params.docvalue_fields = [ + { + field: field.name, + // always format date fields as date_time to avoid + // displaying unformatted dates like epoch_millis + // or other not-accepted momentjs formats + ...(field.type === KBN_FIELD_TYPES.DATE && { format: 'date_time' }), + }, + ]; } output.params._source = field.name === '_source' ? true : field.name; } diff --git a/src/plugins/data/server/index.ts b/src/plugins/data/server/index.ts index b94238dcf96a4b..321bd913ce760a 100644 --- a/src/plugins/data/server/index.ts +++ b/src/plugins/data/server/index.ts @@ -164,15 +164,10 @@ import { export { ParsedInterval } from '../common'; export { - ISearch, - ISearchCancel, + ISearchStrategy, ISearchOptions, - IRequestTypesMap, - IResponseTypesMap, ISearchSetup, ISearchStart, - TStrategyTypes, - ISearchStrategy, getDefaultSearchParams, getTotalLoaded, } from './search'; diff --git a/src/plugins/data/server/search/es_search/es_search_strategy.ts b/src/plugins/data/server/search/es_search/es_search_strategy.ts index db08ddf9208185..82f8ef21ebb386 100644 --- a/src/plugins/data/server/search/es_search/es_search_strategy.ts +++ b/src/plugins/data/server/search/es_search/es_search_strategy.ts @@ -17,17 +17,16 @@ * under the License. */ import { first } from 'rxjs/operators'; -import { RequestHandlerContext, SharedGlobalConfig } from 'kibana/server'; +import { SharedGlobalConfig } from 'kibana/server'; import { SearchResponse } from 'elasticsearch'; import { Observable } from 'rxjs'; -import { ES_SEARCH_STRATEGY } from '../../../common/search'; import { ISearchStrategy, getDefaultSearchParams, getTotalLoaded } from '..'; export const esSearchStrategyProvider = ( config$: Observable -): ISearchStrategy => { +): ISearchStrategy => { return { - search: async (context: RequestHandlerContext, request, options) => { + search: async (context, request, options) => { const config = await config$.pipe(first()).toPromise(); const defaultParams = getDefaultSearchParams(config); diff --git a/src/plugins/data/server/search/index.ts b/src/plugins/data/server/search/index.ts index 882f56e83d4ca2..67789fcbf56b47 100644 --- a/src/plugins/data/server/search/index.ts +++ b/src/plugins/data/server/search/index.ts @@ -17,16 +17,6 @@ * under the License. 
*/ -export { - ISearch, - ISearchCancel, - ISearchOptions, - IRequestTypesMap, - IResponseTypesMap, - ISearchSetup, - ISearchStart, - TStrategyTypes, - ISearchStrategy, -} from './types'; +export { ISearchStrategy, ISearchOptions, ISearchSetup, ISearchStart } from './types'; export { getDefaultSearchParams, getTotalLoaded } from './es_search'; diff --git a/src/plugins/data/server/search/mocks.ts b/src/plugins/data/server/search/mocks.ts index 0aab466a9a0d9e..b210df3c55db96 100644 --- a/src/plugins/data/server/search/mocks.ts +++ b/src/plugins/data/server/search/mocks.ts @@ -26,5 +26,6 @@ export function createSearchSetupMock() { export function createSearchStartMock() { return { getSearchStrategy: jest.fn(), + search: jest.fn(), }; } diff --git a/src/plugins/data/server/search/routes.test.ts b/src/plugins/data/server/search/routes.test.ts index 4ef67de93e4549..167bd5af5d51d6 100644 --- a/src/plugins/data/server/search/routes.test.ts +++ b/src/plugins/data/server/search/routes.test.ts @@ -33,9 +33,8 @@ describe('Search service', () => { }); it('handler calls context.search.search with the given request and strategy', async () => { - const mockSearch = jest.fn().mockResolvedValue('yay'); - mockDataStart.search.getSearchStrategy.mockReturnValueOnce({ search: mockSearch }); - + const response = { id: 'yay' }; + mockDataStart.search.search.mockResolvedValue(response); const mockContext = {}; const mockBody = { params: {} }; const mockParams = { strategy: 'foo' }; @@ -51,21 +50,21 @@ describe('Search service', () => { const handler = mockRouter.post.mock.calls[0][1]; await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); - expect(mockDataStart.search.getSearchStrategy.mock.calls[0][0]).toBe(mockParams.strategy); - expect(mockSearch).toBeCalled(); - expect(mockSearch.mock.calls[0][1]).toStrictEqual(mockBody); + expect(mockDataStart.search.search).toBeCalled(); + expect(mockDataStart.search.search.mock.calls[0][1]).toStrictEqual(mockBody); expect(mockResponse.ok).toBeCalled(); - expect(mockResponse.ok.mock.calls[0][0]).toEqual({ body: 'yay' }); + expect(mockResponse.ok.mock.calls[0][0]).toEqual({ + body: response, + }); }); it('handler throws an error if the search throws an error', async () => { - const mockSearch = jest.fn().mockRejectedValue({ + mockDataStart.search.search.mockRejectedValue({ message: 'oh no', body: { error: 'oops', }, }); - mockDataStart.search.getSearchStrategy.mockReturnValueOnce({ search: mockSearch }); const mockContext = {}; const mockBody = { params: {} }; @@ -82,9 +81,8 @@ describe('Search service', () => { const handler = mockRouter.post.mock.calls[0][1]; await handler((mockContext as unknown) as RequestHandlerContext, mockRequest, mockResponse); - expect(mockDataStart.search.getSearchStrategy.mock.calls[0][0]).toBe(mockParams.strategy); - expect(mockSearch).toBeCalled(); - expect(mockSearch.mock.calls[0][1]).toStrictEqual(mockBody); + expect(mockDataStart.search.search).toBeCalled(); + expect(mockDataStart.search.search.mock.calls[0][1]).toStrictEqual(mockBody); expect(mockResponse.customError).toBeCalled(); const error: any = mockResponse.customError.mock.calls[0][0]; expect(error.body.message).toBe('oh no'); diff --git a/src/plugins/data/server/search/routes.ts b/src/plugins/data/server/search/routes.ts index 7b6c045b0908c4..bf1982a1f7fb2b 100644 --- a/src/plugins/data/server/search/routes.ts +++ b/src/plugins/data/server/search/routes.ts @@ -42,10 +42,12 @@ export function registerSearchRoute(core: CoreSetup): v const signal = 
getRequestAbortedSignal(request.events.aborted$); const [, , selfStart] = await core.getStartServices(); - const searchStrategy = selfStart.search.getSearchStrategy(strategy); try { - const response = await searchStrategy.search(context, searchRequest, { signal }); + const response = await selfStart.search.search(context, searchRequest, { + signal, + strategy, + }); return res.ok({ body: response }); } catch (err) { return res.customError({ diff --git a/src/plugins/data/server/search/search_service.ts b/src/plugins/data/server/search/search_service.ts index 34ed8c6c6f4012..20f9a7488893f7 100644 --- a/src/plugins/data/server/search/search_service.ts +++ b/src/plugins/data/server/search/search_service.ts @@ -17,20 +17,24 @@ * under the License. */ -import { Plugin, PluginInitializerContext, CoreSetup } from '../../../../core/server'; import { - ISearchSetup, - ISearchStart, - TSearchStrategiesMap, - TRegisterSearchStrategy, - TGetSearchStrategy, -} from './types'; + Plugin, + PluginInitializerContext, + CoreSetup, + RequestHandlerContext, +} from '../../../../core/server'; +import { ISearchSetup, ISearchStart, ISearchStrategy } from './types'; import { registerSearchRoute } from './routes'; import { ES_SEARCH_STRATEGY, esSearchStrategyProvider } from './es_search'; import { DataPluginStart } from '../plugin'; +import { IEsSearchRequest } from '../../common'; + +interface StrategyMap { + [name: string]: ISearchStrategy; +} export class SearchService implements Plugin { - private searchStrategies: TSearchStrategiesMap = {}; + private searchStrategies: StrategyMap = {}; constructor(private initializerContext: PluginInitializerContext) {} @@ -45,17 +49,28 @@ export class SearchService implements Plugin { return { registerSearchStrategy: this.registerSearchStrategy }; } + private search(context: RequestHandlerContext, searchRequest: IEsSearchRequest, options: any) { + return this.getSearchStrategy(options.strategy || ES_SEARCH_STRATEGY).search( + context, + searchRequest, + { signal: options.signal } + ); + } + public start(): ISearchStart { - return { getSearchStrategy: this.getSearchStrategy }; + return { + getSearchStrategy: this.getSearchStrategy, + search: this.search, + }; } public stop() {} - private registerSearchStrategy: TRegisterSearchStrategy = (name, strategy) => { + private registerSearchStrategy = (name: string, strategy: ISearchStrategy) => { this.searchStrategies[name] = strategy; }; - private getSearchStrategy: TGetSearchStrategy = (name) => { + private getSearchStrategy = (name: string): ISearchStrategy => { const strategy = this.searchStrategies[name]; if (!strategy) { throw new Error(`Search strategy ${name} not found`); diff --git a/src/plugins/data/server/search/types.ts b/src/plugins/data/server/search/types.ts index dea325cc063bbf..12f1a1a508bd23 100644 --- a/src/plugins/data/server/search/types.ts +++ b/src/plugins/data/server/search/types.ts @@ -19,14 +19,22 @@ import { RequestHandlerContext } from '../../../../core/server'; import { IKibanaSearchResponse, IKibanaSearchRequest } from '../../common/search'; -import { ES_SEARCH_STRATEGY, IEsSearchRequest, IEsSearchResponse } from './es_search'; +import { IEsSearchRequest, IEsSearchResponse } from './es_search'; + +export interface ISearchOptions { + /** + * An `AbortSignal` that allows the caller of `search` to abort a search request. + */ + signal?: AbortSignal; + strategy?: string; +} export interface ISearchSetup { /** * Extension point exposed for other plugins to register their own search * strategies. 
*/ - registerSearchStrategy: TRegisterSearchStrategy; + registerSearchStrategy: (name: string, strategy: ISearchStrategy) => void; } export interface ISearchStart { @@ -34,78 +42,23 @@ export interface ISearchStart { * Get other registered search strategies. For example, if a new strategy needs to use the * already-registered ES search strategy, it can use this function to accomplish that. */ - getSearchStrategy: TGetSearchStrategy; -} - -export interface ISearchOptions { - /** - * An `AbortSignal` that allows the caller of `search` to abort a search request. - */ - signal?: AbortSignal; + getSearchStrategy: (name: string) => ISearchStrategy; + search: ( + context: RequestHandlerContext, + request: IKibanaSearchRequest, + options: ISearchOptions + ) => Promise; } -/** - * Contains all known strategy type identifiers that will be used to map to - * request and response shapes. Plugins that wish to add their own custom search - * strategies should extend this type via: - * - * const MY_STRATEGY = 'MY_STRATEGY'; - * - * declare module 'src/plugins/search/server' { - * export interface IRequestTypesMap { - * [MY_STRATEGY]: IMySearchRequest; - * } - * - * export interface IResponseTypesMap { - * [MY_STRATEGY]: IMySearchResponse - * } - * } - */ -export type TStrategyTypes = typeof ES_SEARCH_STRATEGY | string; - -/** - * The map of search strategy IDs to the corresponding request type definitions. - */ -export interface IRequestTypesMap { - [ES_SEARCH_STRATEGY]: IEsSearchRequest; - [key: string]: IKibanaSearchRequest; -} - -/** - * The map of search strategy IDs to the corresponding response type definitions. - */ -export interface IResponseTypesMap { - [ES_SEARCH_STRATEGY]: IEsSearchResponse; - [key: string]: IKibanaSearchResponse; -} - -export type ISearch = ( - context: RequestHandlerContext, - request: IRequestTypesMap[T], - options?: ISearchOptions -) => Promise; - -export type ISearchCancel = ( - context: RequestHandlerContext, - id: string -) => Promise; - /** * Search strategy interface contains a search method that takes in a request and returns a promise * that resolves to a response. 
*/ -export interface ISearchStrategy { - search: ISearch; - cancel?: ISearchCancel; +export interface ISearchStrategy { + search: ( + context: RequestHandlerContext, + request: IEsSearchRequest, + options?: ISearchOptions + ) => Promise; + cancel?: (context: RequestHandlerContext, id: string) => Promise; } - -export type TRegisterSearchStrategy = ( - name: T, - searchStrategy: ISearchStrategy -) => void; - -export type TGetSearchStrategy = (name: T) => ISearchStrategy; - -export type TSearchStrategiesMap = { - [K in TStrategyTypes]?: ISearchStrategy; -}; diff --git a/src/plugins/data/server/server.api.md b/src/plugins/data/server/server.api.md index 1fe03119c789de..88f2cc3264c6e6 100644 --- a/src/plugins/data/server/server.api.md +++ b/src/plugins/data/server/server.api.md @@ -507,77 +507,46 @@ export class IndexPatternsFetcher { }): Promise; } -// Warning: (ae-missing-release-tag) "IRequestTypesMap" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public -export interface IRequestTypesMap { - // Warning: (ae-forgotten-export) The symbol "IKibanaSearchRequest" needs to be exported by the entry point index.d.ts - // - // (undocumented) - [key: string]: IKibanaSearchRequest; - // Warning: (ae-forgotten-export) The symbol "ES_SEARCH_STRATEGY" needs to be exported by the entry point index.d.ts - // Warning: (ae-forgotten-export) The symbol "IEsSearchRequest" needs to be exported by the entry point index.d.ts - // - // (undocumented) - [ES_SEARCH_STRATEGY]: IEsSearchRequest; -} - -// Warning: (ae-missing-release-tag) "IResponseTypesMap" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public -export interface IResponseTypesMap { - // Warning: (ae-forgotten-export) The symbol "IKibanaSearchResponse" needs to be exported by the entry point index.d.ts - // - // (undocumented) - [key: string]: IKibanaSearchResponse; - // Warning: (ae-forgotten-export) The symbol "IEsSearchResponse" needs to be exported by the entry point index.d.ts - // - // (undocumented) - [ES_SEARCH_STRATEGY]: IEsSearchResponse; -} - -// Warning: (ae-forgotten-export) The symbol "RequestHandlerContext" needs to be exported by the entry point index.d.ts -// Warning: (ae-missing-release-tag) "ISearch" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public (undocumented) -export type ISearch = (context: RequestHandlerContext, request: IRequestTypesMap[T], options?: ISearchOptions) => Promise; - -// Warning: (ae-missing-release-tag) "ISearchCancel" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public (undocumented) -export type ISearchCancel = (context: RequestHandlerContext, id: string) => Promise; - // Warning: (ae-missing-release-tag) "ISearchOptions" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) export interface ISearchOptions { signal?: AbortSignal; + // (undocumented) + strategy?: string; } // Warning: (ae-missing-release-tag) "ISearchSetup" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) export interface ISearchSetup { - // Warning: (ae-forgotten-export) The symbol "TRegisterSearchStrategy" needs to be exported by the entry point index.d.ts - registerSearchStrategy: TRegisterSearchStrategy; + registerSearchStrategy: (name: string, 
strategy: ISearchStrategy) => void; } // Warning: (ae-missing-release-tag) "ISearchStart" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) export interface ISearchStart { - // Warning: (ae-forgotten-export) The symbol "TGetSearchStrategy" needs to be exported by the entry point index.d.ts - getSearchStrategy: TGetSearchStrategy; + getSearchStrategy: (name: string) => ISearchStrategy; + // Warning: (ae-forgotten-export) The symbol "RequestHandlerContext" needs to be exported by the entry point index.d.ts + // Warning: (ae-forgotten-export) The symbol "IKibanaSearchRequest" needs to be exported by the entry point index.d.ts + // Warning: (ae-forgotten-export) The symbol "IKibanaSearchResponse" needs to be exported by the entry point index.d.ts + // + // (undocumented) + search: (context: RequestHandlerContext, request: IKibanaSearchRequest, options: ISearchOptions) => Promise; } // Warning: (ae-missing-release-tag) "ISearchStrategy" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public -export interface ISearchStrategy { +export interface ISearchStrategy { // (undocumented) - cancel?: ISearchCancel; + cancel?: (context: RequestHandlerContext, id: string) => Promise; + // Warning: (ae-forgotten-export) The symbol "IEsSearchRequest" needs to be exported by the entry point index.d.ts + // Warning: (ae-forgotten-export) The symbol "IEsSearchResponse" needs to be exported by the entry point index.d.ts + // // (undocumented) - search: ISearch; + search: (context: RequestHandlerContext, request: IEsSearchRequest, options?: ISearchOptions) => Promise; } // @public (undocumented) @@ -757,11 +726,6 @@ export interface TimeRange { to: string; } -// Warning: (ae-missing-release-tag) "TStrategyTypes" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) -// -// @public -export type TStrategyTypes = typeof ES_SEARCH_STRATEGY | string; - // Warning: (ae-missing-release-tag) "UI_SETTINGS" is exported by the package, but it is missing a release tag (@alpha, @beta, @public, or @internal) // // @public (undocumented) @@ -820,13 +784,13 @@ export const UI_SETTINGS: { // src/plugins/data/server/index.ts:101:26 - (ae-forgotten-export) The symbol "TruncateFormat" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:127:27 - (ae-forgotten-export) The symbol "isFilterable" needs to be exported by the entry point index.d.ts // src/plugins/data/server/index.ts:127:27 - (ae-forgotten-export) The symbol "isNestedField" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:183:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:184:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:185:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:186:1 - (ae-forgotten-export) The symbol "Ipv4Address" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:187:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:188:1 - (ae-forgotten-export) The symbol 
"isValidInterval" needs to be exported by the entry point index.d.ts -// src/plugins/data/server/index.ts:191:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:178:1 - (ae-forgotten-export) The symbol "dateHistogramInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:179:1 - (ae-forgotten-export) The symbol "InvalidEsCalendarIntervalError" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:180:1 - (ae-forgotten-export) The symbol "InvalidEsIntervalFormatError" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:181:1 - (ae-forgotten-export) The symbol "Ipv4Address" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:182:1 - (ae-forgotten-export) The symbol "isValidEsInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:183:1 - (ae-forgotten-export) The symbol "isValidInterval" needs to be exported by the entry point index.d.ts +// src/plugins/data/server/index.ts:186:1 - (ae-forgotten-export) The symbol "toAbsoluteDates" needs to be exported by the entry point index.d.ts // (No @packageDocumentation comment for this package) diff --git a/src/plugins/telemetry_collection_manager/server/plugin.ts b/src/plugins/telemetry_collection_manager/server/plugin.ts index c3db0ca39e6acf..051bb3a11cb16c 100644 --- a/src/plugins/telemetry_collection_manager/server/plugin.ts +++ b/src/plugins/telemetry_collection_manager/server/plugin.ts @@ -37,7 +37,7 @@ import { UsageStatsPayload, StatsCollectionContext, } from './types'; - +import { isClusterOptedIn } from './util'; import { encryptTelemetry } from './encryption'; interface TelemetryCollectionPluginsDepsSetup { @@ -205,7 +205,9 @@ export class TelemetryCollectionManagerPlugin return usageData; } - return encryptTelemetry(usageData, { useProdKey: this.isDistributable }); + return encryptTelemetry(usageData.filter(isClusterOptedIn), { + useProdKey: this.isDistributable, + }); } } catch (err) { this.logger.debug( diff --git a/src/plugins/telemetry_collection_manager/server/util.test.ts b/src/plugins/telemetry_collection_manager/server/util.test.ts new file mode 100644 index 00000000000000..ba5d999c3bf9a6 --- /dev/null +++ b/src/plugins/telemetry_collection_manager/server/util.test.ts @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { isClusterOptedIn } from './util'; + +const createMockClusterUsage = (plugins: any) => { + return { + stack_stats: { + kibana: { plugins }, + }, + }; +}; + +describe('isClusterOptedIn', () => { + it('returns true if cluster has opt_in_status: true', () => { + const mockClusterUsage = createMockClusterUsage({ telemetry: { opt_in_status: true } }); + const result = isClusterOptedIn(mockClusterUsage); + expect(result).toBe(true); + }); + it('returns false if cluster has opt_in_status: false', () => { + const mockClusterUsage = createMockClusterUsage({ telemetry: { opt_in_status: false } }); + const result = isClusterOptedIn(mockClusterUsage); + expect(result).toBe(false); + }); + it('returns false if cluster has opt_in_status: undefined', () => { + const mockClusterUsage = createMockClusterUsage({ telemetry: {} }); + const result = isClusterOptedIn(mockClusterUsage); + expect(result).toBe(false); + }); + it('returns false if cluster stats is malformed', () => { + expect(isClusterOptedIn(createMockClusterUsage({}))).toBe(false); + expect(isClusterOptedIn({})).toBe(false); + expect(isClusterOptedIn(undefined)).toBe(false); + }); +}); diff --git a/src/plugins/telemetry_collection_manager/server/util.ts b/src/plugins/telemetry_collection_manager/server/util.ts new file mode 100644 index 00000000000000..d6e1b516636885 --- /dev/null +++ b/src/plugins/telemetry_collection_manager/server/util.ts @@ -0,0 +1,22 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export const isClusterOptedIn = (clusterUsage: any): boolean => { + return clusterUsage?.stack_stats?.kibana?.plugins?.telemetry?.opt_in_status === true; +}; diff --git a/src/plugins/vis_type_table/public/__snapshots__/table_vis_fn.test.ts.snap b/src/plugins/vis_type_table/public/__snapshots__/table_vis_fn.test.ts.snap index a8fe25582717c9..dc6571de969f02 100644 --- a/src/plugins/vis_type_table/public/__snapshots__/table_vis_fn.test.ts.snap +++ b/src/plugins/vis_type_table/public/__snapshots__/table_vis_fn.test.ts.snap @@ -30,6 +30,7 @@ Object { "columnIndex": null, "direction": null, }, + "title": "My Chart title", "totalFunc": "sum", }, "visData": Object { diff --git a/src/plugins/vis_type_table/public/agg_table/agg_table.js b/src/plugins/vis_type_table/public/agg_table/agg_table.js index bd7626a493338e..1e98a06c2a6a99 100644 --- a/src/plugins/vis_type_table/public/agg_table/agg_table.js +++ b/src/plugins/vis_type_table/public/agg_table/agg_table.js @@ -116,7 +116,7 @@ export function KbnAggTable(config, RecursionHelper) { return; } - self.csv.filename = (exportTitle || table.title || 'table') + '.csv'; + self.csv.filename = (exportTitle || table.title || 'unsaved') + '.csv'; $scope.rows = table.rows; $scope.formattedColumns = []; diff --git a/src/plugins/vis_type_table/public/table_vis_fn.test.ts b/src/plugins/vis_type_table/public/table_vis_fn.test.ts index 9accf8950d910b..6cb3f3e0f37791 100644 --- a/src/plugins/vis_type_table/public/table_vis_fn.test.ts +++ b/src/plugins/vis_type_table/public/table_vis_fn.test.ts @@ -37,6 +37,7 @@ describe('interpreter/functions#table', () => { columns: [{ id: 'col-0-1', name: 'Count' }], }; const visConfig = { + title: 'My Chart title', perPage: 10, showPartialRows: false, showMetricsAtAllLevels: false, diff --git a/src/plugins/vis_type_table/public/vis_controller.ts b/src/plugins/vis_type_table/public/vis_controller.ts index a5086e0c9a2d80..d87812b9f5d694 100644 --- a/src/plugins/vis_type_table/public/vis_controller.ts +++ b/src/plugins/vis_type_table/public/vis_controller.ts @@ -78,8 +78,18 @@ export function getTableVisualizationControllerClass( if (!this.$scope) { return; } + + // How things get into this $scope? 
+ // To inject variables into this $scope there's the following pipeline of stuff to check: + // - visualize_embeddable => that's what the editor creates to wrap this Angular component + // - build_pipeline => it serialize all the params into an Angular template compiled on the fly + // - table_vis_fn => unserialize the params and prepare them for the final React/Angular bridge + // - visualization_renderer => creates the wrapper component for this controller and passes the params + // + // In case some prop is missing check into the top of the chain if they are available and check + // the list above that it is passing through this.$scope.vis = this.vis; - this.$scope.visState = { params: visParams }; + this.$scope.visState = { params: visParams, title: visParams.title }; this.$scope.esResponse = esResponse; this.$scope.visParams = visParams; diff --git a/src/plugins/visualizations/public/expressions/visualization_renderer.tsx b/src/plugins/visualizations/public/expressions/visualization_renderer.tsx index 0fd81c753da249..1bca5b4f0d5397 100644 --- a/src/plugins/visualizations/public/expressions/visualization_renderer.tsx +++ b/src/plugins/visualizations/public/expressions/visualization_renderer.tsx @@ -33,6 +33,7 @@ export const visualization = () => ({ const visType = config.visType || visConfig.type; const vis = new ExprVis({ + title: config.title, type: visType as string, params: visConfig as VisParams, }); diff --git a/src/plugins/visualizations/public/legacy/build_pipeline.ts b/src/plugins/visualizations/public/legacy/build_pipeline.ts index 62ff1f83426b9c..2ef07bf18c91ce 100644 --- a/src/plugins/visualizations/public/legacy/build_pipeline.ts +++ b/src/plugins/visualizations/public/legacy/build_pipeline.ts @@ -490,7 +490,7 @@ export const buildPipeline = async ( const { indexPattern, searchSource } = vis.data; const query = searchSource!.getField('query'); const filters = searchSource!.getField('filter'); - const { uiState } = vis; + const { uiState, title } = vis; // context let pipeline = `kibana | kibana_context `; @@ -519,7 +519,7 @@ export const buildPipeline = async ( timefilter: params.timefilter, }); if (buildPipelineVisFunction[vis.type.name]) { - pipeline += buildPipelineVisFunction[vis.type.name](vis.params, schemas, uiState); + pipeline += buildPipelineVisFunction[vis.type.name]({ title, ...vis.params }, schemas, uiState); } else if (vislibCharts.includes(vis.type.name)) { const visConfig = { ...vis.params }; visConfig.dimensions = await buildVislibDimensions(vis, params); diff --git a/test/functional/apps/visualize/_area_chart.js b/test/functional/apps/visualize/_area_chart.js index ea4db35d75ccf5..41e56986f677b6 100644 --- a/test/functional/apps/visualize/_area_chart.js +++ b/test/functional/apps/visualize/_area_chart.js @@ -246,9 +246,7 @@ export default function ({ getService, getPageObjects }) { await inspector.close(); }); - // Preventing ES Promotion for master (8.0) - // https://github.com/elastic/kibana/issues/64734 - it.skip('does not scale top hit agg', async () => { + it('does not scale top hit agg', async () => { const expectedTableData = [ ['2015-09-20 00:00', '6', '9.035KB'], ['2015-09-20 01:00', '9', '5.854KB'], diff --git a/x-pack/plugins/actions/README.md b/x-pack/plugins/actions/README.md index e6b22da7a1fe3d..3470ede0f15c7e 100644 --- a/x-pack/plugins/actions/README.md +++ b/x-pack/plugins/actions/README.md @@ -160,7 +160,7 @@ This is the primary function for an action type. 
Whenever the action needs to ex | config | The decrypted configuration given to an action. This comes from the action saved object that is partially or fully encrypted within the data store. If you would like to validate the config before being passed to the executor, define `validate.config` within the action type. | | params | Parameters for the execution. These will be given at execution time by either an alert or manually provided when calling the plugin provided execute function. | | services.callCluster(path, opts) | Use this to do Elasticsearch queries on the cluster Kibana connects to. This function is the same as any other `callCluster` in Kibana but runs in the context of the user who is calling the action when security is enabled. | -| services.getScopedCallCluster | This function scopes an instance of CallCluster by returning a `callCluster(path, opts)` function that runs in the context of the user who is calling the action when security is enabled. This must only be called with instances of CallCluster provided by core. | +| services.getLegacyScopedClusterClient | This function returns an instance of the LegacyScopedClusterClient scoped to the user who is calling the action when security is enabled. | | services.savedObjectsClient | This is an instance of the saved objects client. This provides the ability to do CRUD on any saved objects within the same space the alert lives in.
The scope of the saved objects client is tied to the user in context calling the execute API or the API key provided to the execute plugin function (only when security isenabled). | | services.log(tags, [data], [timestamp]) | Use this to create server logs. (This is the same function as server.log) | diff --git a/x-pack/plugins/actions/server/mocks.ts b/x-pack/plugins/actions/server/mocks.ts index 4baf453dcb5644..e2f11abeefff22 100644 --- a/x-pack/plugins/actions/server/mocks.ts +++ b/x-pack/plugins/actions/server/mocks.ts @@ -43,7 +43,7 @@ const createServicesMock = () => { } > = { callCluster: elasticsearchServiceMock.createLegacyScopedClusterClient().callAsCurrentUser, - getScopedCallCluster: jest.fn(), + getLegacyScopedClusterClient: jest.fn(), savedObjectsClient: savedObjectsClientMock.create(), }; return mock; diff --git a/x-pack/plugins/actions/server/plugin.ts b/x-pack/plugins/actions/server/plugin.ts index c5a6db3cf4347b..9a03bee41eeeaf 100644 --- a/x-pack/plugins/actions/server/plugin.ts +++ b/x-pack/plugins/actions/server/plugin.ts @@ -337,8 +337,8 @@ export class ActionsPlugin implements Plugin, Plugi return (request) => ({ callCluster: elasticsearch.legacy.client.asScoped(request).callAsCurrentUser, savedObjectsClient: getScopedClient(request), - getScopedCallCluster(clusterClient: ILegacyClusterClient) { - return clusterClient.asScoped(request).callAsCurrentUser; + getLegacyScopedClusterClient(clusterClient: ILegacyClusterClient) { + return clusterClient.asScoped(request); }, }); } diff --git a/x-pack/plugins/actions/server/types.ts b/x-pack/plugins/actions/server/types.ts index ca5da2779139e9..a8e19e3ff2e79d 100644 --- a/x-pack/plugins/actions/server/types.ts +++ b/x-pack/plugins/actions/server/types.ts @@ -25,9 +25,7 @@ export type SpaceIdToNamespaceFunction = (spaceId?: string) => string | undefine export interface Services { callCluster: ILegacyScopedClusterClient['callAsCurrentUser']; savedObjectsClient: SavedObjectsClientContract; - getScopedCallCluster( - clusterClient: ILegacyClusterClient - ): ILegacyScopedClusterClient['callAsCurrentUser']; + getLegacyScopedClusterClient(clusterClient: ILegacyClusterClient): ILegacyScopedClusterClient; } declare module 'src/core/server' { diff --git a/x-pack/plugins/alerts/README.md b/x-pack/plugins/alerts/README.md index 8ffd39df8f22e1..10568abbe3c72c 100644 --- a/x-pack/plugins/alerts/README.md +++ b/x-pack/plugins/alerts/README.md @@ -105,7 +105,7 @@ This is the primary function for an alert type. Whenever the alert needs to exec |---|---| |services.callCluster(path, opts)|Use this to do Elasticsearch queries on the cluster Kibana connects to. This function is the same as any other `callCluster` in Kibana but in the context of the user who created the alert when security is enabled.| |services.savedObjectsClient|This is an instance of the saved objects client. This provides the ability to do CRUD on any saved objects within the same space the alert lives in.

The scope of the saved objects client is tied to the user who created the alert (only when security isenabled).| -|services.getScopedCallCluster|This function scopes an instance of CallCluster by returning a `callCluster(path, opts)` function that runs in the context of the user who created the alert when security is enabled. This must only be called with instances of CallCluster provided by core.| +|services.getLegacyScopedClusterClient|This function returns an instance of the LegacyScopedClusterClient scoped to the user who created the alert when security is enabled.| |services.alertInstanceFactory(id)|This [alert instance factory](#alert-instance-factory) creates instances of alerts and must be used in order to execute actions. The id you give to the alert instance factory is a unique identifier to the alert instance.| |services.log(tags, [data], [timestamp])|Use this to create server logs. (This is the same function as server.log)| |startedAt|The date and time the alert type started execution.| diff --git a/x-pack/plugins/alerts/server/alerts_client.ts b/x-pack/plugins/alerts/server/alerts_client.ts index 9f1cd0b8ab6b6c..1f286b42c14491 100644 --- a/x-pack/plugins/alerts/server/alerts_client.ts +++ b/x-pack/plugins/alerts/server/alerts_client.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { omit, isEqual, map, uniq, pick } from 'lodash'; +import { omit, isEqual, map, uniq, pick, truncate } from 'lodash'; import { i18n } from '@kbn/i18n'; import { Logger, @@ -64,7 +64,7 @@ export interface ConstructorOptions { spaceId?: string; namespace?: string; getUserName: () => Promise; - createAPIKey: () => Promise; + createAPIKey: (name: string) => Promise; invalidateAPIKey: (params: InvalidateAPIKeyParams) => Promise; getActionsClient: () => Promise; } @@ -141,7 +141,7 @@ export class AlertsClient { private readonly unsecuredSavedObjectsClient: SavedObjectsClientContract; private readonly authorization: AlertsAuthorization; private readonly alertTypeRegistry: AlertTypeRegistry; - private readonly createAPIKey: () => Promise; + private readonly createAPIKey: (name: string) => Promise; private readonly invalidateAPIKey: ( params: InvalidateAPIKeyParams ) => Promise; @@ -191,7 +191,10 @@ export class AlertsClient { const validatedAlertTypeParams = validateAlertTypeParams(alertType, data.params); const username = await this.getUserName(); - const createdAPIKey = data.enabled ? await this.createAPIKey() : null; + + const createdAPIKey = data.enabled + ? await this.createAPIKey(this.generateAPIKeyName(alertType.id, data.name)) + : null; this.validateActions(alertType, data.actions); @@ -407,7 +410,9 @@ export class AlertsClient { const { actions, references } = await this.denormalizeActions(data.actions); const username = await this.getUserName(); - const createdAPIKey = attributes.enabled ? await this.createAPIKey() : null; + const createdAPIKey = attributes.enabled + ? 
await this.createAPIKey(this.generateAPIKeyName(alertType.id, data.name)) + : null; const apiKeyAttributes = this.apiKeyAsAlertAttributes(createdAPIKey, username); const updatedObject = await this.unsecuredSavedObjectsClient.update( @@ -488,7 +493,10 @@ export class AlertsClient { id, { ...attributes, - ...this.apiKeyAsAlertAttributes(await this.createAPIKey(), username), + ...this.apiKeyAsAlertAttributes( + await this.createAPIKey(this.generateAPIKeyName(attributes.alertTypeId, attributes.name)), + username + ), updatedBy: username, }, { version } @@ -556,7 +564,12 @@ export class AlertsClient { { ...attributes, enabled: true, - ...this.apiKeyAsAlertAttributes(await this.createAPIKey(), username), + ...this.apiKeyAsAlertAttributes( + await this.createAPIKey( + this.generateAPIKeyName(attributes.alertTypeId, attributes.name) + ), + username + ), updatedBy: username, }, { version } @@ -864,4 +877,8 @@ export class AlertsClient { private includeFieldsRequiredForAuthentication(fields: string[]): string[] { return uniq([...fields, 'alertTypeId', 'consumer']); } + + private generateAPIKeyName(alertTypeId: string, alertName: string) { + return truncate(`Alerting: ${alertTypeId}/${alertName}`, { length: 256 }); + } } diff --git a/x-pack/plugins/alerts/server/alerts_client_factory.ts b/x-pack/plugins/alerts/server/alerts_client_factory.ts index 79c527c1b993d1..1b405e7fcd0b4c 100644 --- a/x-pack/plugins/alerts/server/alerts_client_factory.ts +++ b/x-pack/plugins/alerts/server/alerts_client_factory.ts @@ -90,7 +90,7 @@ export class AlertsClientFactory { const user = await securityPluginSetup.authc.getCurrentUser(request); return user ? user.username : null; }, - async createAPIKey() { + async createAPIKey(name: string) { if (!securityPluginSetup) { return { apiKeysEnabled: false }; } @@ -98,7 +98,11 @@ export class AlertsClientFactory { // API key for the user, instead of having the user create it themselves, which requires api_key // privileges const createAPIKeyResult = await securityPluginSetup.authc.grantAPIKeyAsInternalUser( - request + request, + { + name, + role_descriptors: {}, + } ); if (!createAPIKeyResult) { return { apiKeysEnabled: false }; diff --git a/x-pack/plugins/alerts/server/mocks.ts b/x-pack/plugins/alerts/server/mocks.ts index 84f79d53f218cb..c39aa13b580fcd 100644 --- a/x-pack/plugins/alerts/server/mocks.ts +++ b/x-pack/plugins/alerts/server/mocks.ts @@ -59,7 +59,7 @@ const createAlertServicesMock = () => { .fn, [string]>() .mockReturnValue(alertInstanceFactoryMock), callCluster: elasticsearchServiceMock.createLegacyScopedClusterClient().callAsCurrentUser, - getScopedCallCluster: jest.fn(), + getLegacyScopedClusterClient: jest.fn(), savedObjectsClient: savedObjectsClientMock.create(), }; }; diff --git a/x-pack/plugins/alerts/server/plugin.ts b/x-pack/plugins/alerts/server/plugin.ts index 6ca65ac152ee33..7a0916b9d6554c 100644 --- a/x-pack/plugins/alerts/server/plugin.ts +++ b/x-pack/plugins/alerts/server/plugin.ts @@ -272,8 +272,8 @@ export class AlertingPlugin { return (request) => ({ callCluster: elasticsearch.legacy.client.asScoped(request).callAsCurrentUser, savedObjectsClient: this.getScopedClientWithAlertSavedObjectType(savedObjects, request), - getScopedCallCluster(clusterClient: ILegacyClusterClient) { - return clusterClient.asScoped(request).callAsCurrentUser; + getLegacyScopedClusterClient(clusterClient: ILegacyClusterClient) { + return clusterClient.asScoped(request); }, }); } diff --git a/x-pack/plugins/alerts/server/types.ts b/x-pack/plugins/alerts/server/types.ts 
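As a quick illustration of the `getScopedCallCluster` → `getLegacyScopedClusterClient` rename running through the actions and alerts changes above: the service now returns the whole scoped cluster client rather than a bound `callCluster` function. The sketch below shows how an executor-side caller might use it; the `ExecutorServices` interface merely mirrors the `Services` change in the `types.ts` files of this PR, and `myClusterClient`, `'my-index'`, and the `count` call are illustrative assumptions, not code from this PR.

```typescript
import { ILegacyClusterClient, ILegacyScopedClusterClient } from 'src/core/server';

// Mirrors the reworked Services interface; only the member relevant to this change is included.
interface ExecutorServices {
  getLegacyScopedClusterClient(clusterClient: ILegacyClusterClient): ILegacyScopedClusterClient;
}

// `myClusterClient` stands in for an ILegacyClusterClient a plugin obtained from core
// (for example via elasticsearch.legacy.createClient) — assumed here for illustration.
export async function countRecentDocs(
  services: ExecutorServices,
  myClusterClient: ILegacyClusterClient
): Promise<number> {
  // Previously the service returned callAsCurrentUser directly; it now returns the
  // scoped client itself, so the caller picks which method to invoke.
  const scopedClient = services.getLegacyScopedClusterClient(myClusterClient);
  const response = await scopedClient.callAsCurrentUser('count', { index: 'my-index' });
  return response.count;
}
```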
index 24dfb391f0791f..66eec370f2c205 100644 --- a/x-pack/plugins/alerts/server/types.ts +++ b/x-pack/plugins/alerts/server/types.ts @@ -40,9 +40,7 @@ declare module 'src/core/server' { export interface Services { callCluster: ILegacyScopedClusterClient['callAsCurrentUser']; savedObjectsClient: SavedObjectsClientContract; - getScopedCallCluster( - clusterClient: ILegacyClusterClient - ): ILegacyScopedClusterClient['callAsCurrentUser']; + getLegacyScopedClusterClient(clusterClient: ILegacyClusterClient): ILegacyScopedClusterClient; } export interface AlertServices extends Services { diff --git a/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap b/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap index 4ee7692222d689..1d8cfa28aea75e 100644 --- a/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap +++ b/x-pack/plugins/apm/common/__snapshots__/apm_telemetry.test.ts.snap @@ -1,929 +1,973 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`APM telemetry helpers getApmTelemetry generates a JSON object with the telemetry mapping 1`] = ` -Object { - "properties": Object { - "agents": Object { - "properties": Object { - "dotnet": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "go": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "java": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": 
Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "js-base": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "nodejs": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "python": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - 
"ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "ruby": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - "rum-js": Object { - "properties": Object { - "agent": Object { - "properties": Object { - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "service": Object { - "properties": Object { - "framework": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "language": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "runtime": Object { - "properties": Object { - "composite": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "name": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "version": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - }, - }, - }, - }, - }, - }, - "cardinality": Object { - "properties": Object { - "transaction": Object { - "properties": Object { - "name": Object { - "properties": Object { - "all_agents": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - "rum": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - }, - }, - }, - }, - "user_agent": Object { - "properties": Object { - "original": Object { - "properties": Object { - "all_agents": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - "rum": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - }, - }, - }, - }, - }, - }, - "cloud": Object { - "properties": Object { - "availability_zone": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "provider": Object { - "ignore_above": 1024, - "type": "keyword", - }, - "region": Object { - "ignore_above": 1024, - "type": "keyword", - }, - }, - }, - "counts": Object { - "properties": Object { 
- "agent_configuration": Object { - "properties": Object { - "all": Object { - "type": "long", - }, - }, - }, - "error": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - "all": Object { - "type": "long", - }, - }, - }, - "max_error_groups_per_service": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - "max_transaction_groups_per_service": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - "metric": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - "all": Object { - "type": "long", - }, - }, - }, - "onboarding": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - "all": Object { - "type": "long", - }, - }, - }, - "services": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - "sourcemap": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - "all": Object { - "type": "long", - }, - }, - }, - "span": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - "all": Object { - "type": "long", - }, - }, - }, - "traces": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - }, - }, - "transaction": Object { - "properties": Object { - "1d": Object { - "type": "long", - }, - "all": Object { - "type": "long", - }, - }, - }, - }, - }, - "has_any_services": Object { - "type": "boolean", - }, - "indices": Object { - "properties": Object { - "all": Object { - "properties": Object { - "total": Object { - "properties": Object { - "docs": Object { - "properties": Object { - "count": Object { - "type": "long", - }, - }, - }, - "store": Object { - "properties": Object { - "size_in_bytes": Object { - "type": "long", - }, - }, - }, - }, - }, - }, - }, - "shards": Object { - "properties": Object { - "total": Object { - "type": "long", - }, - }, - }, - }, - }, - "integrations": Object { - "properties": Object { - "ml": Object { - "properties": Object { - "all_jobs_count": Object { - "type": "long", - }, - }, - }, - }, - }, - "retainment": Object { - "properties": Object { - "error": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - "metric": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - "onboarding": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - "span": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - "transaction": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "services_per_agent": Object { - "properties": Object { - "dotnet": Object { - "null_value": 0, - "type": "long", - }, - "go": Object { - "null_value": 0, - "type": "long", - }, - "java": Object { - "null_value": 0, - "type": "long", - }, - "js-base": Object { - "null_value": 0, - "type": "long", - }, - "nodejs": Object { - "null_value": 0, - "type": "long", - }, - "python": Object { - "null_value": 0, - "type": "long", - }, - "ruby": Object { - "null_value": 0, - "type": "long", - }, - "rum-js": Object { - "null_value": 0, - "type": "long", - }, - }, - }, - "tasks": Object { - "properties": Object { - "agent_configuration": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "agents": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - 
"cardinality": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "groupings": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "indices_stats": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "integrations": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "processor_events": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "services": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - "versions": Object { - "properties": Object { - "took": Object { - "properties": Object { - "ms": Object { - "type": "long", - }, - }, - }, - }, - }, - }, - }, - "version": Object { - "properties": Object { - "apm_server": Object { - "properties": Object { - "major": Object { - "type": "long", - }, - "minor": Object { - "type": "long", - }, - "patch": Object { - "type": "long", - }, - }, - }, - }, - }, - }, +{ + "properties": { + "stack_stats": { + "properties": { + "kibana": { + "properties": { + "plugins": { + "properties": { + "apm": { + "properties": { + "agents": { + "properties": { + "dotnet": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "go": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "java": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + 
}, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "js-base": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "nodejs": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "python": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "ruby": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + 
"type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + }, + "rum-js": { + "properties": { + "agent": { + "properties": { + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "service": { + "properties": { + "framework": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "language": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "runtime": { + "properties": { + "composite": { + "type": "keyword", + "ignore_above": 1024 + }, + "name": { + "type": "keyword", + "ignore_above": 1024 + }, + "version": { + "type": "keyword", + "ignore_above": 1024 + } + } + } + } + } + } + } + } + }, + "cloud": { + "properties": { + "availability_zone": { + "type": "keyword", + "ignore_above": 1024 + }, + "provider": { + "type": "keyword", + "ignore_above": 1024 + }, + "region": { + "type": "keyword", + "ignore_above": 1024 + } + } + }, + "counts": { + "properties": { + "agent_configuration": { + "properties": { + "all": { + "type": "long" + } + } + }, + "error": { + "properties": { + "1d": { + "type": "long" + }, + "all": { + "type": "long" + } + } + }, + "max_error_groups_per_service": { + "properties": { + "1d": { + "type": "long" + } + } + }, + "max_transaction_groups_per_service": { + "properties": { + "1d": { + "type": "long" + } + } + }, + "metric": { + "properties": { + "1d": { + "type": "long" + }, + "all": { + "type": "long" + } + } + }, + "onboarding": { + "properties": { + "1d": { + "type": "long" + }, + "all": { + "type": "long" + } + } + }, + "services": { + "properties": { + "1d": { + "type": "long" + } + } + }, + "sourcemap": { + "properties": { + "1d": { + "type": "long" + }, + "all": { + "type": "long" + } + } + }, + "span": { + "properties": { + "1d": { + "type": "long" + }, + "all": { + "type": "long" + } + } + }, + "traces": { + "properties": { + "1d": { + "type": "long" + } + } + }, + "transaction": { + "properties": { + "1d": { + "type": "long" + }, + "all": { + "type": "long" + } + } + } + } + }, + "cardinality": { + "properties": { + "client": { + "properties": { + "geo": { + "properites": { + "country_iso_code": { + "rum": { + "properties": { + "1d": { + "type": "long" + } + } + } + } + } + } + } + }, + "user_agent": { + "properties": { + "original": { + "properties": { + "all_agents": { + "properties": { + "1d": { + "type": "long" + } + } + }, + "rum": { + "properties": { + "1d": { + "type": "long" + } + } + } + } + } + } + }, + "transaction": { + "properties": { + "name": { + "properties": { + "all_agents": { + "properties": { + "1d": { + "type": "long" + } + } + }, + "rum": { + "properties": { + "1d": { + "type": "long" + } + } + } + } + } + } + } + } + }, + "has_any_services": { + "type": 
"boolean" + }, + "indices": { + "properties": { + "all": { + "properties": { + "total": { + "properties": { + "docs": { + "properties": { + "count": { + "type": "long" + } + } + }, + "store": { + "properties": { + "size_in_bytes": { + "type": "long" + } + } + } + } + } + } + }, + "shards": { + "properties": { + "total": { + "type": "long" + } + } + } + } + }, + "integrations": { + "properties": { + "ml": { + "properties": { + "all_jobs_count": { + "type": "long" + } + } + } + } + }, + "retainment": { + "properties": { + "error": { + "properties": { + "ms": { + "type": "long" + } + } + }, + "metric": { + "properties": { + "ms": { + "type": "long" + } + } + }, + "onboarding": { + "properties": { + "ms": { + "type": "long" + } + } + }, + "span": { + "properties": { + "ms": { + "type": "long" + } + } + }, + "transaction": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "services_per_agent": { + "properties": { + "dotnet": { + "type": "long", + "null_value": 0 + }, + "go": { + "type": "long", + "null_value": 0 + }, + "java": { + "type": "long", + "null_value": 0 + }, + "js-base": { + "type": "long", + "null_value": 0 + }, + "nodejs": { + "type": "long", + "null_value": 0 + }, + "python": { + "type": "long", + "null_value": 0 + }, + "ruby": { + "type": "long", + "null_value": 0 + }, + "rum-js": { + "type": "long", + "null_value": 0 + } + } + }, + "tasks": { + "properties": { + "agent_configuration": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "agents": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "cardinality": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "cloud": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "groupings": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "indices_stats": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "integrations": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "processor_events": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "services": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "versions": { + "properties": { + "took": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + } + } + }, + "version": { + "properties": { + "apm_server": { + "properties": { + "major": { + "type": "long" + }, + "minor": { + "type": "long" + }, + "patch": { + "type": "long" + } + } + } + } + } + } + } + } + } + } + } + } + } + } } `; diff --git a/x-pack/plugins/apm/common/apm_telemetry.test.ts b/x-pack/plugins/apm/common/apm_telemetry.test.ts index 1612716142ce70..035c546a5b49a2 100644 --- a/x-pack/plugins/apm/common/apm_telemetry.test.ts +++ b/x-pack/plugins/apm/common/apm_telemetry.test.ts @@ -4,48 +4,43 @@ * you may not use this file except in compliance with the Elastic License. */ -import { - getApmTelemetryMapping, - mergeApmTelemetryMapping, -} from './apm_telemetry'; +import { getApmTelemetryMapping } from './apm_telemetry'; + +// Add this snapshot serializer for this test. The default snapshot serializer +// prints "Object" next to objects in the JSON output, but we want to be able to +// Use the output from this JSON snapshot to share with the telemetry team. 
When +// new fields are added to the mapping, we'll have a diff in the snapshot. +expect.addSnapshotSerializer({ + print: (contents) => { + return JSON.stringify(contents, null, 2); + }, + test: () => true, +}); describe('APM telemetry helpers', () => { describe('getApmTelemetry', () => { + // This test creates a snapshot with the JSON of our full telemetry mapping + // that can be PUT in a query to the index on the telemetry cluster. Sharing + // the contents of the snapshot with the telemetry team can provide them with + // useful information about changes to our telmetry. it('generates a JSON object with the telemetry mapping', () => { - expect(getApmTelemetryMapping()).toMatchSnapshot(); - }); - }); - - describe('mergeApmTelemetryMapping', () => { - describe('with an invalid mapping', () => { - it('throws an error', () => { - expect(() => mergeApmTelemetryMapping({})).toThrowError(); - }); - }); - - describe('with a valid mapping', () => { - it('merges the mapping', () => { - // This is "valid" in the sense that it has all of the deep fields - // needed to merge. It's not a valid mapping opbject. - const validTelemetryMapping = { - mappings: { + expect({ + properties: { + stack_stats: { properties: { - stack_stats: { + kibana: { properties: { - kibana: { - properties: { plugins: { properties: { apm: {} } } }, + plugins: { + properties: { + apm: getApmTelemetryMapping(), + }, }, }, }, }, }, - }; - - expect( - mergeApmTelemetryMapping(validTelemetryMapping)?.mappings.properties - .stack_stats.properties.kibana.properties.plugins.properties.apm - ).toEqual(getApmTelemetryMapping()); - }); + }, + }).toMatchSnapshot(); }); }); }); diff --git a/x-pack/plugins/apm/common/apm_telemetry.ts b/x-pack/plugins/apm/common/apm_telemetry.ts index 5837648f3e5054..5fb6414674d1cb 100644 --- a/x-pack/plugins/apm/common/apm_telemetry.ts +++ b/x-pack/plugins/apm/common/apm_telemetry.ts @@ -3,7 +3,6 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { produce } from 'immer'; import { AGENT_NAMES } from './agent_name'; /** @@ -115,6 +114,15 @@ export function getApmTelemetryMapping() { }, cardinality: { properties: { + client: { + properties: { + geo: { + properites: { + country_iso_code: { rum: oneDayProperties }, + }, + }, + }, + }, user_agent: { properties: { original: { @@ -199,6 +207,7 @@ export function getApmTelemetryMapping() { agent_configuration: tookProperties, agents: tookProperties, cardinality: tookProperties, + cloud: tookProperties, groupings: tookProperties, indices_stats: tookProperties, integrations: tookProperties, @@ -221,16 +230,3 @@ export function getApmTelemetryMapping() { }, }; } - -/** - * Merge a telemetry mapping object (from https://github.com/elastic/telemetry/blob/master/config/templates/xpack-phone-home.json) - * with the output from `getApmTelemetryMapping`. 
- */ -export function mergeApmTelemetryMapping( - xpackPhoneHomeMapping: Record -) { - return produce(xpackPhoneHomeMapping, (draft: Record) => { - draft.mappings.properties.stack_stats.properties.kibana.properties.plugins.properties.apm = getApmTelemetryMapping(); - return draft; - }); -} diff --git a/x-pack/plugins/apm/dev_docs/telemetry.md b/x-pack/plugins/apm/dev_docs/telemetry.md index fa8e057a595954..d61afbe07522f1 100644 --- a/x-pack/plugins/apm/dev_docs/telemetry.md +++ b/x-pack/plugins/apm/dev_docs/telemetry.md @@ -55,20 +55,16 @@ The mapping for the telemetry data is here under `stack_stats.kibana.plugins.apm The mapping used there can be generated with the output of the [`getTelemetryMapping`](../common/apm_telemetry.ts) function. -To make a change to the mapping, edit this function, run the tests to update the snapshots, then use the `merge_telemetry_mapping` script to merge the data into the telemetry repository. +The `schema` property of the `makeUsageCollector` call in the [`createApmTelemetry` function](../server/lib/apm_telemetry/index.ts) contains the output of `getTelemetryMapping`. -If the [telemetry repository](https://github.com/elastic/telemetry) is cloned as a sibling to the kibana directory, you can run the following from x-pack/plugins/apm: - -```bash -node ./scripts/merge-telemetry-mapping.js ../../../../telemetry/config/templates/xpack-phone-home.json -``` - -this will replace the contents of the mapping in the repository checkout with the updated mapping. You can then [follow the telemetry team's instructions](https://github.com/elastic/telemetry#mappings) for opening a pull request with the mapping changes. +When adding a task, the key of the task and its `took` properties need to be added under the `tasks` properties in the mapping, since tasks report the time they took when they run. The queries for the stats are in the [collect data telemetry tasks](../server/lib/apm_telemetry/collect_data_telemetry/tasks.ts). The collection tasks also use the [`APMDataTelemetry` type](../server/lib/apm_telemetry/types.ts), which needs to be updated with any changes to the fields. +Running `node scripts/telemetry_check --fix` from the root Kibana directory will update the schemas, which should automatically notify the Telemetry team when a pull request is opened so they can update the mapping in the telemetry clusters. (At the time of this writing the APM schema is excluded; #70180 is open to remove these exclusions, so for now any pull request with mapping changes will have to manually request the Telemetry team as a reviewer.) + ## Behavioral Telemetry Behavioral telemetry is recorded with the ui_metrics and application_usage methods from the Usage Collection plugin. diff --git a/x-pack/plugins/apm/scripts/merge-telemetry-mapping.js b/x-pack/plugins/apm/scripts/merge-telemetry-mapping.js deleted file mode 100644 index 741df981a9cb0a..00000000000000 --- a/x-pack/plugins/apm/scripts/merge-telemetry-mapping.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
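To make the workflow described in the `dev_docs/telemetry.md` change above concrete, here is a rough sketch of how the mapping ends up as the collector `schema`. It paraphrases the `createApmTelemetry` change that appears further down in this diff; the function name `registerApmUsageCollector`, the import path, and the empty `fetch` body are placeholders, not the actual implementation.

```typescript
import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
import { getApmTelemetryMapping } from '../../../common/apm_telemetry';

// Sketch: the telemetry mapping doubles as the usage collector schema, which is
// what `node scripts/telemetry_check --fix` reads when updating the schemas.
export function registerApmUsageCollector(usageCollector: UsageCollectionSetup) {
  const collector = usageCollector.makeUsageCollector({
    type: 'apm',
    schema: getApmTelemetryMapping(),
    isReady: () => true,
    fetch: async () => {
      // The real implementation returns the data gathered by the APM telemetry task.
      return {};
    },
  });
  usageCollector.registerCollector(collector);
}
```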
- */ - -// compile typescript on the fly -// eslint-disable-next-line import/no-extraneous-dependencies -require('@babel/register')({ - extensions: ['.ts'], - plugins: [ - '@babel/plugin-proposal-optional-chaining', - '@babel/plugin-proposal-nullish-coalescing-operator', - ], - presets: [ - '@babel/typescript', - ['@babel/preset-env', { targets: { node: 'current' } }], - ], -}); - -require('./merge-telemetry-mapping/index.ts'); diff --git a/x-pack/plugins/apm/scripts/merge-telemetry-mapping/index.ts b/x-pack/plugins/apm/scripts/merge-telemetry-mapping/index.ts deleted file mode 100644 index c06d4cec150dcf..00000000000000 --- a/x-pack/plugins/apm/scripts/merge-telemetry-mapping/index.ts +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { readFileSync, truncateSync, writeFileSync } from 'fs'; -import { resolve } from 'path'; -import { argv } from 'yargs'; -import { mergeApmTelemetryMapping } from '../../common/apm_telemetry'; - -function errorExit(error?: Error) { - console.error(`usage: ${argv.$0} /path/to/xpack-phone-home.json`); // eslint-disable-line no-console - if (error) { - throw error; - } - process.exit(1); -} - -try { - const filename = resolve(argv._[0]); - const xpackPhoneHomeMapping = JSON.parse(readFileSync(filename, 'utf-8')); - - const newMapping = mergeApmTelemetryMapping(xpackPhoneHomeMapping); - - truncateSync(filename); - writeFileSync(filename, JSON.stringify(newMapping, null, 2)); -} catch (error) { - errorExit(error); -} diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.test.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.test.ts index e3161b49b315d9..ea2b57c01acff7 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.test.ts +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.test.ts @@ -16,7 +16,7 @@ describe('data telemetry collection tasks', () => { } as ApmIndicesConfig; describe('cloud', () => { - const cloudTask = tasks.find((task) => task.name === 'cloud'); + const task = tasks.find((t) => t.name === 'cloud'); it('returns a map of cloud provider data', async () => { const search = jest.fn().mockResolvedValueOnce({ @@ -42,7 +42,7 @@ describe('data telemetry collection tasks', () => { }, }); - expect(await cloudTask?.executor({ indices, search } as any)).toEqual({ + expect(await task?.executor({ indices, search } as any)).toEqual({ cloud: { availability_zone: ['us-west-1', 'europe-west1-c'], provider: ['aws', 'gcp'], @@ -55,7 +55,7 @@ describe('data telemetry collection tasks', () => { it('returns an empty map', async () => { const search = jest.fn().mockResolvedValueOnce({}); - expect(await cloudTask?.executor({ indices, search } as any)).toEqual({ + expect(await task?.executor({ indices, search } as any)).toEqual({ cloud: { availability_zone: [], provider: [], @@ -66,8 +66,83 @@ describe('data telemetry collection tasks', () => { }); }); + describe('processor_events', () => { + const task = tasks.find((t) => t.name === 'processor_events'); + + it('returns a map of processor events', async () => { + const getTime = jest + .spyOn(Date.prototype, 'getTime') + .mockReturnValue(1594330792957); + + const search = jest.fn().mockImplementation((params: any) => { + const isTotalHitsQuery = 
params?.body?.track_total_hits; + + return Promise.resolve( + isTotalHitsQuery + ? { hits: { total: { value: 1 } } } + : { + hits: { + hits: [{ _source: { '@timestamp': 1 } }], + }, + } + ); + }); + + expect(await task?.executor({ indices, search } as any)).toEqual({ + counts: { + error: { + '1d': 1, + all: 1, + }, + metric: { + '1d': 1, + all: 1, + }, + onboarding: { + '1d': 1, + all: 1, + }, + sourcemap: { + '1d': 1, + all: 1, + }, + span: { + '1d': 1, + all: 1, + }, + transaction: { + '1d': 1, + all: 1, + }, + }, + retainment: { + error: { + ms: 0, + }, + metric: { + ms: 0, + }, + onboarding: { + ms: 0, + }, + sourcemap: { + ms: 0, + }, + span: { + ms: 0, + }, + transaction: { + ms: 0, + }, + }, + }); + + getTime.mockRestore(); + }); + }); + describe('integrations', () => { - const integrationsTask = tasks.find((task) => task.name === 'integrations'); + const task = tasks.find((t) => t.name === 'integrations'); it('returns the count of ML jobs', async () => { const transportRequest = jest @@ -75,7 +150,7 @@ describe('data telemetry collection tasks', () => { .mockResolvedValueOnce({ body: { count: 1 } }); expect( - await integrationsTask?.executor({ indices, transportRequest } as any) + await task?.executor({ indices, transportRequest } as any) ).toEqual({ integrations: { ml: { @@ -90,7 +165,7 @@ describe('data telemetry collection tasks', () => { const transportRequest = jest.fn().mockResolvedValueOnce({}); expect( - await integrationsTask?.executor({ indices, transportRequest } as any) + await task?.executor({ indices, transportRequest } as any) ).toEqual({ integrations: { ml: { @@ -101,4 +176,93 @@ describe('data telemetry collection tasks', () => { }); }); }); + + describe('indices_stats', () => { + const task = tasks.find((t) => t.name === 'indices_stats'); + + it('returns a map of index stats', async () => { + const indicesStats = jest.fn().mockResolvedValueOnce({ + _all: { total: { docs: { count: 1 }, store: { size_in_bytes: 1 } } }, + _shards: { total: 1 }, + }); + + expect(await task?.executor({ indices, indicesStats } as any)).toEqual({ + indices: { + shards: { + total: 1, + }, + all: { + total: { + docs: { + count: 1, + }, + store: { + size_in_bytes: 1, + }, + }, + }, + }, + }); + }); + + describe('with no results', () => { + it('returns zero values', async () => { + const indicesStats = jest.fn().mockResolvedValueOnce({}); + + expect(await task?.executor({ indices, indicesStats } as any)).toEqual({ + indices: { + shards: { + total: 0, + }, + all: { + total: { + docs: { + count: 0, + }, + store: { + size_in_bytes: 0, + }, + }, + }, + }, + }); + }); + }); + }); + + describe('cardinality', () => { + const task = tasks.find((t) => t.name === 'cardinality'); + + it('returns cardinalities', async () => { + const search = jest.fn().mockImplementation((params: any) => { + const isRumQuery = params.body.query.bool.filter.length === 2; + if (isRumQuery) { + return Promise.resolve({ + aggregations: { + 'client.geo.country_iso_code': { value: 5 }, + 'transaction.name': { value: 1 }, + 'user_agent.original': { value: 2 }, + }, + }); + } else { + return Promise.resolve({ + aggregations: { + 'transaction.name': { value: 3 }, + 'user_agent.original': { value: 4 }, + }, + }); + } + }); + + expect(await task?.executor({ search } as any)).toEqual({ + cardinality: { + client: { geo: { country_iso_code: { rum: { '1d': 5 } } } }, + transaction: { name: { all_agents: { '1d': 3 }, rum: { '1d': 1 } } }, + user_agent: { + original: { all_agents: { '1d': 4 }, rum: { '1d': 2 } }, + }, + }, + }); + }); 
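The expectation above pairs with the `cardinality` task change further down in this diff, which adds a `client.geo.country_iso_code` cardinality aggregation to the RUM-only query. Roughly, the request body the task sends looks like the sketch below; field names are inlined instead of the plugin's `AGENT_NAME` / `CLIENT_GEO_COUNTRY_ISO_CODE` constants, and details such as precision settings are omitted.

```typescript
// Approximate shape of the RUM cardinality search issued by the task (illustrative only).
export const rumCardinalityRequestBody = {
  size: 0,
  timeout: '5m',
  query: {
    bool: {
      filter: [
        { range: { '@timestamp': { gte: 'now-1d' } } },
        { terms: { 'agent.name': ['rum-js', 'js-base'] } },
      ],
    },
  },
  aggs: {
    'client.geo.country_iso_code': {
      cardinality: { field: 'client.geo.country_iso_code' },
    },
    'transaction.name': { cardinality: { field: 'transaction.name' } },
    'user_agent.original': { cardinality: { field: 'user_agent.original' } },
  },
};
```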
+ }); }); diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.ts index 4bbaaf3e86e780..2ecb5a935893f4 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.ts +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/collect_data_telemetry/tasks.ts @@ -9,6 +9,7 @@ import { AGENT_NAMES } from '../../../../common/agent_name'; import { AGENT_NAME, AGENT_VERSION, + CLIENT_GEO_COUNTRY_ISO_CODE, CLOUD_AVAILABILITY_ZONE, CLOUD_PROVIDER, CLOUD_REGION, @@ -34,6 +35,9 @@ import { APMTelemetry } from '../types'; const TIME_RANGES = ['1d', 'all'] as const; type TimeRange = typeof TIME_RANGES[number]; +const range1d = { range: { '@timestamp': { gte: 'now-1d' } } }; +const timeout = '5m'; + export const tasks: TelemetryTask[] = [ { name: 'cloud', @@ -62,6 +66,7 @@ export const tasks: TelemetryTask[] = [ ], body: { size: 0, + timeout, aggs: { [az]: { terms: { @@ -109,15 +114,14 @@ export const tasks: TelemetryTask[] = [ type ProcessorEvent = keyof typeof indicesByProcessorEvent; - const jobs: Array<{ + interface Job { processorEvent: ProcessorEvent; timeRange: TimeRange; - }> = flatten( - (Object.keys( - indicesByProcessorEvent - ) as ProcessorEvent[]).map((processorEvent) => - TIME_RANGES.map((timeRange) => ({ processorEvent, timeRange })) - ) + } + + const events = Object.keys(indicesByProcessorEvent) as ProcessorEvent[]; + const jobs: Job[] = events.flatMap((processorEvent) => + TIME_RANGES.map((timeRange) => ({ processorEvent, timeRange })) ); const allData = await jobs.reduce((prevJob, current) => { @@ -128,21 +132,12 @@ export const tasks: TelemetryTask[] = [ index: indicesByProcessorEvent[processorEvent], body: { size: 0, + timeout, query: { bool: { filter: [ { term: { [PROCESSOR_EVENT]: processorEvent } }, - ...(timeRange !== 'all' - ? [ - { - range: { - '@timestamp': { - gte: `now-${timeRange}`, - }, - }, - }, - ] - : []), + ...(timeRange === '1d' ? [range1d] : []), ], }, }, @@ -155,6 +150,7 @@ export const tasks: TelemetryTask[] = [ ? 
await search({ index: indicesByProcessorEvent[processorEvent], body: { + timeout, query: { bool: { filter: [ @@ -208,6 +204,7 @@ export const tasks: TelemetryTask[] = [ index: indices.apmAgentConfigurationIndex, body: { size: 0, + timeout, track_total_hits: true, }, }) @@ -237,6 +234,7 @@ export const tasks: TelemetryTask[] = [ ], body: { size: 0, + timeout, query: { bool: { filter: [ @@ -245,13 +243,7 @@ export const tasks: TelemetryTask[] = [ [AGENT_NAME]: agentName, }, }, - { - range: { - '@timestamp': { - gte: 'now-1d', - }, - }, - }, + range1d, ], }, }, @@ -297,6 +289,7 @@ export const tasks: TelemetryTask[] = [ }, }, size: 1, + timeout, sort: { '@timestamp': 'desc', }, @@ -330,12 +323,12 @@ export const tasks: TelemetryTask[] = [ { name: 'groupings', executor: async ({ search, indices }) => { - const range1d = { range: { '@timestamp': { gte: 'now-1d' } } }; const errorGroupsCount = ( await search({ index: indices['apm_oss.errorIndices'], body: { size: 0, + timeout, query: { bool: { filter: [{ term: { [PROCESSOR_EVENT]: 'error' } }, range1d], @@ -368,6 +361,7 @@ export const tasks: TelemetryTask[] = [ index: indices['apm_oss.transactionIndices'], body: { size: 0, + timeout, query: { bool: { filter: [ @@ -415,6 +409,7 @@ export const tasks: TelemetryTask[] = [ }, track_total_hits: true, size: 0, + timeout, }, }) ).hits.total.value; @@ -428,6 +423,7 @@ export const tasks: TelemetryTask[] = [ ], body: { size: 0, + timeout, query: { bool: { filter: [range1d], @@ -497,12 +493,10 @@ export const tasks: TelemetryTask[] = [ ], body: { size: 0, + timeout, query: { bool: { - filter: [ - { term: { [AGENT_NAME]: agentName } }, - { range: { '@timestamp': { gte: 'now-1d' } } }, - ], + filter: [{ term: { [AGENT_NAME]: agentName } }, range1d], }, }, sort: { @@ -699,15 +693,15 @@ export const tasks: TelemetryTask[] = [ return { indices: { shards: { - total: response._shards.total, + total: response._shards?.total ?? 0, }, all: { total: { docs: { - count: response._all.total.docs.count, + count: response._all?.total?.docs?.count ?? 0, }, store: { - size_in_bytes: response._all.total.store.size_in_bytes, + size_in_bytes: response._all?.total?.store?.size_in_bytes ?? 0, }, }, }, @@ -721,9 +715,10 @@ export const tasks: TelemetryTask[] = [ const allAgentsCardinalityResponse = await search({ body: { size: 0, + timeout, query: { bool: { - filter: [{ range: { '@timestamp': { gte: 'now-1d' } } }], + filter: [range1d], }, }, aggs: { @@ -744,15 +739,19 @@ export const tasks: TelemetryTask[] = [ const rumAgentCardinalityResponse = await search({ body: { size: 0, + timeout, query: { bool: { filter: [ - { range: { '@timestamp': { gte: 'now-1d' } } }, + range1d, { terms: { [AGENT_NAME]: ['rum-js', 'js-base'] } }, ], }, }, aggs: { + [CLIENT_GEO_COUNTRY_ISO_CODE]: { + cardinality: { field: CLIENT_GEO_COUNTRY_ISO_CODE }, + }, [TRANSACTION_NAME]: { cardinality: { field: TRANSACTION_NAME, @@ -769,6 +768,18 @@ export const tasks: TelemetryTask[] = [ return { cardinality: { + client: { + geo: { + country_iso_code: { + rum: { + '1d': + rumAgentCardinalityResponse.aggregations?.[ + CLIENT_GEO_COUNTRY_ISO_CODE + ].value, + }, + }, + }, + }, transaction: { name: { all_agents: { diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts index 632e653a2f6e94..2836cf100a4324 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/index.ts @@ -3,25 +3,26 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import { CoreSetup, Logger } from 'src/core/server'; import { Observable } from 'rxjs'; import { take } from 'rxjs/operators'; +import { CoreSetup, Logger } from 'src/core/server'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { APMConfig } from '../..'; import { - TaskManagerStartContract, TaskManagerSetupContract, + TaskManagerStartContract, } from '../../../../task_manager/server'; -import { getApmIndices } from '../settings/apm_indices/get_apm_indices'; import { APM_TELEMETRY_SAVED_OBJECT_ID, APM_TELEMETRY_SAVED_OBJECT_TYPE, } from '../../../common/apm_saved_object_constants'; +import { getApmTelemetryMapping } from '../../../common/apm_telemetry'; +import { getInternalSavedObjectsClient } from '../helpers/get_internal_saved_objects_client'; +import { getApmIndices } from '../settings/apm_indices/get_apm_indices'; import { collectDataTelemetry, CollectTelemetryParams, } from './collect_data_telemetry'; -import { APMConfig } from '../..'; -import { getInternalSavedObjectsClient } from '../helpers/get_internal_saved_objects_client'; const APM_TELEMETRY_TASK_NAME = 'apm-telemetry-task'; @@ -97,6 +98,7 @@ export async function createApmTelemetry({ const collector = usageCollector.makeUsageCollector({ type: 'apm', + schema: getApmTelemetryMapping(), fetch: async () => { try { const data = ( diff --git a/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts b/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts index a1d94333b1a08f..4c376aac52f5b5 100644 --- a/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts +++ b/x-pack/plugins/apm/server/lib/apm_telemetry/types.ts @@ -44,6 +44,7 @@ export type APMDataTelemetry = DeepPartial<{ services: TimeframeMap; }; cardinality: { + client: { geo: { country_iso_code: { rum: TimeframeMap1d } } }; user_agent: { original: { all_agents: TimeframeMap1d; diff --git a/x-pack/plugins/apm/server/lib/helpers/setup_request.ts b/x-pack/plugins/apm/server/lib/helpers/setup_request.ts index af073076a812a7..6f381d4945ab4b 100644 --- a/x-pack/plugins/apm/server/lib/helpers/setup_request.ts +++ b/x-pack/plugins/apm/server/lib/helpers/setup_request.ts @@ -112,7 +112,7 @@ function getMlSetup(context: APMRequestHandlerContext, request: KibanaRequest) { return; } const ml = context.plugins.ml; - const mlClient = ml.mlClient.asScoped(request).callAsCurrentUser; + const mlClient = ml.mlClient.asScoped(request); return { mlSystem: ml.mlSystemProvider(mlClient, request), anomalyDetectors: ml.anomalyDetectorsProvider(mlClient, request), diff --git a/x-pack/plugins/canvas/public/components/saved_elements_modal/saved_elements_modal.tsx b/x-pack/plugins/canvas/public/components/saved_elements_modal/saved_elements_modal.tsx index 44d2f70fcdfada..c318743086b44d 100644 --- a/x-pack/plugins/canvas/public/components/saved_elements_modal/saved_elements_modal.tsx +++ b/x-pack/plugins/canvas/public/components/saved_elements_modal/saved_elements_modal.tsx @@ -4,7 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import React, { Fragment, ChangeEvent, FunctionComponent, useState, useEffect } from 'react'; +import React, { + Fragment, + ChangeEvent, + FunctionComponent, + useState, + useEffect, + useRef, +} from 'react'; import PropTypes from 'prop-types'; import { EuiModal, @@ -72,12 +79,16 @@ export const SavedElementsModal: FunctionComponent = ({ removeCustomElement, updateCustomElement, }) => { + const hasLoadedElements = useRef(false); const [elementToDelete, setElementToDelete] = useState(null); const [elementToEdit, setElementToEdit] = useState(null); useEffect(() => { - findCustomElements(); - }); + if (!hasLoadedElements.current) { + hasLoadedElements.current = true; + findCustomElements(); + } + }, [findCustomElements, hasLoadedElements]); const showEditModal = (element: CustomElement) => setElementToEdit(element); const hideEditModal = () => setElementToEdit(null); diff --git a/x-pack/plugins/data_enhanced/server/search/es_search_strategy.ts b/x-pack/plugins/data_enhanced/server/search/es_search_strategy.ts index 7c1001697421f1..7b29117495a676 100644 --- a/x-pack/plugins/data_enhanced/server/search/es_search_strategy.ts +++ b/x-pack/plugins/data_enhanced/server/search/es_search_strategy.ts @@ -8,12 +8,13 @@ import { first } from 'rxjs/operators'; import { mapKeys, snakeCase } from 'lodash'; import { SearchResponse } from 'elasticsearch'; import { Observable } from 'rxjs'; -import { LegacyAPICaller, SharedGlobalConfig } from '../../../../../src/core/server'; -import { ES_SEARCH_STRATEGY } from '../../../../../src/plugins/data/common'; import { - ISearch, + LegacyAPICaller, + SharedGlobalConfig, + RequestHandlerContext, +} from '../../../../../src/core/server'; +import { ISearchOptions, - ISearchCancel, getDefaultSearchParams, getTotalLoaded, ISearchStrategy, @@ -30,11 +31,11 @@ export interface AsyncSearchResponse { export const enhancedEsSearchStrategyProvider = ( config$: Observable -): ISearchStrategy => { - const search: ISearch = async ( - context, +): ISearchStrategy => { + const search = async ( + context: RequestHandlerContext, request: IEnhancedEsSearchRequest, - options + options?: ISearchOptions ) => { const config = await config$.pipe(first()).toPromise(); const caller = context.core.elasticsearch.legacy.client.callAsCurrentUser; @@ -46,7 +47,7 @@ export const enhancedEsSearchStrategyProvider = ( : asyncSearch(caller, { ...request, params }, options); }; - const cancel: ISearchCancel = async (context, id) => { + const cancel = async (context: RequestHandlerContext, id: string) => { const method = 'DELETE'; const path = encodeURI(`/_async_search/${id}`); await context.core.elasticsearch.legacy.client.callAsCurrentUser('transport.request', { diff --git a/x-pack/plugins/event_log/server/es/context.mock.ts b/x-pack/plugins/event_log/server/es/context.mock.ts index 0c9f7b29b64119..8d5483b88c4fab 100644 --- a/x-pack/plugins/event_log/server/es/context.mock.ts +++ b/x-pack/plugins/event_log/server/es/context.mock.ts @@ -17,7 +17,7 @@ const createContextMock = () => { logger: loggingSystemMock.createLogger(), esNames: namesMock.create(), initialize: jest.fn(), - waitTillReady: jest.fn(), + waitTillReady: jest.fn(async () => true), esAdapter: clusterClientAdapterMock.create(), initialized: true, }; diff --git a/x-pack/plugins/event_log/server/es/context.test.ts b/x-pack/plugins/event_log/server/es/context.test.ts index a78e47446fef87..f30b71c99a0432 100644 --- a/x-pack/plugins/event_log/server/es/context.test.ts +++ b/x-pack/plugins/event_log/server/es/context.test.ts @@ -7,9 +7,8 
@@ import { createEsContext } from './context'; import { LegacyClusterClient, Logger } from '../../../../../src/core/server'; import { elasticsearchServiceMock, loggingSystemMock } from '../../../../../src/core/server/mocks'; -jest.mock('../lib/../../../../package.json', () => ({ - version: '1.2.3', -})); +jest.mock('../lib/../../../../package.json', () => ({ version: '1.2.3' })); +jest.mock('./init'); type EsClusterClient = Pick, 'callAsInternalUser' | 'asScoped'>; let logger: Logger; @@ -92,4 +91,16 @@ describe('createEsContext', () => { ); expect(doesIndexTemplateExist).toBeTruthy(); }); + + test('should handled failed initialization', async () => { + jest.requireMock('./init').initializeEs.mockResolvedValue(false); + const context = createEsContext({ + logger, + clusterClientPromise: Promise.resolve(clusterClient), + indexNameRoot: 'test2', + }); + context.initialize(); + const success = await context.waitTillReady(); + expect(success).toBe(false); + }); }); diff --git a/x-pack/plugins/event_log/server/es/context.ts b/x-pack/plugins/event_log/server/es/context.ts index 16a460be1793b2..8c967e68299b55 100644 --- a/x-pack/plugins/event_log/server/es/context.ts +++ b/x-pack/plugins/event_log/server/es/context.ts @@ -64,9 +64,9 @@ class EsContextImpl implements EsContext { setImmediate(async () => { try { - await this._initialize(); - this.logger.debug('readySignal.signal(true)'); - this.readySignal.signal(true); + const success = await this._initialize(); + this.logger.debug(`readySignal.signal(${success})`); + this.readySignal.signal(success); } catch (err) { this.logger.debug('readySignal.signal(false)'); this.readySignal.signal(false); @@ -74,11 +74,13 @@ class EsContextImpl implements EsContext { }); } + // waits till the ES initialization is done, returns true if it was successful, + // false if it was not successful async waitTillReady(): Promise { return await this.readySignal.wait(); } - private async _initialize() { - await initializeEs(this); + private async _initialize(): Promise { + return await initializeEs(this); } } diff --git a/x-pack/plugins/event_log/server/event_logger.test.ts b/x-pack/plugins/event_log/server/event_logger.test.ts index d4d3df3ef8267c..fde3b2de8dd36e 100644 --- a/x-pack/plugins/event_log/server/event_logger.test.ts +++ b/x-pack/plugins/event_log/server/event_logger.test.ts @@ -14,25 +14,52 @@ import { delay } from './lib/delay'; import { EVENT_LOGGED_PREFIX } from './event_logger'; const KIBANA_SERVER_UUID = '424-24-2424'; +const WRITE_LOG_WAIT_MILLIS = 3000; describe('EventLogger', () => { let systemLogger: ReturnType; - let esContext: EsContext; + let esContext: jest.Mocked; let service: IEventLogService; let eventLogger: IEventLogger; beforeEach(() => { + jest.resetAllMocks(); systemLogger = loggingSystemMock.createLogger(); esContext = contextMock.create(); service = new EventLogService({ esContext, systemLogger, - config: { enabled: true, logEntries: true, indexEntries: false }, + config: { enabled: true, logEntries: true, indexEntries: true }, kibanaUUID: KIBANA_SERVER_UUID, }); eventLogger = service.getLogger({}); }); + test('handles successful initialization', async () => { + service.registerProviderActions('test-provider', ['test-action-1']); + eventLogger = service.getLogger({ + event: { provider: 'test-provider', action: 'test-action-1' }, + }); + + eventLogger.logEvent({}); + await waitForLogEvent(systemLogger); + delay(WRITE_LOG_WAIT_MILLIS); // sleep a bit since event logging is async + 
expect(esContext.esAdapter.indexDocument).toHaveBeenCalled(); + }); + + test('handles failed initialization', async () => { + service.registerProviderActions('test-provider', ['test-action-1']); + eventLogger = service.getLogger({ + event: { provider: 'test-provider', action: 'test-action-1' }, + }); + esContext.waitTillReady.mockImplementation(async () => false); + + eventLogger.logEvent({}); + await waitForLogEvent(systemLogger); + delay(WRITE_LOG_WAIT_MILLIS); // sleep a bit longer since event logging is async + expect(esContext.esAdapter.indexDocument).not.toHaveBeenCalled(); + }); + test('method logEvent() writes expected default values', async () => { service.registerProviderActions('test-provider', ['test-action-1']); eventLogger = service.getLogger({ diff --git a/x-pack/plugins/event_log/server/event_logger.ts b/x-pack/plugins/event_log/server/event_logger.ts index 1a710a6fa48653..8730870f9620b4 100644 --- a/x-pack/plugins/event_log/server/event_logger.ts +++ b/x-pack/plugins/event_log/server/event_logger.ts @@ -183,7 +183,12 @@ function indexEventDoc(esContext: EsContext, doc: Doc): void { // whew, the thing that actually writes the event log document! async function indexLogEventDoc(esContext: EsContext, doc: unknown) { esContext.logger.debug(`writing to event log: ${JSON.stringify(doc)}`); - await esContext.waitTillReady(); + const success = await esContext.waitTillReady(); + if (!success) { + esContext.logger.debug(`event log did not initialize correctly, event not written`); + return; + } + await esContext.esAdapter.indexDocument(doc); esContext.logger.debug(`writing to event log complete`); } diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates.scss b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates.scss index 51e8a829e81b16..026e63b2b4caab 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates.scss +++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates.scss @@ -7,7 +7,8 @@ $heightHeader: $euiSizeL * 2; .componentTemplates { - @include euiBottomShadowFlat; + border: $euiBorderThin; + border-top: none; height: 100%; &__header { @@ -20,6 +21,7 @@ $heightHeader: $euiSizeL * 2; &__searchBox { border-bottom: $euiBorderThin; + border-top: $euiBorderThin; box-shadow: none; max-width: initial; } diff --git a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.scss b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.scss index 61d5512da2cd9f..041fc1c8bf9a41 100644 --- a/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.scss +++ b/x-pack/plugins/index_management/public/application/components/component_templates/component_template_selector/component_templates_selector.scss @@ -6,7 +6,7 @@ height: 480px; &__selection { - @include euiBottomShadowFlat; + border: $euiBorderThin; padding: 0 $euiSize $euiSize; color: $euiColorDarkShade; diff --git a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx 
b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx index ad98aee5fb5f12..f3d05ac38108a2 100644 --- a/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx +++ b/x-pack/plugins/index_management/public/application/components/template_form/steps/step_logistics.tsx @@ -62,7 +62,7 @@ function getFieldsMeta(esDocsBase: string) { description: ( diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts index cbd89db97236fb..a01042616a872b 100644 --- a/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts +++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts @@ -10,3 +10,4 @@ export * from './log_entry_category_examples'; export * from './log_entry_rate'; export * from './log_entry_examples'; export * from './log_entry_anomalies'; +export * from './log_entry_anomalies_datasets'; diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts index 639ac63f9b14d8..62b76a0ae475e8 100644 --- a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts +++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts @@ -128,6 +128,8 @@ export const getLogEntryAnomaliesRequestPayloadRT = rt.type({ pagination: paginationRT, // Sort properties sort: sortRT, + // Dataset filters + datasets: rt.array(rt.string), }), ]), }); diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies_datasets.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies_datasets.ts new file mode 100644 index 00000000000000..56784dba1be44e --- /dev/null +++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies_datasets.ts @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import * as rt from 'io-ts'; + +import { + badRequestErrorRT, + forbiddenErrorRT, + timeRangeRT, + routeTimingMetadataRT, +} from '../../shared'; + +export const LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH = + '/api/infra/log_analysis/results/log_entry_anomalies_datasets'; + +/** + * request + */ + +export const getLogEntryAnomaliesDatasetsRequestPayloadRT = rt.type({ + data: rt.type({ + // the id of the source configuration + sourceId: rt.string, + // the time range to fetch the anomalies datasets from + timeRange: timeRangeRT, + }), +}); + +export type GetLogEntryAnomaliesDatasetsRequestPayload = rt.TypeOf< + typeof getLogEntryAnomaliesDatasetsRequestPayloadRT +>; + +/** + * response + */ + +export const getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT = rt.intersection([ + rt.type({ + data: rt.type({ + datasets: rt.array(rt.string), + }), + }), + rt.partial({ + timing: routeTimingMetadataRT, + }), +]); + +export type GetLogEntryAnomaliesDatasetsSuccessResponsePayload = rt.TypeOf< + typeof getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT +>; + +export const getLogEntryAnomaliesDatasetsResponsePayloadRT = rt.union([ + getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT, + badRequestErrorRT, + forbiddenErrorRT, +]); + +export type GetLogEntryAnomaliesDatasetsReponsePayload = rt.TypeOf< + typeof getLogEntryAnomaliesDatasetsResponsePayloadRT +>; diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts index b7e8a49735152b..20a8e5c378cece 100644 --- a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts +++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts @@ -16,11 +16,16 @@ export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH = */ export const getLogEntryRateRequestPayloadRT = rt.type({ - data: rt.type({ - bucketDuration: rt.number, - sourceId: rt.string, - timeRange: timeRangeRT, - }), + data: rt.intersection([ + rt.type({ + bucketDuration: rt.number, + sourceId: rt.string, + timeRange: timeRangeRT, + }), + rt.partial({ + datasets: rt.array(rt.string), + }), + ]), }); export type GetLogEntryRateRequestPayload = rt.TypeOf; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/datasets_selector.tsx b/x-pack/plugins/infra/public/components/logging/log_analysis_results/datasets_selector.tsx similarity index 92% rename from x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/datasets_selector.tsx rename to x-pack/plugins/infra/public/components/logging/log_analysis_results/datasets_selector.tsx index ab938ff1d13748..2236dc9e45da6e 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/datasets_selector.tsx +++ b/x-pack/plugins/infra/public/components/logging/log_analysis_results/datasets_selector.tsx @@ -8,7 +8,7 @@ import { EuiComboBox, EuiComboBoxOptionOption } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import React, { useCallback, useMemo } from 'react'; -import { getFriendlyNameForPartitionId } from '../../../../../../common/log_analysis'; +import { getFriendlyNameForPartitionId } from '../../../../common/log_analysis'; type DatasetOptionProps = EuiComboBoxOptionOption; @@ -51,7 +51,7 @@ export const DatasetsSelector: React.FunctionComponent<{ }; const datasetFilterPlaceholder = i18n.translate( - 'xpack.infra.logs.logEntryCategories.datasetFilterPlaceholder', + 
'xpack.infra.logs.analysis.datasetFilterPlaceholder', { defaultMessage: 'Filter by datasets', } diff --git a/x-pack/plugins/infra/public/components/logging/log_text_stream/log_entry_context_menu.tsx b/x-pack/plugins/infra/public/components/logging/log_text_stream/log_entry_context_menu.tsx index adc1ce4d8c9fd8..be140a810f1646 100644 --- a/x-pack/plugins/infra/public/components/logging/log_text_stream/log_entry_context_menu.tsx +++ b/x-pack/plugins/infra/public/components/logging/log_text_stream/log_entry_context_menu.tsx @@ -6,7 +6,13 @@ import React, { useMemo } from 'react'; import { i18n } from '@kbn/i18n'; -import { EuiButtonIcon, EuiPopover, EuiContextMenuPanel, EuiContextMenuItem } from '@elastic/eui'; +import { + EuiButton, + EuiIcon, + EuiPopover, + EuiContextMenuPanel, + EuiContextMenuItem, +} from '@elastic/eui'; import { euiStyled } from '../../../../../observability/public'; import { LogEntryColumnContent } from './log_entry_column'; @@ -50,12 +56,15 @@ export const LogEntryContextMenu: React.FC = ({ const button = ( - + style={{ minWidth: 'auto' }} + > + + ); @@ -88,8 +97,5 @@ const AbsoluteWrapper = euiStyled.div` `; const ButtonWrapper = euiStyled.div` - background: ${(props) => props.theme.eui.euiColorPrimary}; - border-radius: 50%; - padding: 4px; - transform: translateY(-6px); + transform: translate(-6px, -6px); `; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/top_categories_section.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/top_categories_section.tsx index 37d26de6fce70e..ea23bc468bc76c 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/top_categories_section.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_categories/sections/top_categories/top_categories_section.tsx @@ -14,7 +14,7 @@ import { BetaBadge } from '../../../../../components/beta_badge'; import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper'; import { RecreateJobButton } from '../../../../../components/logging/log_analysis_job_status'; import { AnalyzeInMlButton } from '../../../../../components/logging/log_analysis_results'; -import { DatasetsSelector } from './datasets_selector'; +import { DatasetsSelector } from '../../../../../components/logging/log_analysis_results/datasets_selector'; import { TopCategoriesTable } from './top_categories_table'; export const TopCategoriesSection: React.FunctionComponent<{ diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx index f2a60541b3b3ce..fb1dc7717fed0b 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx @@ -27,6 +27,7 @@ import { StringTimeRange, useLogAnalysisResultsUrlState, } from './use_log_entry_rate_results_url_state'; +import { DatasetsSelector } from '../../../components/logging/log_analysis_results/datasets_selector'; export const SORT_DEFAULTS = { direction: 'desc' as const, @@ -80,11 +81,14 @@ export const LogEntryRateResultsContent: React.FunctionComponent = () => { [queryTimeRange.value.endTime, queryTimeRange.value.startTime] ); + const [selectedDatasets, setSelectedDatasets] = useState([]); + const { getLogEntryRate, isLoading, logEntryRate } = useLogEntryRateResults({ sourceId, startTime: queryTimeRange.value.startTime, 
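      // Editorial sketch, not part of the original change: `selectedDatasets` (passed a few
      // lines below as `filteredDatasets`) is plain component state, and an empty array means
      // "no dataset filter" because createDatasetsFilters returns [] for an empty list.
      // Illustrative call shapes, with 'nginx.access' as a made-up dataset name:
      //   useLogEntryRateResults({ sourceId, startTime, endTime, bucketDuration, filteredDatasets: [] })
      //   useLogEntryRateResults({ sourceId, startTime, endTime, bucketDuration, filteredDatasets: ['nginx.access'] })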
endTime: queryTimeRange.value.endTime, bucketDuration, + filteredDatasets: selectedDatasets, }); const { @@ -97,12 +101,15 @@ export const LogEntryRateResultsContent: React.FunctionComponent = () => { changePaginationOptions, sortOptions, paginationOptions, + datasets, + isLoadingDatasets, } = useLogEntryAnomaliesResults({ sourceId, startTime: queryTimeRange.value.startTime, endTime: queryTimeRange.value.endTime, defaultSortOptions: SORT_DEFAULTS, defaultPaginationOptions: PAGINATION_DEFAULTS, + filteredDatasets: selectedDatasets, }); const handleQueryTimeRangeChange = useCallback( @@ -175,7 +182,7 @@ export const LogEntryRateResultsContent: React.FunctionComponent = () => { useEffect(() => { getLogEntryRate(); - }, [getLogEntryRate, queryTimeRange.lastChangedTime]); + }, [getLogEntryRate, selectedDatasets, queryTimeRange.lastChangedTime]); useInterval( () => { @@ -191,7 +198,15 @@ export const LogEntryRateResultsContent: React.FunctionComponent = () => { - + + + + { const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, { method: 'POST', @@ -32,6 +33,7 @@ export const callGetLogEntryAnomaliesAPI = async ( }, sort, pagination, + datasets, }, }) ), diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts new file mode 100644 index 00000000000000..24be5a646d1039 --- /dev/null +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies_datasets.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { npStart } from '../../../../legacy_singletons'; +import { decodeOrThrow } from '../../../../../common/runtime_types'; +import { + getLogEntryAnomaliesDatasetsRequestPayloadRT, + getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT, + LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, +} from '../../../../../common/http_api/log_analysis'; + +export const callGetLogEntryAnomaliesDatasetsAPI = async ( + sourceId: string, + startTime: number, + endTime: number +) => { + const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, { + method: 'POST', + body: JSON.stringify( + getLogEntryAnomaliesDatasetsRequestPayloadRT.encode({ + data: { + sourceId, + timeRange: { + startTime, + endTime, + }, + }, + }) + ), + }); + + return decodeOrThrow(getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT)(response); +}; diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts index 794139385f4671..77111d279309df 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate.ts @@ -19,7 +19,8 @@ export const callGetLogEntryRateAPI = async ( sourceId: string, startTime: number, endTime: number, - bucketDuration: number + bucketDuration: number, + datasets?: string[] ) => { const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, { method: 'POST', @@ -32,6 +33,7 @@ export const callGetLogEntryRateAPI = async ( endTime, }, bucketDuration, + datasets, }, }) ), diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts index cadb4c420c133d..52632e54390a9b 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts @@ -5,11 +5,17 @@ */ import { useMemo, useState, useCallback, useEffect, useReducer } from 'react'; - -import { LogEntryAnomaly } from '../../../../common/http_api'; -import { useTrackedPromise } from '../../../utils/use_tracked_promise'; +import { useMount } from 'react-use'; +import { useTrackedPromise, CanceledPromiseError } from '../../../utils/use_tracked_promise'; import { callGetLogEntryAnomaliesAPI } from './service_calls/get_log_entry_anomalies'; -import { Sort, Pagination, PaginationCursor } from '../../../../common/http_api/log_analysis'; +import { callGetLogEntryAnomaliesDatasetsAPI } from './service_calls/get_log_entry_anomalies_datasets'; +import { + Sort, + Pagination, + PaginationCursor, + GetLogEntryAnomaliesDatasetsSuccessResponsePayload, + LogEntryAnomaly, +} from '../../../../common/http_api/log_analysis'; export type SortOptions = Sort; export type PaginationOptions = Pick; @@ -19,6 +25,7 @@ export type FetchPreviousPage = () => void; export type ChangeSortOptions = (sortOptions: Sort) => void; export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void; export type LogEntryAnomalies = LogEntryAnomaly[]; +type LogEntryAnomaliesDatasets = GetLogEntryAnomaliesDatasetsSuccessResponsePayload['data']['datasets']; interface PaginationCursors { previousPageCursor: PaginationCursor; nextPageCursor: PaginationCursor; @@ -35,6 +42,7 @@ interface ReducerState { start: 
number; end: number; }; + filteredDatasets?: string[]; } type ReducerStateDefaults = Pick< @@ -49,7 +57,8 @@ type ReducerAction = | { type: 'fetchPreviousPage' } | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } } | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } } - | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } }; + | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } } + | { type: 'changeFilteredDatasets'; payload: { filteredDatasets?: string[] } }; const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => { const resetPagination = { @@ -101,6 +110,12 @@ const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState ...resetPagination, ...action.payload, }; + case 'changeFilteredDatasets': + return { + ...state, + ...resetPagination, + ...action.payload, + }; default: return state; } @@ -122,18 +137,23 @@ export const useLogEntryAnomaliesResults = ({ sourceId, defaultSortOptions, defaultPaginationOptions, + onGetLogEntryAnomaliesDatasetsError, + filteredDatasets, }: { endTime: number; startTime: number; sourceId: string; defaultSortOptions: Sort; defaultPaginationOptions: Pick; + onGetLogEntryAnomaliesDatasetsError?: (error: Error) => void; + filteredDatasets?: string[]; }) => { const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => { return { ...stateDefaults, paginationOptions: defaultPaginationOptions, sortOptions: defaultSortOptions, + filteredDatasets, timeRange: { start: startTime, end: endTime, @@ -154,6 +174,7 @@ export const useLogEntryAnomaliesResults = ({ sortOptions, paginationOptions, paginationCursor, + filteredDatasets: queryFilteredDatasets, } = reducerState; return await callGetLogEntryAnomaliesAPI( sourceId, @@ -163,7 +184,8 @@ export const useLogEntryAnomaliesResults = ({ { ...paginationOptions, cursor: paginationCursor, - } + }, + queryFilteredDatasets ); }, onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => { @@ -192,6 +214,7 @@ export const useLogEntryAnomaliesResults = ({ reducerState.sortOptions, reducerState.paginationOptions, reducerState.paginationCursor, + reducerState.filteredDatasets, ] ); @@ -220,6 +243,14 @@ export const useLogEntryAnomaliesResults = ({ }); }, [startTime, endTime]); + // Selected datasets have changed + useEffect(() => { + dispatch({ + type: 'changeFilteredDatasets', + payload: { filteredDatasets }, + }); + }, [filteredDatasets]); + useEffect(() => { getLogEntryAnomalies(); }, [getLogEntryAnomalies]); @@ -246,10 +277,53 @@ export const useLogEntryAnomaliesResults = ({ [getLogEntryAnomaliesRequest.state] ); + // Anomalies datasets + const [logEntryAnomaliesDatasets, setLogEntryAnomaliesDatasets] = useState< + LogEntryAnomaliesDatasets + >([]); + + const [getLogEntryAnomaliesDatasetsRequest, getLogEntryAnomaliesDatasets] = useTrackedPromise( + { + cancelPreviousOn: 'creation', + createPromise: async () => { + return await callGetLogEntryAnomaliesDatasetsAPI(sourceId, startTime, endTime); + }, + onResolve: ({ data: { datasets } }) => { + setLogEntryAnomaliesDatasets(datasets); + }, + onReject: (error) => { + if ( + error instanceof Error && + !(error instanceof CanceledPromiseError) && + onGetLogEntryAnomaliesDatasetsError + ) { + onGetLogEntryAnomaliesDatasetsError(error); + } + }, + }, + [endTime, sourceId, startTime] + ); + + const isLoadingDatasets = useMemo(() => getLogEntryAnomaliesDatasetsRequest.state === 
'pending', [ + getLogEntryAnomaliesDatasetsRequest.state, + ]); + + const hasFailedLoadingDatasets = useMemo( + () => getLogEntryAnomaliesDatasetsRequest.state === 'rejected', + [getLogEntryAnomaliesDatasetsRequest.state] + ); + + useMount(() => { + getLogEntryAnomaliesDatasets(); + }); + return { logEntryAnomalies, getLogEntryAnomalies, isLoadingLogEntryAnomalies, + isLoadingDatasets, + hasFailedLoadingDatasets, + datasets: logEntryAnomaliesDatasets, hasFailedLoadingLogEntryAnomalies, changeSortOptions, sortOptions: reducerState.sortOptions, diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts index 1cd27c64af53f8..a52dab58cb018d 100644 --- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts +++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_results.ts @@ -41,11 +41,13 @@ export const useLogEntryRateResults = ({ startTime, endTime, bucketDuration = 15 * 60 * 1000, + filteredDatasets, }: { sourceId: string; startTime: number; endTime: number; bucketDuration: number; + filteredDatasets?: string[]; }) => { const [logEntryRate, setLogEntryRate] = useState(null); @@ -53,7 +55,13 @@ export const useLogEntryRateResults = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await callGetLogEntryRateAPI(sourceId, startTime, endTime, bucketDuration); + return await callGetLogEntryRateAPI( + sourceId, + startTime, + endTime, + bucketDuration, + filteredDatasets + ); }, onResolve: ({ data }) => { setLogEntryRate({ @@ -68,7 +76,7 @@ export const useLogEntryRateResults = ({ setLogEntryRate(null); }, }, - [sourceId, startTime, endTime, bucketDuration] + [sourceId, startTime, endTime, bucketDuration, filteredDatasets] ); const isLoading = useMemo(() => getLogEntryRateRequest.state === 'pending', [ diff --git a/x-pack/plugins/infra/server/infra_server.ts b/x-pack/plugins/infra/server/infra_server.ts index 6596e07ebaca5b..c080618f2a563e 100644 --- a/x-pack/plugins/infra/server/infra_server.ts +++ b/x-pack/plugins/infra/server/infra_server.ts @@ -19,6 +19,7 @@ import { initValidateLogAnalysisDatasetsRoute, initValidateLogAnalysisIndicesRoute, initGetLogEntryAnomaliesRoute, + initGetLogEntryAnomaliesDatasetsRoute, } from './routes/log_analysis'; import { initMetricExplorerRoute } from './routes/metrics_explorer'; import { initMetadataRoute } from './routes/metadata'; @@ -53,6 +54,7 @@ export const initInfraServer = (libs: InfraBackendLibs) => { initGetLogEntryCategoryExamplesRoute(libs); initGetLogEntryRateRoute(libs); initGetLogEntryAnomaliesRoute(libs); + initGetLogEntryAnomaliesDatasetsRoute(libs); initSnapshotRoute(libs); initNodeDetailsRoute(libs); initSourceRoute(libs); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/common.ts index 0c0b0a0f19982f..218281d875a46b 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/common.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/common.ts @@ -4,10 +4,19 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import type { MlAnomalyDetectors } from '../../types'; -import { startTracingSpan } from '../../../common/performance_tracing'; +import type { MlAnomalyDetectors, MlSystem } from '../../types'; import { NoLogAnalysisMlJobError } from './errors'; +import { + CompositeDatasetKey, + createLogEntryDatasetsQuery, + LogEntryDatasetBucket, + logEntryDatasetsResponseRT, +} from './queries/log_entry_data_sets'; +import { decodeOrThrow } from '../../../common/runtime_types'; +import { NoLogAnalysisResultsIndexError } from './errors'; +import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing'; + export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) { const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES'); const { @@ -27,3 +36,63 @@ export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: }, }; } + +const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; + +// Finds datasets related to ML job ids +export async function getLogEntryDatasets( + mlSystem: MlSystem, + startTime: number, + endTime: number, + jobIds: string[] +) { + const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets'); + + let logEntryDatasetBuckets: LogEntryDatasetBucket[] = []; + let afterLatestBatchKey: CompositeDatasetKey | undefined; + let esSearchSpans: TracingSpan[] = []; + + while (true) { + const finalizeEsSearchSpan = startTracingSpan('fetch log entry dataset batch from ES'); + + const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)( + await mlSystem.mlAnomalySearch( + createLogEntryDatasetsQuery( + jobIds, + startTime, + endTime, + COMPOSITE_AGGREGATION_BATCH_SIZE, + afterLatestBatchKey + ) + ) + ); + + if (logEntryDatasetsResponse._shards.total === 0) { + throw new NoLogAnalysisResultsIndexError( + `Failed to find ml indices for jobs: ${jobIds.join(', ')}.` + ); + } + + const { + after_key: afterKey, + buckets: latestBatchBuckets, + } = logEntryDatasetsResponse.aggregations.dataset_buckets; + + logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets]; + afterLatestBatchKey = afterKey; + esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()]; + + if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { + break; + } + } + + const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan(); + + return { + data: logEntryDatasetBuckets.map((logEntryDatasetBucket) => logEntryDatasetBucket.key.dataset), + timing: { + spans: [logEntryDatasetsSpan, ...esSearchSpans], + }, + }; +} diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts index 12ae516564d66b..950de4261bda0a 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts @@ -7,15 +7,19 @@ import { RequestHandlerContext } from 'src/core/server'; import { InfraRequestHandlerContext } from '../../types'; import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing'; -import { fetchMlJob } from './common'; +import { fetchMlJob, getLogEntryDatasets } from './common'; import { getJobId, logEntryCategoriesJobTypes, logEntryRateJobTypes, jobCustomSettingsRT, } from '../../../common/log_analysis'; -import { Sort, Pagination } from '../../../common/http_api/log_analysis'; -import type { MlSystem } from '../../types'; +import { + Sort, + Pagination, + GetLogEntryAnomaliesRequestPayload, +} from 
'../../../common/http_api/log_analysis'; +import type { MlSystem, MlAnomalyDetectors } from '../../types'; import { createLogEntryAnomaliesQuery, logEntryAnomaliesResponseRT } from './queries'; import { InsufficientAnomalyMlJobsConfigured, @@ -43,22 +47,13 @@ interface MappedAnomalyHit { categoryId?: string; } -export async function getLogEntryAnomalies( - context: RequestHandlerContext & { infra: Required }, +async function getCompatibleAnomaliesJobIds( + spaceId: string, sourceId: string, - startTime: number, - endTime: number, - sort: Sort, - pagination: Pagination + mlAnomalyDetectors: MlAnomalyDetectors ) { - const finalizeLogEntryAnomaliesSpan = startTracingSpan('get log entry anomalies'); - - const logRateJobId = getJobId(context.infra.spaceId, sourceId, logEntryRateJobTypes[0]); - const logCategoriesJobId = getJobId( - context.infra.spaceId, - sourceId, - logEntryCategoriesJobTypes[0] - ); + const logRateJobId = getJobId(spaceId, sourceId, logEntryRateJobTypes[0]); + const logCategoriesJobId = getJobId(spaceId, sourceId, logEntryCategoriesJobTypes[0]); const jobIds: string[] = []; let jobSpans: TracingSpan[] = []; @@ -66,7 +61,7 @@ export async function getLogEntryAnomalies( try { const { timing: { spans }, - } = await fetchMlJob(context.infra.mlAnomalyDetectors, logRateJobId); + } = await fetchMlJob(mlAnomalyDetectors, logRateJobId); jobIds.push(logRateJobId); jobSpans = [...jobSpans, ...spans]; } catch (e) { @@ -76,13 +71,39 @@ export async function getLogEntryAnomalies( try { const { timing: { spans }, - } = await fetchMlJob(context.infra.mlAnomalyDetectors, logCategoriesJobId); + } = await fetchMlJob(mlAnomalyDetectors, logCategoriesJobId); jobIds.push(logCategoriesJobId); jobSpans = [...jobSpans, ...spans]; } catch (e) { // Job wasn't found } + return { + jobIds, + timing: { spans: jobSpans }, + }; +} + +export async function getLogEntryAnomalies( + context: RequestHandlerContext & { infra: Required }, + sourceId: string, + startTime: number, + endTime: number, + sort: Sort, + pagination: Pagination, + datasets: GetLogEntryAnomaliesRequestPayload['data']['datasets'] +) { + const finalizeLogEntryAnomaliesSpan = startTracingSpan('get log entry anomalies'); + + const { + jobIds, + timing: { spans: jobSpans }, + } = await getCompatibleAnomaliesJobIds( + context.infra.spaceId, + sourceId, + context.infra.mlAnomalyDetectors + ); + if (jobIds.length === 0) { throw new InsufficientAnomalyMlJobsConfigured( 'Log rate or categorisation ML jobs need to be configured to search anomalies' @@ -100,16 +121,17 @@ export async function getLogEntryAnomalies( startTime, endTime, sort, - pagination + pagination, + datasets ); const data = anomalies.map((anomaly) => { const { jobId } = anomaly; - if (jobId === logRateJobId) { - return parseLogRateAnomalyResult(anomaly, logRateJobId); + if (!anomaly.categoryId) { + return parseLogRateAnomalyResult(anomaly, jobId); } else { - return parseCategoryAnomalyResult(anomaly, logCategoriesJobId); + return parseCategoryAnomalyResult(anomaly, jobId); } }); @@ -181,7 +203,8 @@ async function fetchLogEntryAnomalies( startTime: number, endTime: number, sort: Sort, - pagination: Pagination + pagination: Pagination, + datasets: GetLogEntryAnomaliesRequestPayload['data']['datasets'] ) { // We'll request 1 extra entry on top of our pageSize to determine if there are // more entries to be fetched. 
This avoids scenarios where the client side can't @@ -193,7 +216,7 @@ async function fetchLogEntryAnomalies( const results = decodeOrThrow(logEntryAnomaliesResponseRT)( await mlSystem.mlAnomalySearch( - createLogEntryAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination) + createLogEntryAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination, datasets) ) ); @@ -396,3 +419,43 @@ export async function fetchLogEntryExamples( }, }; } + +export async function getLogEntryAnomaliesDatasets( + context: { + infra: { + mlSystem: MlSystem; + mlAnomalyDetectors: MlAnomalyDetectors; + spaceId: string; + }; + }, + sourceId: string, + startTime: number, + endTime: number +) { + const { + jobIds, + timing: { spans: jobSpans }, + } = await getCompatibleAnomaliesJobIds( + context.infra.spaceId, + sourceId, + context.infra.mlAnomalyDetectors + ); + + if (jobIds.length === 0) { + throw new InsufficientAnomalyMlJobsConfigured( + 'Log rate or categorisation ML jobs need to be configured to search for anomaly datasets' + ); + } + + const { + data: datasets, + timing: { spans: datasetsSpans }, + } = await getLogEntryDatasets(context.infra.mlSystem, startTime, endTime, jobIds); + + return { + datasets, + timing: { + spans: [...jobSpans, ...datasetsSpans], + }, + }; +} diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts index 6d00ba56e0e662..a455a03d936a5a 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts @@ -12,7 +12,7 @@ import { jobCustomSettingsRT, logEntryCategoriesJobTypes, } from '../../../common/log_analysis'; -import { startTracingSpan, TracingSpan } from '../../../common/performance_tracing'; +import { startTracingSpan } from '../../../common/performance_tracing'; import { decodeOrThrow } from '../../../common/runtime_types'; import type { MlAnomalyDetectors, MlSystem } from '../../types'; import { @@ -33,20 +33,12 @@ import { createLogEntryCategoryHistogramsQuery, logEntryCategoryHistogramsResponseRT, } from './queries/log_entry_category_histograms'; -import { - CompositeDatasetKey, - createLogEntryDatasetsQuery, - LogEntryDatasetBucket, - logEntryDatasetsResponseRT, -} from './queries/log_entry_data_sets'; import { createTopLogEntryCategoriesQuery, topLogEntryCategoriesResponseRT, } from './queries/top_log_entry_categories'; import { InfraSource } from '../sources'; -import { fetchMlJob } from './common'; - -const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; +import { fetchMlJob, getLogEntryDatasets } from './common'; export async function getTopLogEntryCategories( context: { @@ -129,61 +121,15 @@ export async function getLogEntryCategoryDatasets( startTime: number, endTime: number ) { - const finalizeLogEntryDatasetsSpan = startTracingSpan('get data sets'); - const logEntryCategoriesCountJobId = getJobId( context.infra.spaceId, sourceId, logEntryCategoriesJobTypes[0] ); - let logEntryDatasetBuckets: LogEntryDatasetBucket[] = []; - let afterLatestBatchKey: CompositeDatasetKey | undefined; - let esSearchSpans: TracingSpan[] = []; - - while (true) { - const finalizeEsSearchSpan = startTracingSpan('fetch category dataset batch from ES'); - - const logEntryDatasetsResponse = decodeOrThrow(logEntryDatasetsResponseRT)( - await context.infra.mlSystem.mlAnomalySearch( - createLogEntryDatasetsQuery( - logEntryCategoriesCountJobId, - startTime, - 
endTime, - COMPOSITE_AGGREGATION_BATCH_SIZE, - afterLatestBatchKey - ) - ) - ); - - if (logEntryDatasetsResponse._shards.total === 0) { - throw new NoLogAnalysisResultsIndexError( - `Failed to find ml result index for job ${logEntryCategoriesCountJobId}.` - ); - } - - const { - after_key: afterKey, - buckets: latestBatchBuckets, - } = logEntryDatasetsResponse.aggregations.dataset_buckets; + const jobIds = [logEntryCategoriesCountJobId]; - logEntryDatasetBuckets = [...logEntryDatasetBuckets, ...latestBatchBuckets]; - afterLatestBatchKey = afterKey; - esSearchSpans = [...esSearchSpans, finalizeEsSearchSpan()]; - - if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { - break; - } - } - - const logEntryDatasetsSpan = finalizeLogEntryDatasetsSpan(); - - return { - data: logEntryDatasetBuckets.map((logEntryDatasetBucket) => logEntryDatasetBucket.key.dataset), - timing: { - spans: [logEntryDatasetsSpan, ...esSearchSpans], - }, - }; + return await getLogEntryDatasets(context.infra.mlSystem, startTime, endTime, jobIds); } export async function getLogEntryCategoryExamples( diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts index 0323980dcd013e..7bfc85ba78a0e9 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts @@ -30,7 +30,8 @@ export async function getLogEntryRateBuckets( sourceId: string, startTime: number, endTime: number, - bucketDuration: number + bucketDuration: number, + datasets?: string[] ) { const logRateJobId = getJobId(context.infra.spaceId, sourceId, 'log-entry-rate'); let mlModelPlotBuckets: LogRateModelPlotBucket[] = []; @@ -44,7 +45,8 @@ export async function getLogEntryRateBuckets( endTime, bucketDuration, COMPOSITE_AGGREGATION_BATCH_SIZE, - afterLatestBatchKey + afterLatestBatchKey, + datasets ) ); diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts index 87394028095dec..63e39ef022392a 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts @@ -55,3 +55,14 @@ export const createCategoryIdFilters = (categoryIds: number[]) => [ }, }, ]; + +export const createDatasetsFilters = (datasets?: string[]) => + datasets && datasets.length > 0 + ? 
[ + { + terms: { + partition_field_value: datasets, + }, + }, + ] + : []; diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts index fc72776ea5cacd..c722544c509aa0 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts @@ -11,8 +11,13 @@ import { createTimeRangeFilters, createResultTypeFilters, defaultRequestParameters, + createDatasetsFilters, } from './common'; -import { Sort, Pagination } from '../../../../common/http_api/log_analysis'; +import { + Sort, + Pagination, + GetLogEntryAnomaliesRequestPayload, +} from '../../../../common/http_api/log_analysis'; // TODO: Reassess validity of this against ML docs const TIEBREAKER_FIELD = '_doc'; @@ -28,7 +33,8 @@ export const createLogEntryAnomaliesQuery = ( startTime: number, endTime: number, sort: Sort, - pagination: Pagination + pagination: Pagination, + datasets: GetLogEntryAnomaliesRequestPayload['data']['datasets'] ) => { const { field } = sort; const { pageSize } = pagination; @@ -37,6 +43,7 @@ export const createLogEntryAnomaliesQuery = ( ...createJobIdsFilters(jobIds), ...createTimeRangeFilters(startTime, endTime), ...createResultTypeFilters(['record']), + ...createDatasetsFilters(datasets), ]; const sourceFields = [ diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts index dd22bedae8b2ae..7627ccd8c4996d 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_data_sets.ts @@ -7,14 +7,14 @@ import * as rt from 'io-ts'; import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types'; import { - createJobIdFilters, + createJobIdsFilters, createResultTypeFilters, createTimeRangeFilters, defaultRequestParameters, } from './common'; export const createLogEntryDatasetsQuery = ( - logEntryAnalysisJobId: string, + jobIds: string[], startTime: number, endTime: number, size: number, @@ -25,7 +25,7 @@ export const createLogEntryDatasetsQuery = ( query: { bool: { filter: [ - ...createJobIdFilters(logEntryAnalysisJobId), + ...createJobIdsFilters(jobIds), ...createTimeRangeFilters(startTime, endTime), ...createResultTypeFilters(['model_plot']), ], diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts index 8d9c586b2ef67b..52edcf09cdfc27 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts @@ -10,6 +10,7 @@ import { createResultTypeFilters, createTimeRangeFilters, defaultRequestParameters, + createDatasetsFilters, } from './common'; export const createLogEntryRateQuery = ( @@ -18,7 +19,8 @@ export const createLogEntryRateQuery = ( endTime: number, bucketDuration: number, size: number, - afterKey?: CompositeTimestampPartitionKey + afterKey?: CompositeTimestampPartitionKey, + datasets?: string[] ) => ({ ...defaultRequestParameters, body: { @@ -28,6 +30,7 @@ export const createLogEntryRateQuery = ( ...createJobIdFilters(logRateJobId), ...createTimeRangeFilters(startTime, endTime), ...createResultTypeFilters(['model_plot', 'record']), + ...createDatasetsFilters(datasets), { 
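            // Editorial sketch, not part of the original change: createDatasetsFilters (imported
            // from ./common above) returns [] when no datasets are passed, so the spread is a
            // no-op for unfiltered requests; for a hypothetical selection ['nginx.access'] it
            // contributes roughly:
            //   { terms: { partition_field_value: ['nginx.access'] } }
            // which narrows the model_plot/record results to the selected ML partitions.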
term: { detector_index: { diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts index 6fa7156240508e..355dde9ec7c4a5 100644 --- a/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts +++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/top_log_entry_categories.ts @@ -11,6 +11,7 @@ import { createResultTypeFilters, createTimeRangeFilters, defaultRequestParameters, + createDatasetsFilters, } from './common'; export const createTopLogEntryCategoriesQuery = ( @@ -122,17 +123,6 @@ export const createTopLogEntryCategoriesQuery = ( size: 0, }); -const createDatasetsFilters = (datasets: string[]) => - datasets.length > 0 - ? [ - { - terms: { - partition_field_value: datasets, - }, - }, - ] - : []; - const metricAggregationRT = rt.type({ value: rt.union([rt.number, rt.null]), }); diff --git a/x-pack/plugins/infra/server/plugin.ts b/x-pack/plugins/infra/server/plugin.ts index 5b9fbc2829c721..7cd6383a9b2e5e 100644 --- a/x-pack/plugins/infra/server/plugin.ts +++ b/x-pack/plugins/infra/server/plugin.ts @@ -152,12 +152,9 @@ export class InfraServerPlugin { core.http.registerRouteHandlerContext( 'infra', (context, request): InfraRequestHandlerContext => { - const mlSystem = - context.ml && - plugins.ml?.mlSystemProvider(context.ml?.mlClient.callAsCurrentUser, request); + const mlSystem = context.ml && plugins.ml?.mlSystemProvider(context.ml?.mlClient, request); const mlAnomalyDetectors = - context.ml && - plugins.ml?.anomalyDetectorsProvider(context.ml?.mlClient.callAsCurrentUser, request); + context.ml && plugins.ml?.anomalyDetectorsProvider(context.ml?.mlClient, request); const spaceId = plugins.spaces?.spacesService.getSpaceId(request) || 'default'; return { diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts index cbd89db97236fb..a01042616a872b 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts @@ -10,3 +10,4 @@ export * from './log_entry_category_examples'; export * from './log_entry_rate'; export * from './log_entry_examples'; export * from './log_entry_anomalies'; +export * from './log_entry_anomalies_datasets'; diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts index f4911658ea4969..d79c9b9dd2c786 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts @@ -34,6 +34,7 @@ export const initGetLogEntryAnomaliesRoute = ({ framework }: InfraBackendLibs) = timeRange: { startTime, endTime }, sort: sortParam, pagination: paginationParam, + datasets, }, } = request.body; @@ -53,7 +54,8 @@ export const initGetLogEntryAnomaliesRoute = ({ framework }: InfraBackendLibs) = startTime, endTime, sort, - pagination + pagination, + datasets ); return response.ok({ diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies_datasets.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies_datasets.ts new file mode 100644 index 00000000000000..d3d0862eee9aa3 --- /dev/null +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies_datasets.ts @@ -0,0 +1,74 @@ +/* + 
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import Boom from 'boom'; +import { + getLogEntryAnomaliesDatasetsRequestPayloadRT, + getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT, + LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, +} from '../../../../common/http_api/log_analysis'; +import { createValidationFunction } from '../../../../common/runtime_types'; +import type { InfraBackendLibs } from '../../../lib/infra_types'; +import { + getLogEntryAnomaliesDatasets, + NoLogAnalysisResultsIndexError, +} from '../../../lib/log_analysis'; +import { assertHasInfraMlPlugins } from '../../../utils/request_context'; + +export const initGetLogEntryAnomaliesDatasetsRoute = ({ framework }: InfraBackendLibs) => { + framework.registerRoute( + { + method: 'post', + path: LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_DATASETS_PATH, + validate: { + body: createValidationFunction(getLogEntryAnomaliesDatasetsRequestPayloadRT), + }, + }, + framework.router.handleLegacyErrors(async (requestContext, request, response) => { + const { + data: { + sourceId, + timeRange: { startTime, endTime }, + }, + } = request.body; + + try { + assertHasInfraMlPlugins(requestContext); + + const { datasets, timing } = await getLogEntryAnomaliesDatasets( + requestContext, + sourceId, + startTime, + endTime + ); + + return response.ok({ + body: getLogEntryAnomaliesDatasetsSuccessReponsePayloadRT.encode({ + data: { + datasets, + }, + timing, + }), + }); + } catch (error) { + if (Boom.isBoom(error)) { + throw error; + } + + if (error instanceof NoLogAnalysisResultsIndexError) { + return response.notFound({ body: { message: error.message } }); + } + + return response.customError({ + statusCode: error.statusCode ?? 500, + body: { + message: error.message ?? 
'An unexpected error occurred', + }, + }); + } + }) + ); +}; diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts index ae86102980c166..3b05f6ed23aaec 100644 --- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts +++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate.ts @@ -27,7 +27,7 @@ export const initGetLogEntryRateRoute = ({ framework }: InfraBackendLibs) => { }, framework.router.handleLegacyErrors(async (requestContext, request, response) => { const { - data: { sourceId, timeRange, bucketDuration }, + data: { sourceId, timeRange, bucketDuration, datasets }, } = request.body; try { @@ -38,7 +38,8 @@ export const initGetLogEntryRateRoute = ({ framework }: InfraBackendLibs) => { sourceId, timeRange.startTime, timeRange.endTime, - bucketDuration + bucketDuration, + datasets ); return response.ok({ diff --git a/x-pack/plugins/maps/public/classes/sources/ems_tms_source/ems_base_map_layer_wizard.tsx b/x-pack/plugins/maps/public/classes/sources/ems_tms_source/ems_base_map_layer_wizard.tsx index 49d262cbad1a10..5cc2a1225bbd78 100644 --- a/x-pack/plugins/maps/public/classes/sources/ems_tms_source/ems_base_map_layer_wizard.tsx +++ b/x-pack/plugins/maps/public/classes/sources/ems_tms_source/ems_base_map_layer_wizard.tsx @@ -5,6 +5,7 @@ */ import React from 'react'; import { i18n } from '@kbn/i18n'; +import { EuiPanel } from '@elastic/eui'; import { LayerWizard, RenderWizardArguments } from '../../layers/layer_wizard_registry'; // @ts-ignore import { EMSTMSSource, sourceTitle } from './ems_tms_source'; @@ -32,7 +33,11 @@ export const emsBaseMapLayerWizardConfig: LayerWizard = { previewLayers([layerDescriptor]); }; - return ; + return ( + + + + ); }, title: sourceTitle, }; diff --git a/x-pack/plugins/maps/public/classes/sources/ems_tms_source/tile_service_select.js b/x-pack/plugins/maps/public/classes/sources/ems_tms_source/tile_service_select.js index 2b54e00cae7394..1eff4bf3786f46 100644 --- a/x-pack/plugins/maps/public/classes/sources/ems_tms_source/tile_service_select.js +++ b/x-pack/plugins/maps/public/classes/sources/ems_tms_source/tile_service_select.js @@ -5,7 +5,7 @@ */ import React from 'react'; -import { EuiSelect, EuiFormRow, EuiPanel } from '@elastic/eui'; +import { EuiSelect, EuiFormRow } from '@elastic/eui'; import { getEmsTmsServices } from '../../../meta'; import { getEmsUnavailableMessage } from '../../../components/ems_unavailable_message'; @@ -71,25 +71,23 @@ export class TileServiceSelect extends React.Component { } return ( - - - - - + + + ); } } diff --git a/x-pack/plugins/maps/public/classes/sources/ems_tms_source/update_source_editor.js b/x-pack/plugins/maps/public/classes/sources/ems_tms_source/update_source_editor.js index 4d567b8dbb32a9..f5ef7096d48dd5 100644 --- a/x-pack/plugins/maps/public/classes/sources/ems_tms_source/update_source_editor.js +++ b/x-pack/plugins/maps/public/classes/sources/ems_tms_source/update_source_editor.js @@ -26,9 +26,7 @@ export function UpdateSourceEditor({ onChange, config }) { /> - - diff --git a/x-pack/plugins/maps/public/classes/styles/vector/components/color/color_map_select.js b/x-pack/plugins/maps/public/classes/styles/vector/components/color/color_map_select.js index a7d849265d815f..69cdb00a01c9c2 100644 --- a/x-pack/plugins/maps/public/classes/styles/vector/components/color/color_map_select.js +++ 
b/x-pack/plugins/maps/public/classes/styles/vector/components/color/color_map_select.js @@ -161,7 +161,7 @@ export class ColorMapSelect extends Component { return ( - + {toggle} diff --git a/x-pack/plugins/maps/public/connected_components/add_layer_panel/flyout_body/layer_wizard_select.tsx b/x-pack/plugins/maps/public/connected_components/add_layer_panel/flyout_body/layer_wizard_select.tsx index f0195bc5dee2f2..6f3a88ce905ceb 100644 --- a/x-pack/plugins/maps/public/connected_components/add_layer_panel/flyout_body/layer_wizard_select.tsx +++ b/x-pack/plugins/maps/public/connected_components/add_layer_panel/flyout_body/layer_wizard_select.tsx @@ -115,7 +115,7 @@ export class LayerWizardSelect extends Component { }); return ( - + { { return ( <> {this._renderCategoryFacets()} + {wizardCards} diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap index 00d7f44d6273fe..92330c1d1ddce5 100644 --- a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap +++ b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/__snapshots__/join_editor.test.tsx.snap @@ -85,7 +85,7 @@ exports[`Should render join editor 1`] = ` > diff --git a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx index c589604e851120..2065668858e22b 100644 --- a/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx +++ b/x-pack/plugins/maps/public/connected_components/layer_panel/join_editor/join_editor.tsx @@ -85,7 +85,7 @@ export function JoinEditor({ joins, layer, onChange, leftJoinFields, layerDispla ); globalFilterCheckbox = ( - + + + ); } diff --git a/x-pack/plugins/ml/common/constants/field_histograms.ts b/x-pack/plugins/ml/common/constants/field_histograms.ts new file mode 100644 index 00000000000000..5c86c00ac666f1 --- /dev/null +++ b/x-pack/plugins/ml/common/constants/field_histograms.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
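The new log-entry anomalies datasets route earlier in this change set distinguishes three failure modes: Boom errors are rethrown for the framework to handle, the known `NoLogAnalysisResultsIndexError` maps to a 404, and anything else falls back to a custom error carrying the original status code. A condensed sketch of that shape, with the response factory and error class reduced to hypothetical stand-ins:

```typescript
import Boom from 'boom';

// Hypothetical stand-ins: ResponseFactory mirrors only the methods used by the
// route handler, and NoResultsIndexError plays the role of NoLogAnalysisResultsIndexError.
interface ResponseFactory {
  ok(opts: { body: unknown }): unknown;
  notFound(opts: { body: { message: string } }): unknown;
  customError(opts: { statusCode: number; body: { message: string } }): unknown;
}

class NoResultsIndexError extends Error {}

export async function handleWithRouteErrors<T>(response: ResponseFactory, fn: () => Promise<T>) {
  try {
    // Happy path: return the encoded payload.
    return response.ok({ body: await fn() });
  } catch (error: any) {
    // Boom errors already carry an HTTP payload, so let the framework handle them.
    if (Boom.isBoom(error)) {
      throw error;
    }
    // A missing ML results index becomes a 404 with the original message.
    if (error instanceof NoResultsIndexError) {
      return response.notFound({ body: { message: error.message } });
    }
    // Everything else falls back to a custom error, defaulting to a 500.
    return response.customError({
      statusCode: error.statusCode ?? 500,
      body: { message: error.message ?? 'An unexpected error occurred' },
    });
  }
}
```

Keeping the Boom rethrow first lets errors that already describe an HTTP response pass through untouched.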
+ */ + +// Default sampler shard size used for field histograms +export const DEFAULT_SAMPLER_SHARD_SIZE = 5000; diff --git a/x-pack/plugins/ml/common/types/capabilities.ts b/x-pack/plugins/ml/common/types/capabilities.ts index f2b8159b6b83d1..b46dd87eec15fc 100644 --- a/x-pack/plugins/ml/common/types/capabilities.ts +++ b/x-pack/plugins/ml/common/types/capabilities.ts @@ -5,6 +5,7 @@ */ import { KibanaRequest } from 'kibana/server'; +import { PLUGIN_ID } from '../constants/app'; export const userMlCapabilities = { canAccessML: false, @@ -69,16 +70,31 @@ export function getDefaultCapabilities(): MlCapabilities { export function getPluginPrivileges() { const userMlCapabilitiesKeys = Object.keys(userMlCapabilities); const adminMlCapabilitiesKeys = Object.keys(adminMlCapabilities); - const allMlCapabilities = [...adminMlCapabilitiesKeys, ...userMlCapabilitiesKeys]; + const allMlCapabilitiesKeys = [...adminMlCapabilitiesKeys, ...userMlCapabilitiesKeys]; + // TODO: include ML in base privileges for the `8.0` release: https://github.com/elastic/kibana/issues/71422 + const privilege = { + app: [PLUGIN_ID, 'kibana'], + excludeFromBasePrivileges: true, + management: { + insightsAndAlerting: ['jobsListLink'], + }, + catalogue: [PLUGIN_ID], + savedObject: { + all: [], + read: ['index-pattern', 'search'], + }, + }; return { + admin: { + ...privilege, + api: allMlCapabilitiesKeys.map((k) => `ml:${k}`), + ui: allMlCapabilitiesKeys, + }, user: { - ui: userMlCapabilitiesKeys, + ...privilege, api: userMlCapabilitiesKeys.map((k) => `ml:${k}`), - }, - admin: { - ui: allMlCapabilities, - api: allMlCapabilities.map((k) => `ml:${k}`), + ui: userMlCapabilitiesKeys, }, }; } diff --git a/x-pack/plugins/ml/common/types/kibana.ts b/x-pack/plugins/ml/common/types/kibana.ts index 4a2edfebd1bac1..f88b843015f176 100644 --- a/x-pack/plugins/ml/common/types/kibana.ts +++ b/x-pack/plugins/ml/common/types/kibana.ts @@ -11,8 +11,6 @@ import { IndexPatternAttributes } from 'src/plugins/data/common'; export type IndexPatternTitle = string; -export type callWithRequestType = (action: string, params?: any) => Promise; - export interface Route { id: string; k7Breadcrumbs: () => any; diff --git a/x-pack/plugins/ml/jsconfig.json b/x-pack/plugins/ml/jsconfig.json deleted file mode 100644 index 22e52d752250bb..00000000000000 --- a/x-pack/plugins/ml/jsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "compilerOptions": { - "target": "es6", - "module": "commonjs", - "baseUrl": "../../../.", - "paths": { - "ui/*": ["src/legacy/ui/public/*"], - "plugins/ml/*": ["x-pack/plugins/ml/public/*"] - } - }, - "exclude": ["node_modules", "build"] -} diff --git a/x-pack/plugins/ml/public/application/app.tsx b/x-pack/plugins/ml/public/application/app.tsx index 9d5125532e5b8f..cf645404860f5a 100644 --- a/x-pack/plugins/ml/public/application/app.tsx +++ b/x-pack/plugins/ml/public/application/app.tsx @@ -20,7 +20,7 @@ import { MlRouter } from './routing'; import { mlApiServicesProvider } from './services/ml_api_service'; import { HttpService } from './services/http_service'; -type MlDependencies = MlSetupDependencies & MlStartDependencies; +export type MlDependencies = Omit & MlStartDependencies; interface AppProps { coreStart: CoreStart; diff --git a/x-pack/plugins/ml/public/application/capabilities/check_capabilities.ts b/x-pack/plugins/ml/public/application/capabilities/check_capabilities.ts index 65ea03caef5265..56b372ff399197 100644 --- a/x-pack/plugins/ml/public/application/capabilities/check_capabilities.ts +++ 
b/x-pack/plugins/ml/public/application/capabilities/check_capabilities.ts @@ -16,8 +16,8 @@ let _capabilities: MlCapabilities = getDefaultCapabilities(); export function checkGetManagementMlJobsResolver() { return new Promise<{ mlFeatureEnabledInSpace: boolean }>((resolve, reject) => { - getManageMlCapabilities().then( - ({ capabilities, isPlatinumOrTrialLicense, mlFeatureEnabledInSpace }) => { + getManageMlCapabilities() + .then(({ capabilities, isPlatinumOrTrialLicense, mlFeatureEnabledInSpace }) => { _capabilities = capabilities; // Loop through all capabilities to ensure they are all set to true. const isManageML = Object.values(_capabilities).every((p) => p === true); @@ -28,62 +28,80 @@ export function checkGetManagementMlJobsResolver() { window.location.href = ACCESS_DENIED_PATH; return reject(); } - } - ); + }) + .catch((e) => { + window.location.href = ACCESS_DENIED_PATH; + return reject(); + }); }); } export function checkGetJobsCapabilitiesResolver(): Promise { return new Promise((resolve, reject) => { - getCapabilities().then(({ capabilities, isPlatinumOrTrialLicense }) => { - _capabilities = capabilities; - // the minimum privilege for using ML with a platinum or trial license is being able to get the transforms list. - // all other functionality is controlled by the return capabilities object. - // if the license is basic (isPlatinumOrTrialLicense === false) then do not redirect, - // allow the promise to resolve as the separate license check will redirect then user to - // a basic feature - if (_capabilities.canGetJobs || isPlatinumOrTrialLicense === false) { - return resolve(_capabilities); - } else { + getCapabilities() + .then(({ capabilities, isPlatinumOrTrialLicense }) => { + _capabilities = capabilities; + // the minimum privilege for using ML with a platinum or trial license is being able to get the transforms list. + // all other functionality is controlled by the return capabilities object. 
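The `getPluginPrivileges` refactor above builds the admin and user feature privileges from one shared base object and derives the `api` action tags from the capability keys. A minimal sketch of the pattern, with simplified capability maps standing in for the real `userMlCapabilities`/`adminMlCapabilities`:

```typescript
// Sketch only: simplified capability maps stand in for the real
// userMlCapabilities and adminMlCapabilities objects.
const userCapabilities = { canAccessML: false, canGetJobs: false };
const adminCapabilities = { canCreateJob: false, canDeleteJob: false };
const PLUGIN_ID = 'ml'; // assumption: mirrors the constant imported from '../constants/app'

export function getPluginPrivilegesSketch() {
  const userKeys = Object.keys(userCapabilities);
  const allKeys = [...Object.keys(adminCapabilities), ...userKeys];

  // Settings shared by both privilege definitions.
  const basePrivilege = {
    app: [PLUGIN_ID, 'kibana'],
    excludeFromBasePrivileges: true,
    management: { insightsAndAlerting: ['jobsListLink'] },
    catalogue: [PLUGIN_ID],
    savedObject: { all: [], read: ['index-pattern', 'search'] },
  };

  return {
    admin: {
      ...basePrivilege,
      api: allKeys.map((k) => `ml:${k}`), // API action tags derived from capability keys
      ui: allKeys,
    },
    user: {
      ...basePrivilege,
      api: userKeys.map((k) => `ml:${k}`),
      ui: userKeys,
    },
  };
}
```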
+ // if the license is basic (isPlatinumOrTrialLicense === false) then do not redirect, + // allow the promise to resolve as the separate license check will redirect then user to + // a basic feature + if (_capabilities.canGetJobs || isPlatinumOrTrialLicense === false) { + return resolve(_capabilities); + } else { + window.location.href = '#/access-denied'; + return reject(); + } + }) + .catch((e) => { window.location.href = '#/access-denied'; return reject(); - } - }); + }); }); } export function checkCreateJobsCapabilitiesResolver(): Promise { return new Promise((resolve, reject) => { - getCapabilities().then(({ capabilities, isPlatinumOrTrialLicense }) => { - _capabilities = capabilities; - // if the license is basic (isPlatinumOrTrialLicense === false) then do not redirect, - // allow the promise to resolve as the separate license check will redirect then user to - // a basic feature - if (_capabilities.canCreateJob || isPlatinumOrTrialLicense === false) { - return resolve(_capabilities); - } else { - // if the user has no permission to create a job, - // redirect them back to the Transforms Management page + getCapabilities() + .then(({ capabilities, isPlatinumOrTrialLicense }) => { + _capabilities = capabilities; + // if the license is basic (isPlatinumOrTrialLicense === false) then do not redirect, + // allow the promise to resolve as the separate license check will redirect then user to + // a basic feature + if (_capabilities.canCreateJob || isPlatinumOrTrialLicense === false) { + return resolve(_capabilities); + } else { + // if the user has no permission to create a job, + // redirect them back to the Transforms Management page + window.location.href = '#/jobs'; + return reject(); + } + }) + .catch((e) => { window.location.href = '#/jobs'; return reject(); - } - }); + }); }); } export function checkFindFileStructurePrivilegeResolver(): Promise { return new Promise((resolve, reject) => { - getCapabilities().then(({ capabilities }) => { - _capabilities = capabilities; - // the minimum privilege for using ML with a basic license is being able to use the datavisualizer. - // all other functionality is controlled by the return _capabilities object - if (_capabilities.canFindFileStructure) { - return resolve(_capabilities); - } else { + getCapabilities() + .then(({ capabilities }) => { + _capabilities = capabilities; + // the minimum privilege for using ML with a basic license is being able to use the datavisualizer. 
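The surrounding `check_capabilities` refactor converts each resolver from a bare `.then()` into a `.then().catch()` chain, so a failed capabilities request also redirects to the access-denied page instead of leaving the promise pending. A reduced sketch of one resolver, with `fetchCapabilities()` as a stand-in for the real `getCapabilities()` call:

```typescript
// Stand-in for the real getCapabilities() service call.
async function fetchCapabilities(): Promise<{
  capabilities: { canGetJobs: boolean };
  isPlatinumOrTrialLicense: boolean;
}> {
  return { capabilities: { canGetJobs: true }, isPlatinumOrTrialLicense: true };
}

const ACCESS_DENIED_PATH = '#/access-denied';

export function checkJobsCapabilitiesSketch(): Promise<{ canGetJobs: boolean }> {
  return new Promise((resolve, reject) => {
    fetchCapabilities()
      .then(({ capabilities, isPlatinumOrTrialLicense }) => {
        // On a basic license, resolve and let the separate license check handle redirects.
        if (capabilities.canGetJobs || isPlatinumOrTrialLicense === false) {
          return resolve(capabilities);
        }
        window.location.href = ACCESS_DENIED_PATH;
        return reject();
      })
      .catch(() => {
        // New in this refactor: request failures also redirect instead of hanging.
        window.location.href = ACCESS_DENIED_PATH;
        return reject();
      });
  });
}
```

The same `.catch()` treatment is applied to the management, create-job, and file-structure resolvers in this change.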
+ // all other functionality is controlled by the return _capabilities object + if (_capabilities.canFindFileStructure) { + return resolve(_capabilities); + } else { + window.location.href = '#/access-denied'; + return reject(); + } + }) + .catch((e) => { window.location.href = '#/access-denied'; return reject(); - } - }); + }); }); } diff --git a/x-pack/plugins/ml/public/application/components/data_grid/data_grid.tsx b/x-pack/plugins/ml/public/application/components/data_grid/data_grid.tsx index 9af7a869e0e568..d4be2eab13d26b 100644 --- a/x-pack/plugins/ml/public/application/components/data_grid/data_grid.tsx +++ b/x-pack/plugins/ml/public/application/components/data_grid/data_grid.tsx @@ -20,10 +20,13 @@ import { EuiFlexItem, EuiSpacer, EuiTitle, + EuiToolTip, } from '@elastic/eui'; import { CoreSetup } from 'src/core/public'; +import { DEFAULT_SAMPLER_SHARD_SIZE } from '../../../../common/constants/field_histograms'; + import { INDEX_STATUS } from '../../data_frame_analytics/common'; import { euiDataGridStyle, euiDataGridToolbarSettings } from './common'; @@ -193,21 +196,31 @@ export const DataGrid: FC = memo( ...(chartsButtonVisible ? { additionalControls: ( - - {i18n.translate('xpack.ml.dataGrid.histogramButtonText', { - defaultMessage: 'Histogram charts', + + > + + {i18n.translate('xpack.ml.dataGrid.histogramButtonText', { + defaultMessage: 'Histogram charts', + })} + + ), } : {}), diff --git a/x-pack/plugins/ml/public/application/components/data_grid/index.ts b/x-pack/plugins/ml/public/application/components/data_grid/index.ts index 80bc6b861f7425..4bbd3595e5a7e4 100644 --- a/x-pack/plugins/ml/public/application/components/data_grid/index.ts +++ b/x-pack/plugins/ml/public/application/components/data_grid/index.ts @@ -12,7 +12,7 @@ export { showDataGridColumnChartErrorMessageToast, useRenderCellValue, } from './common'; -export { fetchChartsData, ChartData } from './use_column_chart'; +export { getFieldType, ChartData } from './use_column_chart'; export { useDataGrid } from './use_data_grid'; export { DataGrid } from './data_grid'; export { diff --git a/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.test.ts b/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.test.ts new file mode 100644 index 00000000000000..1b35ef238d09e7 --- /dev/null +++ b/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.test.ts @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { getFieldType } from './use_column_chart'; + +describe('getFieldType()', () => { + it('should return the Kibana field type for a given EUI data grid schema', () => { + expect(getFieldType('text')).toBe('string'); + expect(getFieldType('datetime')).toBe('date'); + expect(getFieldType('numeric')).toBe('number'); + expect(getFieldType('boolean')).toBe('boolean'); + expect(getFieldType('json')).toBe('object'); + expect(getFieldType('non-aggregatable')).toBe(undefined); + }); +}); diff --git a/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.tsx b/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.tsx index 6b207a999eb52a..a762c44e243bf0 100644 --- a/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.tsx +++ b/x-pack/plugins/ml/public/application/components/data_grid/use_column_chart.tsx @@ -16,8 +16,6 @@ import { i18n } from '@kbn/i18n'; import { KBN_FIELD_TYPES } from '../../../../../../../src/plugins/data/public'; -import { stringHash } from '../../../../common/util/string_utils'; - import { NON_AGGREGATABLE } from './common'; export const hoveredRow$ = new BehaviorSubject(null); @@ -40,7 +38,7 @@ const getXScaleType = (kbnFieldType: KBN_FIELD_TYPES | undefined): XScaleType => } }; -const getFieldType = (schema: EuiDataGridColumn['schema']): KBN_FIELD_TYPES | undefined => { +export const getFieldType = (schema: EuiDataGridColumn['schema']): KBN_FIELD_TYPES | undefined => { if (schema === NON_AGGREGATABLE) { return undefined; } @@ -67,188 +65,6 @@ const getFieldType = (schema: EuiDataGridColumn['schema']): KBN_FIELD_TYPES | un return fieldType; }; -interface NumericColumnStats { - interval: number; - min: number; - max: number; -} -type NumericColumnStatsMap = Record; -const getAggIntervals = async ( - indexPatternTitle: string, - esSearch: (payload: any) => Promise, - query: any, - columnTypes: EuiDataGridColumn[] -): Promise => { - const numericColumns = columnTypes.filter((cT) => { - const fieldType = getFieldType(cT.schema); - return fieldType === KBN_FIELD_TYPES.NUMBER || fieldType === KBN_FIELD_TYPES.DATE; - }); - - if (numericColumns.length === 0) { - return {}; - } - - const minMaxAggs = numericColumns.reduce((aggs, c) => { - const id = stringHash(c.id); - aggs[id] = { - stats: { - field: c.id, - }, - }; - return aggs; - }, {} as Record); - - const respStats = await esSearch({ - index: indexPatternTitle, - size: 0, - body: { - query, - aggs: minMaxAggs, - size: 0, - }, - }); - - return Object.keys(respStats.aggregations).reduce((p, aggName) => { - const stats = [respStats.aggregations[aggName].min, respStats.aggregations[aggName].max]; - if (!stats.includes(null)) { - const delta = respStats.aggregations[aggName].max - respStats.aggregations[aggName].min; - - let aggInterval = 1; - - if (delta > MAX_CHART_COLUMNS) { - aggInterval = Math.round(delta / MAX_CHART_COLUMNS); - } - - if (delta <= 1) { - aggInterval = delta / MAX_CHART_COLUMNS; - } - - p[aggName] = { interval: aggInterval, min: stats[0], max: stats[1] }; - } - - return p; - }, {} as NumericColumnStatsMap); -}; - -interface AggHistogram { - histogram: { - field: string; - interval: number; - }; -} - -interface AggCardinality { - cardinality: { - field: string; - }; -} - -interface AggTerms { - terms: { - field: string; - size: number; - }; -} - -type ChartRequestAgg = AggHistogram | AggCardinality | AggTerms; - -export const fetchChartsData = async ( - indexPatternTitle: string, - esSearch: (payload: any) => Promise, - query: any, - 
columnTypes: EuiDataGridColumn[] -): Promise => { - const aggIntervals = await getAggIntervals(indexPatternTitle, esSearch, query, columnTypes); - - const chartDataAggs = columnTypes.reduce((aggs, c) => { - const fieldType = getFieldType(c.schema); - const id = stringHash(c.id); - if (fieldType === KBN_FIELD_TYPES.NUMBER || fieldType === KBN_FIELD_TYPES.DATE) { - if (aggIntervals[id] !== undefined) { - aggs[`${id}_histogram`] = { - histogram: { - field: c.id, - interval: aggIntervals[id].interval !== 0 ? aggIntervals[id].interval : 1, - }, - }; - } - } else if (fieldType === KBN_FIELD_TYPES.STRING || fieldType === KBN_FIELD_TYPES.BOOLEAN) { - if (fieldType === KBN_FIELD_TYPES.STRING) { - aggs[`${id}_cardinality`] = { - cardinality: { - field: c.id, - }, - }; - } - aggs[`${id}_terms`] = { - terms: { - field: c.id, - size: MAX_CHART_COLUMNS, - }, - }; - } - return aggs; - }, {} as Record); - - if (Object.keys(chartDataAggs).length === 0) { - return []; - } - - const respChartsData = await esSearch({ - index: indexPatternTitle, - size: 0, - body: { - query, - aggs: chartDataAggs, - size: 0, - }, - }); - - const chartsData: ChartData[] = columnTypes.map( - (c): ChartData => { - const fieldType = getFieldType(c.schema); - const id = stringHash(c.id); - - if (fieldType === KBN_FIELD_TYPES.NUMBER || fieldType === KBN_FIELD_TYPES.DATE) { - if (aggIntervals[id] === undefined) { - return { - type: 'numeric', - data: [], - interval: 0, - stats: [0, 0], - id: c.id, - }; - } - - return { - data: respChartsData.aggregations[`${id}_histogram`].buckets, - interval: aggIntervals[id].interval, - stats: [aggIntervals[id].min, aggIntervals[id].max], - type: 'numeric', - id: c.id, - }; - } else if (fieldType === KBN_FIELD_TYPES.STRING || fieldType === KBN_FIELD_TYPES.BOOLEAN) { - return { - type: fieldType === KBN_FIELD_TYPES.STRING ? 'ordinal' : 'boolean', - cardinality: - fieldType === KBN_FIELD_TYPES.STRING - ? 
respChartsData.aggregations[`${id}_cardinality`].value - : 2, - data: respChartsData.aggregations[`${id}_terms`].buckets, - id: c.id, - }; - } - - return { - type: 'unsupported', - id: c.id, - }; - } - ); - - return chartsData; -}; - interface NumericDataItem { key: number; key_as_string?: string; diff --git a/x-pack/plugins/ml/public/application/components/job_selector/job_selector_flyout.tsx b/x-pack/plugins/ml/public/application/components/job_selector/job_selector_flyout.tsx index 803281bcd0ce9d..62a74ed142ccf0 100644 --- a/x-pack/plugins/ml/public/application/components/job_selector/job_selector_flyout.tsx +++ b/x-pack/plugins/ml/public/application/components/job_selector/job_selector_flyout.tsx @@ -193,7 +193,6 @@ export const JobSelectorFlyout: FC = ({ ref={flyoutEl} onClose={onFlyoutClose} aria-labelledby="jobSelectorFlyout" - size="l" data-test-subj="mlFlyoutJobSelector" > diff --git a/x-pack/plugins/ml/public/application/components/job_selector/use_job_selection.ts b/x-pack/plugins/ml/public/application/components/job_selector/use_job_selection.ts index 74c238a0895ca9..0717348d1db229 100644 --- a/x-pack/plugins/ml/public/application/components/job_selector/use_job_selection.ts +++ b/x-pack/plugins/ml/public/application/components/job_selector/use_job_selection.ts @@ -5,16 +5,16 @@ */ import { difference } from 'lodash'; -import { useEffect } from 'react'; +import { useEffect, useMemo } from 'react'; import { i18n } from '@kbn/i18n'; -import { getToastNotifications } from '../../util/dependency_cache'; import { MlJobWithTimeRange } from '../../../../common/types/anomaly_detection_jobs'; import { useUrlState } from '../../util/url_state'; import { getTimeRangeFromSelection } from './job_select_service_utils'; +import { useNotifications } from '../../contexts/kibana'; // check that the ids read from the url exist by comparing them to the // jobs loaded via mlJobsService. @@ -25,49 +25,53 @@ function getInvalidJobIds(jobs: MlJobWithTimeRange[], ids: string[]) { }); } -function warnAboutInvalidJobIds(invalidIds: string[]) { - if (invalidIds.length > 0) { - const toastNotifications = getToastNotifications(); - toastNotifications.addWarning( - i18n.translate('xpack.ml.jobSelect.requestedJobsDoesNotExistWarningMessage', { - defaultMessage: `Requested -{invalidIdsLength, plural, one {job {invalidIds} does not exist} other {jobs {invalidIds} do not exist}}`, - values: { - invalidIdsLength: invalidIds.length, - invalidIds: invalidIds.join(), - }, - }) - ); - } -} - export interface JobSelection { jobIds: string[]; selectedGroups: string[]; } -export const useJobSelection = (jobs: MlJobWithTimeRange[], dateFormatTz: string) => { +export const useJobSelection = (jobs: MlJobWithTimeRange[]) => { const [globalState, setGlobalState] = useUrlState('_g'); + const { toasts: toastNotifications } = useNotifications(); - const jobSelection: JobSelection = { jobIds: [], selectedGroups: [] }; + const tmpIds = useMemo(() => { + const ids = globalState?.ml?.jobIds || []; + return (typeof ids === 'string' ? [ids] : ids).map((id: string) => String(id)); + }, [globalState?.ml?.jobIds]); - const ids = globalState?.ml?.jobIds || []; - const tmpIds = (typeof ids === 'string' ? 
[ids] : ids).map((id: string) => String(id)); - const invalidIds = getInvalidJobIds(jobs, tmpIds); - const validIds = difference(tmpIds, invalidIds); - validIds.sort(); + const invalidIds = useMemo(() => { + return getInvalidJobIds(jobs, tmpIds); + }, [tmpIds]); - jobSelection.jobIds = validIds; - jobSelection.selectedGroups = globalState?.ml?.groups ?? []; + const validIds = useMemo(() => { + const res = difference(tmpIds, invalidIds); + res.sort(); + return res; + }, [tmpIds, invalidIds]); + + const jobSelection: JobSelection = useMemo(() => { + const selectedGroups = globalState?.ml?.groups ?? []; + return { jobIds: validIds, selectedGroups }; + }, [validIds, globalState?.ml?.groups]); useEffect(() => { - warnAboutInvalidJobIds(invalidIds); + if (invalidIds.length > 0) { + toastNotifications.addWarning( + i18n.translate('xpack.ml.jobSelect.requestedJobsDoesNotExistWarningMessage', { + defaultMessage: `Requested +{invalidIdsLength, plural, one {job {invalidIds} does not exist} other {jobs {invalidIds} do not exist}}`, + values: { + invalidIdsLength: invalidIds.length, + invalidIds: invalidIds.join(), + }, + }) + ); + } }, [invalidIds]); useEffect(() => { // if there are no valid ids, warn and then select the first job if (validIds.length === 0 && jobs.length > 0) { - const toastNotifications = getToastNotifications(); toastNotifications.addWarning( i18n.translate('xpack.ml.jobSelect.noJobsSelectedWarningMessage', { defaultMessage: 'No jobs selected, auto selecting first job', diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/hooks/use_index_data.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/hooks/use_index_data.ts index ee0e5c1955eadd..2cecffc9932570 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/hooks/use_index_data.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_creation/hooks/use_index_data.ts @@ -4,15 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
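The `useJobSelection` change above derives the requested, invalid, and valid job ID lists through a chain of `useMemo` hooks so they keep stable identities between renders, which in turn lets the warning toast effect depend only on `invalidIds`. A reduced sketch of that chain, with the job type and warning callback simplified stand-ins:

```typescript
import { useEffect, useMemo } from 'react';
import { difference } from 'lodash';

// Simplified stand-ins for MlJobWithTimeRange and the toast service.
interface Job {
  id: string;
}

function getInvalidJobIds(jobs: Job[], ids: string[]) {
  return ids.filter((id) => !jobs.some((job) => job.id === id));
}

export function useValidJobIds(jobs: Job[], requestedIds: string[], warn: (msg: string) => void) {
  // Normalize the requested IDs once per change of the input.
  const tmpIds = useMemo(() => requestedIds.map((id) => String(id)), [requestedIds]);

  const invalidIds = useMemo(() => getInvalidJobIds(jobs, tmpIds), [jobs, tmpIds]);

  const validIds = useMemo(() => {
    const res = difference(tmpIds, invalidIds);
    res.sort();
    return res;
  }, [tmpIds, invalidIds]);

  useEffect(() => {
    // The warning only fires when the memoized list of invalid IDs changes.
    if (invalidIds.length > 0) {
      warn(`Requested jobs do not exist: ${invalidIds.join()}`);
    }
  }, [invalidIds]);

  return validIds;
}
```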
*/ -import { useEffect } from 'react'; +import { useEffect, useMemo } from 'react'; import { EuiDataGridColumn } from '@elastic/eui'; import { CoreSetup } from 'src/core/public'; import { IndexPattern } from '../../../../../../../../../src/plugins/data/public'; + +import { DataLoader } from '../../../../datavisualizer/index_based/data_loader'; + import { - fetchChartsData, + getFieldType, getDataGridSchemaFromKibanaFieldType, getFieldsFromKibanaIndexPattern, showDataGridColumnChartErrorMessageToast, @@ -103,13 +106,20 @@ export const useIndexData = ( // eslint-disable-next-line react-hooks/exhaustive-deps }, [indexPattern.title, JSON.stringify([query, pagination, sortingColumns])]); + const dataLoader = useMemo(() => new DataLoader(indexPattern, toastNotifications), [ + indexPattern, + ]); + const fetchColumnChartsData = async function () { try { - const columnChartsData = await fetchChartsData( - indexPattern.title, - ml.esSearch, - query, - columns.filter((cT) => dataGrid.visibleColumns.includes(cT.id)) + const columnChartsData = await dataLoader.loadFieldHistograms( + columns + .filter((cT) => dataGrid.visibleColumns.includes(cT.id)) + .map((cT) => ({ + fieldName: cT.id, + type: getFieldType(cT.schema), + })), + query ); dataGrid.setColumnCharts(columnChartsData); } catch (e) { diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/exploration_results_table/use_exploration_results.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/exploration_results_table/use_exploration_results.ts index 796670f6a864df..98dd40986e32b6 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/exploration_results_table/use_exploration_results.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/exploration_results_table/use_exploration_results.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { useEffect } from 'react'; +import { useEffect, useMemo } from 'react'; import { EuiDataGridColumn } from '@elastic/eui'; @@ -12,16 +12,17 @@ import { CoreSetup } from 'src/core/public'; import { IndexPattern } from '../../../../../../../../../../src/plugins/data/public'; +import { DataLoader } from '../../../../../datavisualizer/index_based/data_loader'; + import { - fetchChartsData, getDataGridSchemasFromFieldTypes, + getFieldType, showDataGridColumnChartErrorMessageToast, useDataGrid, useRenderCellValue, UseIndexDataReturnType, } from '../../../../../components/data_grid'; import { SavedSearchQuery } from '../../../../../contexts/ml'; -import { ml } from '../../../../../services/ml_api_service'; import { getIndexData, getIndexFields, DataFrameAnalyticsConfig } from '../../../../common'; import { @@ -72,14 +73,23 @@ export const useExplorationResults = ( // eslint-disable-next-line react-hooks/exhaustive-deps }, [jobConfig && jobConfig.id, dataGrid.pagination, searchQuery, dataGrid.sortingColumns]); + const dataLoader = useMemo( + () => + indexPattern !== undefined ? 
new DataLoader(indexPattern, toastNotifications) : undefined, + [indexPattern] + ); + const fetchColumnChartsData = async function () { try { - if (jobConfig !== undefined) { - const columnChartsData = await fetchChartsData( - jobConfig.dest.index, - ml.esSearch, - searchQuery, - columns.filter((cT) => dataGrid.visibleColumns.includes(cT.id)) + if (jobConfig !== undefined && dataLoader !== undefined) { + const columnChartsData = await dataLoader.loadFieldHistograms( + columns + .filter((cT) => dataGrid.visibleColumns.includes(cT.id)) + .map((cT) => ({ + fieldName: cT.id, + type: getFieldType(cT.schema), + })), + searchQuery ); dataGrid.setColumnCharts(columnChartsData); } diff --git a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/outlier_exploration/use_outlier_data.ts b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/outlier_exploration/use_outlier_data.ts index beb6836bf801fa..90294a09c0adc3 100644 --- a/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/outlier_exploration/use_outlier_data.ts +++ b/x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/outlier_exploration/use_outlier_data.ts @@ -4,19 +4,21 @@ * you may not use this file except in compliance with the Elastic License. */ -import { useEffect } from 'react'; +import { useEffect, useMemo } from 'react'; import { EuiDataGridColumn } from '@elastic/eui'; import { IndexPattern } from '../../../../../../../../../../src/plugins/data/public'; +import { DataLoader } from '../../../../../datavisualizer/index_based/data_loader'; + import { useColorRange, COLOR_RANGE, COLOR_RANGE_SCALE, } from '../../../../../components/color_range_legend'; import { - fetchChartsData, + getFieldType, getDataGridSchemasFromFieldTypes, showDataGridColumnChartErrorMessageToast, useDataGrid, @@ -24,7 +26,6 @@ import { UseIndexDataReturnType, } from '../../../../../components/data_grid'; import { SavedSearchQuery } from '../../../../../contexts/ml'; -import { ml } from '../../../../../services/ml_api_service'; import { getToastNotifications } from '../../../../../util/dependency_cache'; import { getIndexData, getIndexFields, DataFrameAnalyticsConfig } from '../../../../common'; @@ -79,14 +80,25 @@ export const useOutlierData = ( // eslint-disable-next-line react-hooks/exhaustive-deps }, [jobConfig && jobConfig.id, dataGrid.pagination, searchQuery, dataGrid.sortingColumns]); + const dataLoader = useMemo( + () => + indexPattern !== undefined + ? 
new DataLoader(indexPattern, getToastNotifications()) + : undefined, + [indexPattern] + ); + const fetchColumnChartsData = async function () { try { - if (jobConfig !== undefined) { - const columnChartsData = await fetchChartsData( - jobConfig.dest.index, - ml.esSearch, - searchQuery, - columns.filter((cT) => dataGrid.visibleColumns.includes(cT.id)) + if (jobConfig !== undefined && dataLoader !== undefined) { + const columnChartsData = await dataLoader.loadFieldHistograms( + columns + .filter((cT) => dataGrid.visibleColumns.includes(cT.id)) + .map((cT) => ({ + fieldName: cT.id, + type: getFieldType(cT.schema), + })), + searchQuery ); dataGrid.setColumnCharts(columnChartsData); } diff --git a/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/index.ts b/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/index.ts index 5618f701e4c5fd..50278c300d1032 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/index.ts +++ b/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/index.ts @@ -5,4 +5,4 @@ */ export { FieldVisConfig } from './field_vis_config'; -export { FieldRequestConfig } from './request'; +export { FieldHistogramRequestConfig, FieldRequestConfig } from './request'; diff --git a/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/request.ts b/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/request.ts index 9a886cbc899c24..fd4888b8729c18 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/request.ts +++ b/x-pack/plugins/ml/public/application/datavisualizer/index_based/common/request.ts @@ -4,6 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ +import { KBN_FIELD_TYPES } from '../../../../../../../../src/plugins/data/public'; + import { ML_JOB_FIELD_TYPES } from '../../../../../common/constants/field_types'; export interface FieldRequestConfig { @@ -11,3 +13,8 @@ export interface FieldRequestConfig { type: ML_JOB_FIELD_TYPES; cardinality: number; } + +export interface FieldHistogramRequestConfig { + fieldName: string; + type?: KBN_FIELD_TYPES; +} diff --git a/x-pack/plugins/ml/public/application/datavisualizer/index_based/data_loader/data_loader.ts b/x-pack/plugins/ml/public/application/datavisualizer/index_based/data_loader/data_loader.ts index a08821c65bfe79..34f86ffa187883 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/index_based/data_loader/data_loader.ts +++ b/x-pack/plugins/ml/public/application/datavisualizer/index_based/data_loader/data_loader.ts @@ -6,15 +6,17 @@ import { i18n } from '@kbn/i18n'; -import { getToastNotifications } from '../../../util/dependency_cache'; +import { CoreSetup } from 'src/core/public'; + import { IndexPattern } from '../../../../../../../../src/plugins/data/public'; import { SavedSearchQuery } from '../../../contexts/ml'; import { OMIT_FIELDS } from '../../../../../common/constants/field_types'; import { IndexPatternTitle } from '../../../../../common/types/kibana'; +import { DEFAULT_SAMPLER_SHARD_SIZE } from '../../../../../common/constants/field_histograms'; import { ml } from '../../../services/ml_api_service'; -import { FieldRequestConfig } from '../common'; +import { FieldHistogramRequestConfig, FieldRequestConfig } from '../common'; // Maximum number of examples to obtain for text type fields. 
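In the three hooks above, the old `fetchChartsData`/`ml.esSearch` call is replaced by mapping the visible grid columns to `{ fieldName, type }` pairs and passing them to `DataLoader.loadFieldHistograms`. A condensed sketch of that mapping; the loader interface and the `getFieldType` body are simplified stand-ins for the real exports touched in this change:

```typescript
import { EuiDataGridColumn } from '@elastic/eui';

// Assumed, simplified shapes mirroring FieldHistogramRequestConfig and the
// DataLoader method used by these hooks.
interface FieldHistogramRequestConfig {
  fieldName: string;
  type?: string;
}

interface HistogramLoader {
  loadFieldHistograms(fields: FieldHistogramRequestConfig[], query: unknown): Promise<any[]>;
}

// Stand-in for the getFieldType() now exported from the data grid module,
// following the schema-to-type mapping covered by the new unit test.
function getFieldType(schema: EuiDataGridColumn['schema']): string | undefined {
  switch (schema) {
    case 'datetime':
      return 'date';
    case 'numeric':
      return 'number';
    case 'boolean':
      return 'boolean';
    case 'json':
      return 'object';
    case 'non-aggregatable':
      return undefined;
    default:
      return 'string';
  }
}

export async function fetchColumnCharts(
  dataLoader: HistogramLoader,
  columns: EuiDataGridColumn[],
  visibleColumns: string[],
  query: unknown
) {
  // Only request histograms for columns currently visible in the grid.
  const fields = columns
    .filter((c) => visibleColumns.includes(c.id))
    .map((c) => ({ fieldName: c.id, type: getFieldType(c.schema) }));

  return dataLoader.loadFieldHistograms(fields, query);
}
```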
const MAX_EXAMPLES_DEFAULT: number = 10; @@ -23,10 +25,15 @@ export class DataLoader { private _indexPattern: IndexPattern; private _indexPatternTitle: IndexPatternTitle = ''; private _maxExamples: number = MAX_EXAMPLES_DEFAULT; + private _toastNotifications: CoreSetup['notifications']['toasts']; - constructor(indexPattern: IndexPattern, kibanaConfig: any) { + constructor( + indexPattern: IndexPattern, + toastNotifications: CoreSetup['notifications']['toasts'] + ) { this._indexPattern = indexPattern; this._indexPatternTitle = indexPattern.title; + this._toastNotifications = toastNotifications; } async loadOverallData( @@ -90,10 +97,24 @@ export class DataLoader { return stats; } + async loadFieldHistograms( + fields: FieldHistogramRequestConfig[], + query: string | SavedSearchQuery, + samplerShardSize = DEFAULT_SAMPLER_SHARD_SIZE + ): Promise { + const stats = await ml.getVisualizerFieldHistograms({ + indexPatternTitle: this._indexPatternTitle, + query, + fields, + samplerShardSize, + }); + + return stats; + } + displayError(err: any) { - const toastNotifications = getToastNotifications(); if (err.statusCode === 500) { - toastNotifications.addDanger( + this._toastNotifications.addDanger( i18n.translate('xpack.ml.datavisualizer.dataLoader.internalServerErrorMessage', { defaultMessage: 'Error loading data in index {index}. {message}. ' + @@ -105,7 +126,7 @@ export class DataLoader { }) ); } else { - toastNotifications.addDanger( + this._toastNotifications.addDanger( i18n.translate('xpack.ml.datavisualizer.page.errorLoadingDataMessage', { defaultMessage: 'Error loading data in index {index}. {message}', values: { diff --git a/x-pack/plugins/ml/public/application/datavisualizer/index_based/page.tsx b/x-pack/plugins/ml/public/application/datavisualizer/index_based/page.tsx index 97b4043c9fd644..3c332d305d7e99 100644 --- a/x-pack/plugins/ml/public/application/datavisualizer/index_based/page.tsx +++ b/x-pack/plugins/ml/public/application/datavisualizer/index_based/page.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
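The `DataLoader` above now takes the toast service through its constructor rather than reading it from `dependency_cache`, so callers such as the analytics hooks can pass whichever `notifications.toasts` instance they already hold. A trimmed sketch of the constructor and error path, with the toast type reduced to the single method used here:

```typescript
// Reduced stand-in for CoreSetup['notifications']['toasts'].
interface Toasts {
  addDanger(message: string): void;
}

interface IndexPatternLike {
  title: string;
}

export class DataLoaderSketch {
  private readonly _indexPatternTitle: string;
  private readonly _toasts: Toasts;

  // The toast service is injected instead of being pulled from a global cache.
  constructor(indexPattern: IndexPatternLike, toasts: Toasts) {
    this._indexPatternTitle = indexPattern.title;
    this._toasts = toasts;
  }

  displayError(err: { statusCode?: number; message?: string }) {
    // All error reporting goes through the injected service.
    this._toasts.addDanger(
      `Error loading data in index ${this._indexPatternTitle}. ${err.message ?? ''}`
    );
  }
}
```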
*/ -import React, { FC, Fragment, useEffect, useState } from 'react'; +import React, { FC, Fragment, useEffect, useMemo, useState } from 'react'; import { merge } from 'rxjs'; import { i18n } from '@kbn/i18n'; @@ -43,6 +43,7 @@ import { kbnTypeToMLJobType } from '../../util/field_types_utils'; import { useTimefilter } from '../../contexts/kibana'; import { timeBasedIndexCheck, getQueryFromSavedSearch } from '../../util/index_utils'; import { getTimeBucketsFromCache } from '../../util/time_buckets'; +import { getToastNotifications } from '../../util/dependency_cache'; import { useUrlState } from '../../util/url_state'; import { FieldRequestConfig, FieldVisConfig } from './common'; import { ActionsPanel } from './components/actions_panel'; @@ -107,7 +108,10 @@ export const Page: FC = () => { autoRefreshSelector: true, }); - const dataLoader = new DataLoader(currentIndexPattern, kibanaConfig); + const dataLoader = useMemo(() => new DataLoader(currentIndexPattern, getToastNotifications()), [ + currentIndexPattern, + ]); + const [globalState, setGlobalState] = useUrlState('_g'); useEffect(() => { if (globalState?.time !== undefined) { diff --git a/x-pack/plugins/ml/public/application/explorer/__snapshots__/explorer_swimlane.test.tsx.snap b/x-pack/plugins/ml/public/application/explorer/__snapshots__/explorer_swimlane.test.tsx.snap index 16b5ecc8a4600a..4adaac1319d537 100644 --- a/x-pack/plugins/ml/public/application/explorer/__snapshots__/explorer_swimlane.test.tsx.snap +++ b/x-pack/plugins/ml/public/application/explorer/__snapshots__/explorer_swimlane.test.tsx.snap @@ -1,3 +1,3 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`ExplorerSwimlane Overall swimlane 1`] = `"
[swim lane markup: lane label "Overall" with 30-minute time axis labels from 2017-02-07T00:00:00Z to 2017-02-07T16:00:00Z]
"`; +exports[`ExplorerSwimlane Overall swimlane 1`] = `"
[swim lane markup: lane label "Overall" with 30-minute time axis labels from 2017-02-07T00:00:00Z to 2017-02-07T16:00:00Z]
"`; diff --git a/x-pack/plugins/ml/public/application/explorer/anomaly_timeline.tsx b/x-pack/plugins/ml/public/application/explorer/anomaly_timeline.tsx index e00e2e1e1e2eb1..45dada84de20a7 100644 --- a/x-pack/plugins/ml/public/application/explorer/anomaly_timeline.tsx +++ b/x-pack/plugins/ml/public/application/explorer/anomaly_timeline.tsx @@ -4,9 +4,8 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { FC, useCallback, useMemo, useRef, useState } from 'react'; +import React, { FC, useMemo, useState } from 'react'; import { isEqual } from 'lodash'; -import DragSelect from 'dragselect'; import { EuiPanel, EuiPopover, @@ -22,21 +21,17 @@ import { } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; import { FormattedMessage } from '@kbn/i18n/react'; -import { DRAG_SELECT_ACTION, SWIMLANE_TYPE, VIEW_BY_JOB_LABEL } from './explorer_constants'; +import { OVERALL_LABEL, SWIMLANE_TYPE, VIEW_BY_JOB_LABEL } from './explorer_constants'; import { AddToDashboardControl } from './add_to_dashboard_control'; import { useMlKibana } from '../contexts/kibana'; import { TimeBuckets } from '../util/time_buckets'; import { UI_SETTINGS } from '../../../../../../src/plugins/data/common'; -import { - ALLOW_CELL_RANGE_SELECTION, - dragSelect$, - explorerService, -} from './explorer_dashboard_service'; +import { explorerService } from './explorer_dashboard_service'; import { ExplorerState } from './reducers/explorer_reducer'; import { hasMatchingPoints } from './has_matching_points'; import { ExplorerNoInfluencersFound } from './components/explorer_no_influencers_found/explorer_no_influencers_found'; import { SwimlaneContainer } from './swimlane_container'; -import { OverallSwimlaneData, ViewBySwimLaneData } from './explorer_utils'; +import { AppStateSelectedCells, OverallSwimlaneData, ViewBySwimLaneData } from './explorer_utils'; import { NoOverallData } from './components/no_overall_data'; function mapSwimlaneOptionsToEuiOptions(options: string[]) { @@ -63,10 +58,6 @@ export const AnomalyTimeline: FC = React.memo( const [isMenuOpen, setIsMenuOpen] = useState(false); const [isAddDashboardsActive, setIsAddDashboardActive] = useState(false); - const isSwimlaneSelectActive = useRef(false); - // make sure dragSelect is only available if the mouse pointer is actually over a swimlane - const disableDragSelectOnMouseLeave = useRef(true); - const canEditDashboards = capabilities.dashboard?.createNew ?? 
false; const timeBuckets = useMemo(() => { @@ -78,48 +69,6 @@ export const AnomalyTimeline: FC = React.memo( }); }, [uiSettings]); - const dragSelect = useMemo( - () => - new DragSelect({ - selectorClass: 'ml-swimlane-selector', - selectables: document.querySelectorAll('.sl-cell'), - callback(elements) { - if (elements.length > 1 && !ALLOW_CELL_RANGE_SELECTION) { - elements = [elements[0]]; - } - - if (elements.length > 0) { - dragSelect$.next({ - action: DRAG_SELECT_ACTION.NEW_SELECTION, - elements, - }); - } - - disableDragSelectOnMouseLeave.current = true; - }, - onDragStart(e) { - let target = e.target as HTMLElement; - while (target && target !== document.body && !target.classList.contains('sl-cell')) { - target = target.parentNode as HTMLElement; - } - if (ALLOW_CELL_RANGE_SELECTION && target !== document.body) { - dragSelect$.next({ - action: DRAG_SELECT_ACTION.DRAG_START, - }); - disableDragSelectOnMouseLeave.current = false; - } - }, - onElementSelect() { - if (ALLOW_CELL_RANGE_SELECTION) { - dragSelect$.next({ - action: DRAG_SELECT_ACTION.ELEMENT_SELECT, - }); - } - }, - }), - [] - ); - const { filterActive, filteredFields, @@ -138,42 +87,6 @@ export const AnomalyTimeline: FC = React.memo( loading, } = explorerState; - const setSwimlaneSelectActive = useCallback((active: boolean) => { - if (isSwimlaneSelectActive.current && !active && disableDragSelectOnMouseLeave.current) { - dragSelect.stop(); - isSwimlaneSelectActive.current = active; - return; - } - if (!isSwimlaneSelectActive.current && active) { - dragSelect.start(); - dragSelect.clearSelection(); - dragSelect.setSelectables(document.querySelectorAll('.sl-cell')); - isSwimlaneSelectActive.current = active; - } - }, []); - const onSwimlaneEnterHandler = () => setSwimlaneSelectActive(true); - const onSwimlaneLeaveHandler = () => setSwimlaneSelectActive(false); - - // Listens to render updates of the swimlanes to update dragSelect - const swimlaneRenderDoneListener = useCallback(() => { - dragSelect.clearSelection(); - dragSelect.setSelectables(document.querySelectorAll('.sl-cell')); - }, []); - - // Listener for click events in the swimlane to load corresponding anomaly data. - const swimlaneCellClick = useCallback( - (selectedCellsUpdate: any) => { - // If selectedCells is an empty object we clear any existing selection, - // otherwise we save the new selection in AppState and update the Explorer. - if (Object.keys(selectedCellsUpdate).length === 0) { - setSelectedCells(); - } else { - setSelectedCells(selectedCellsUpdate); - } - }, - [setSelectedCells] - ); - const menuItems = useMemo(() => { const items = []; if (canEditDashboards) { @@ -193,6 +106,19 @@ export const AnomalyTimeline: FC = React.memo( return items; }, [canEditDashboards]); + // If selecting a cell in the 'view by' swimlane, indicate the corresponding time in the Overall swimlane. + const overallCellSelection: AppStateSelectedCells | undefined = useMemo(() => { + if (!selectedCells) return; + + if (selectedCells.type === SWIMLANE_TYPE.OVERALL) return selectedCells; + + return { + type: SWIMLANE_TYPE.OVERALL, + lanes: [OVERALL_LABEL], + times: selectedCells.times, + }; + }, [selectedCells]); + return ( <> @@ -284,86 +210,68 @@ export const AnomalyTimeline: FC = React.memo( -
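The `overallCellSelection` memo above replaces the old imperative `highlightOverall` call: a 'view by' selection is mirrored onto the single Overall lane while keeping its time span, and an Overall selection passes through unchanged. A reduced sketch of that mapping, with the constants and cell type trimmed to the fields used here:

```typescript
import { useMemo } from 'react';

// Trimmed stand-ins for the explorer constants and AppStateSelectedCells type.
const SWIMLANE_TYPE = { OVERALL: 'overall', VIEW_BY: 'viewBy' } as const;
const OVERALL_LABEL = 'Overall';

interface SelectedCells {
  type: string;
  lanes: string[];
  times: number[];
}

export function useOverallCellSelection(selectedCells?: SelectedCells) {
  return useMemo<SelectedCells | undefined>(() => {
    if (!selectedCells) return;

    // An Overall selection is passed through untouched.
    if (selectedCells.type === SWIMLANE_TYPE.OVERALL) return selectedCells;

    // A 'view by' selection is mirrored onto the single Overall lane, keeping its time span.
    return {
      type: SWIMLANE_TYPE.OVERALL,
      lanes: [OVERALL_LABEL],
      times: selectedCells.times,
    };
  }, [selectedCells]);
}
```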
+ filterActive={filterActive} + maskAll={maskAll} + timeBuckets={timeBuckets} + swimlaneData={overallSwimlaneData as OverallSwimlaneData} + swimlaneType={SWIMLANE_TYPE.OVERALL} + selection={overallCellSelection} + onCellsSelection={setSelectedCells} + onResize={explorerService.setSwimlaneContainerWidth} + isLoading={loading} + noDataWarning={} + /> + + + + {viewBySwimlaneOptions.length > 0 && ( explorerService.setSwimlaneContainerWidth(width)} - isLoading={loading} - noDataWarning={} + onCellsSelection={setSelectedCells} + onResize={explorerService.setSwimlaneContainerWidth} + fromPage={viewByFromPage} + perPage={viewByPerPage} + swimlaneLimit={swimlaneLimit} + onPaginationChange={({ perPage: perPageUpdate, fromPage: fromPageUpdate }) => { + if (perPageUpdate) { + explorerService.setViewByPerPage(perPageUpdate); + } + if (fromPageUpdate) { + explorerService.setViewByFromPage(fromPageUpdate); + } + }} + isLoading={loading || viewBySwimlaneDataLoading} + noDataWarning={ + typeof viewBySwimlaneFieldName === 'string' ? ( + viewBySwimlaneFieldName === VIEW_BY_JOB_LABEL ? ( + + ) : ( + + ) + ) : null + } /> -
- - - - {viewBySwimlaneOptions.length > 0 && ( - <> - <> -
- explorerService.setSwimlaneContainerWidth(width)} - fromPage={viewByFromPage} - perPage={viewByPerPage} - swimlaneLimit={swimlaneLimit} - onPaginationChange={({ perPage: perPageUpdate, fromPage: fromPageUpdate }) => { - if (perPageUpdate) { - explorerService.setViewByPerPage(perPageUpdate); - } - if (fromPageUpdate) { - explorerService.setViewByFromPage(fromPageUpdate); - } - }} - isLoading={loading || viewBySwimlaneDataLoading} - noDataWarning={ - typeof viewBySwimlaneFieldName === 'string' ? ( - viewBySwimlaneFieldName === VIEW_BY_JOB_LABEL ? ( - - ) : ( - - ) - ) : null - } - /> -
- - )}
{isAddDashboardsActive && selectedJobs && ( diff --git a/x-pack/plugins/ml/public/application/explorer/explorer_constants.ts b/x-pack/plugins/ml/public/application/explorer/explorer_constants.ts index 21e13cb029d69e..7440bf32134133 100644 --- a/x-pack/plugins/ml/public/application/explorer/explorer_constants.ts +++ b/x-pack/plugins/ml/public/application/explorer/explorer_constants.ts @@ -62,6 +62,11 @@ export const MAX_INFLUENCER_FIELD_NAMES = 50; export const VIEW_BY_JOB_LABEL = i18n.translate('xpack.ml.explorer.jobIdLabel', { defaultMessage: 'job ID', }); + +export const OVERALL_LABEL = i18n.translate('xpack.ml.explorer.overallLabel', { + defaultMessage: 'Overall', +}); + /** * Hard limitation for the size of terms * aggregations on influencers values. diff --git a/x-pack/plugins/ml/public/application/explorer/explorer_dashboard_service.ts b/x-pack/plugins/ml/public/application/explorer/explorer_dashboard_service.ts index 1429bf08583618..4d697bcda1a065 100644 --- a/x-pack/plugins/ml/public/application/explorer/explorer_dashboard_service.ts +++ b/x-pack/plugins/ml/public/application/explorer/explorer_dashboard_service.ts @@ -18,17 +18,12 @@ import { DeepPartial } from '../../../common/types/common'; import { jobSelectionActionCreator } from './actions'; import { ExplorerChartsData } from './explorer_charts/explorer_charts_container_service'; -import { DRAG_SELECT_ACTION, EXPLORER_ACTION } from './explorer_constants'; +import { EXPLORER_ACTION } from './explorer_constants'; import { AppStateSelectedCells, TimeRangeBounds } from './explorer_utils'; import { explorerReducer, getExplorerDefaultState, ExplorerState } from './reducers'; export const ALLOW_CELL_RANGE_SELECTION = true; -export const dragSelect$ = new Subject<{ - action: typeof DRAG_SELECT_ACTION[keyof typeof DRAG_SELECT_ACTION]; - elements?: any[]; -}>(); - type ExplorerAction = Action | Observable; export const explorerAction$ = new Subject(); @@ -54,7 +49,7 @@ const explorerState$: Observable = explorerFilteredAction$.pipe( shareReplay(1) ); -interface ExplorerAppState { +export interface ExplorerAppState { mlExplorerSwimlane: { selectedType?: string; selectedLanes?: string[]; diff --git a/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.test.tsx b/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.test.tsx index df450a33a52df7..f7ae5f232999ef 100644 --- a/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.test.tsx +++ b/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.test.tsx @@ -10,7 +10,6 @@ import moment from 'moment-timezone'; import { mountWithIntl } from 'test_utils/enzyme_helpers'; import React from 'react'; -import { dragSelect$ } from './explorer_dashboard_service'; import { ExplorerSwimlane } from './explorer_swimlane'; import { TimeBuckets as TimeBucketsClass } from '../util/time_buckets'; import { ChartTooltipService } from '../components/chart_tooltip'; @@ -27,13 +26,15 @@ jest.mock('d3', () => { }; }); -jest.mock('./explorer_dashboard_service', () => ({ - dragSelect$: { - subscribe: jest.fn(() => ({ - unsubscribe: jest.fn(), - })), - }, -})); +jest.mock('@elastic/eui', () => { + return { + htmlIdGenerator: jest.fn(() => { + return jest.fn(() => { + return 'test-gen-id'; + }); + }), + }; +}); function getExplorerSwimlaneMocks() { const swimlaneData = ({ laneLabels: [] } as unknown) as OverallSwimlaneData; @@ -52,6 +53,7 @@ function getExplorerSwimlaneMocks() { timeBuckets, swimlaneData, tooltipService, + parentRef: {} as React.RefObject, }; } @@ -74,50 
+76,42 @@ describe('ExplorerSwimlane', () => { test('Minimal initialization', () => { const mocks = getExplorerSwimlaneMocks(); - const swimlaneRenderDoneListener = jest.fn(); const wrapper = mountWithIntl( ); expect(wrapper.html()).toBe( - `
` + - `
` + '
' ); // test calls to mock functions // @ts-ignore - expect(dragSelect$.subscribe.mock.calls.length).toBeGreaterThanOrEqual(1); - // @ts-ignore - expect(wrapper.instance().dragSelectSubscriber.unsubscribe.mock.calls).toHaveLength(0); - // @ts-ignore expect(mocks.timeBuckets.setInterval.mock.calls.length).toBeGreaterThanOrEqual(1); // @ts-ignore expect(mocks.timeBuckets.getScaledDateFormat.mock.calls.length).toBeGreaterThanOrEqual(1); - expect(swimlaneRenderDoneListener.mock.calls.length).toBeGreaterThanOrEqual(1); }); test('Overall swimlane', () => { const mocks = getExplorerSwimlaneMocks(); - const swimlaneRenderDoneListener = jest.fn(); const wrapper = mountWithIntl( ); @@ -125,13 +119,8 @@ describe('ExplorerSwimlane', () => { // test calls to mock functions // @ts-ignore - expect(dragSelect$.subscribe.mock.calls.length).toBeGreaterThanOrEqual(1); - // @ts-ignore - expect(wrapper.instance().dragSelectSubscriber.unsubscribe.mock.calls).toHaveLength(0); - // @ts-ignore expect(mocks.timeBuckets.setInterval.mock.calls.length).toBeGreaterThanOrEqual(1); // @ts-ignore expect(mocks.timeBuckets.getScaledDateFormat.mock.calls.length).toBeGreaterThanOrEqual(1); - expect(swimlaneRenderDoneListener.mock.calls.length).toBeGreaterThanOrEqual(1); }); }); diff --git a/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.tsx b/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.tsx index aa386288ac7e08..0f92278e904455 100644 --- a/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.tsx +++ b/x-pack/plugins/ml/public/application/explorer/explorer_swimlane.tsx @@ -13,15 +13,17 @@ import './_explorer.scss'; import _ from 'lodash'; import d3 from 'd3'; import moment from 'moment'; +import DragSelect from 'dragselect'; import { i18n } from '@kbn/i18n'; -import { Subscription } from 'rxjs'; +import { Subject, Subscription } from 'rxjs'; import { TooltipValue } from '@elastic/charts'; +import { htmlIdGenerator } from '@elastic/eui'; import { formatHumanReadableDateTime } from '../util/date_utils'; import { numTicksForDateFormat } from '../util/chart_utils'; import { getSeverityColor } from '../../../common/util/anomaly_utils'; import { mlEscape } from '../util/string_utils'; -import { ALLOW_CELL_RANGE_SELECTION, dragSelect$ } from './explorer_dashboard_service'; +import { ALLOW_CELL_RANGE_SELECTION } from './explorer_dashboard_service'; import { DRAG_SELECT_ACTION, SwimlaneType } from './explorer_constants'; import { EMPTY_FIELD_VALUE_LABEL } from '../timeseriesexplorer/components/entity_control/entity_control'; import { TimeBuckets as TimeBucketsClass } from '../util/time_buckets'; @@ -29,7 +31,7 @@ import { ChartTooltipService, ChartTooltipValue, } from '../components/chart_tooltip/chart_tooltip_service'; -import { OverallSwimlaneData, ViewBySwimLaneData } from './explorer_utils'; +import { AppStateSelectedCells, OverallSwimlaneData, ViewBySwimLaneData } from './explorer_utils'; const SCSS = { mlDragselectDragging: 'mlDragselectDragging', @@ -56,7 +58,6 @@ export interface ExplorerSwimlaneProps { filterActive?: boolean; maskAll?: boolean; timeBuckets: InstanceType; - swimlaneCellClick?: Function; swimlaneData: OverallSwimlaneData | ViewBySwimLaneData; swimlaneType: SwimlaneType; selection?: { @@ -64,8 +65,15 @@ export interface ExplorerSwimlaneProps { type: string; times: number[]; }; - swimlaneRenderDoneListener?: Function; + onCellsSelection: (payload?: AppStateSelectedCells) => void; tooltipService: ChartTooltipService; + 'data-test-subj'?: string; + /** + * We need to be 
aware of the parent element in order to set + * the height so the swim lane widget doesn't jump during loading + * or page changes. + */ + parentRef: React.RefObject; } export class ExplorerSwimlane extends React.Component { @@ -78,13 +86,70 @@ export class ExplorerSwimlane extends React.Component { rootNode = React.createRef(); + isSwimlaneSelectActive = false; + // make sure dragSelect is only available if the mouse pointer is actually over a swimlane + disableDragSelectOnMouseLeave = true; + + dragSelect$ = new Subject<{ + action: typeof DRAG_SELECT_ACTION[keyof typeof DRAG_SELECT_ACTION]; + elements?: any[]; + }>(); + + /** + * Unique id for swim lane instance + */ + rootNodeId = htmlIdGenerator()(); + + /** + * Initialize drag select instance + */ + dragSelect = new DragSelect({ + selectorClass: 'ml-swimlane-selector', + selectables: document.querySelectorAll(`#${this.rootNodeId} .sl-cell`), + callback: (elements) => { + if (elements.length > 1 && !ALLOW_CELL_RANGE_SELECTION) { + elements = [elements[0]]; + } + + if (elements.length > 0) { + this.dragSelect$.next({ + action: DRAG_SELECT_ACTION.NEW_SELECTION, + elements, + }); + } + + this.disableDragSelectOnMouseLeave = true; + }, + onDragStart: (e) => { + // make sure we don't trigger text selection on label + e.preventDefault(); + let target = e.target as HTMLElement; + while (target && target !== document.body && !target.classList.contains('sl-cell')) { + target = target.parentNode as HTMLElement; + } + if (ALLOW_CELL_RANGE_SELECTION && target !== document.body) { + this.dragSelect$.next({ + action: DRAG_SELECT_ACTION.DRAG_START, + }); + this.disableDragSelectOnMouseLeave = false; + } + }, + onElementSelect: () => { + if (ALLOW_CELL_RANGE_SELECTION) { + this.dragSelect$.next({ + action: DRAG_SELECT_ACTION.ELEMENT_SELECT, + }); + } + }, + }); + componentDidMount() { // property for data comparison to be able to filter // consecutive click events with the same data. 
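The swim lane refactor around this point moves the `DragSelect` instance and its RxJS subject from module scope into the component and scopes the selectable cells to a unique root node id from `htmlIdGenerator`. A minimal sketch of the per-instance wiring, reduced to the options and methods that appear in this change:

```typescript
import { Subject } from 'rxjs';
import DragSelect from 'dragselect';
import { htmlIdGenerator } from '@elastic/eui';

// Per-instance drag-select wiring: each swim lane owns its own subject and a
// unique root node id, so selections cannot leak across swim lane instances.
export class SwimlaneDragSelect {
  // Unique id used to scope the CSS selector to this swim lane only.
  readonly rootNodeId = htmlIdGenerator()();

  // Instance-level stream of drag-select events (previously a shared module-level subject).
  readonly dragSelect$ = new Subject<{ action: string; elements?: any[] }>();

  private readonly dragSelect = new DragSelect({
    selectorClass: 'ml-swimlane-selector',
    selectables: document.querySelectorAll(`#${this.rootNodeId} .sl-cell`),
    callback: (elements: any[]) => {
      if (elements.length > 0) {
        this.dragSelect$.next({ action: 'newSelection', elements });
      }
    },
  });

  // Called after the swim lane re-renders so newly created cells become selectable.
  refreshSelectables() {
    this.dragSelect.clearSelection();
    this.dragSelect.setSelectables(document.querySelectorAll(`#${this.rootNodeId} .sl-cell`));
  }

  destroy() {
    this.dragSelect.stop();
  }
}
```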
let previousSelectedData: any = null; // Listen for dragSelect events - this.dragSelectSubscriber = dragSelect$.subscribe(({ action, elements = [] }) => { + this.dragSelectSubscriber = this.dragSelect$.subscribe(({ action, elements = [] }) => { const element = d3.select(this.rootNode.current!.parentNode!); const { swimlaneType } = this.props; @@ -154,7 +219,7 @@ export class ExplorerSwimlane extends React.Component { } selectCell(cellsToSelect: any[], { laneLabels, bucketScore, times }: SelectedData) { - const { selection, swimlaneCellClick = () => {}, swimlaneData, swimlaneType } = this.props; + const { selection, swimlaneData, swimlaneType } = this.props; let triggerNewSelection = false; @@ -184,7 +249,7 @@ export class ExplorerSwimlane extends React.Component { } if (triggerNewSelection === false) { - swimlaneCellClick({}); + this.swimlaneCellClick(); return; } @@ -194,7 +259,7 @@ export class ExplorerSwimlane extends React.Component { times: d3.extent(times), type: swimlaneType, }; - swimlaneCellClick(selectedCells); + this.swimlaneCellClick(selectedCells); } highlightOverall(times: number[]) { @@ -208,10 +273,8 @@ export class ExplorerSwimlane extends React.Component { } highlightSelection(cellsToSelect: Node[], laneLabels: string[], times: number[]) { - const { swimlaneType } = this.props; - - // This selects both overall and viewby swimlane - const wrapper = d3.selectAll('.mlExplorerSwimlane'); + // This selects the embeddable container + const wrapper = d3.select(`#${this.rootNodeId}`); wrapper.selectAll('.lane-label').classed('lane-label-masked', true); wrapper @@ -232,13 +295,12 @@ export class ExplorerSwimlane extends React.Component { rootParent.selectAll('.lane-label').classed('lane-label-masked', function (this: HTMLElement) { return laneLabels.indexOf(d3.select(this).text()) === -1; }); - - if (swimlaneType === 'viewBy') { - // If selecting a cell in the 'view by' swimlane, indicate the corresponding time in the Overall swimlane. 
- this.highlightOverall(times); - } } + /** + * TODO should happen with props instead of imperative check + * @param maskAll + */ maskIrrelevantSwimlanes(maskAll: boolean) { if (maskAll === true) { // This selects both overall and viewby swimlane @@ -288,7 +350,6 @@ export class ExplorerSwimlane extends React.Component { filterActive, maskAll, timeBuckets, - swimlaneCellClick, swimlaneData, swimlaneType, selection, @@ -358,9 +419,12 @@ export class ExplorerSwimlane extends React.Component { const numBuckets = Math.round((endTime - startTime) / stepSecs); const cellHeight = 30; const height = (lanes.length + 1) * cellHeight - 10; - const laneLabelWidth = 170; + // Set height for the wrapper element + if (this.props.parentRef.current) { + this.props.parentRef.current.style.height = `${height + 20}px`; + } - element.style('height', `${height + 20}px`); + const laneLabelWidth = 170; const swimlanes = element.select('.ml-swimlanes'); swimlanes.html(''); @@ -413,8 +477,8 @@ export class ExplorerSwimlane extends React.Component { } }) .on('click', () => { - if (selection && typeof selection.lanes !== 'undefined' && swimlaneCellClick) { - swimlaneCellClick({}); + if (selection && typeof selection.lanes !== 'undefined') { + this.swimlaneCellClick(); } }) .each(function (this: HTMLElement) { @@ -567,9 +631,7 @@ export class ExplorerSwimlane extends React.Component { element.selectAll('.sl-cell-inner').classed('sl-cell-inner-masked', true); } - if (this.props.swimlaneRenderDoneListener) { - this.props.swimlaneRenderDoneListener(); - } + this.swimlaneRenderDoneListener(); if ( (swimlaneType !== selectedType || @@ -593,10 +655,7 @@ export class ExplorerSwimlane extends React.Component { selectedTimeExtent[1] <= endTime ) { // Locate matching cell - look for exact time, otherwise closest before. - const swimlaneElements = element.select('.ml-swimlanes'); - const laneCells = swimlaneElements.selectAll( - `div[data-lane-label="${mlEscape(selectedLane)}"]` - ); + const laneCells = element.selectAll(`div[data-lane-label="${mlEscape(selectedLane)}"]`); laneCells.each(function (this: HTMLElement) { const cell = d3.select(this); @@ -632,9 +691,58 @@ export class ExplorerSwimlane extends React.Component { return true; } + /** + * Listener for click events in the swim lane and execute a prop callback. + * @param selectedCellsUpdate + */ + swimlaneCellClick(selectedCellsUpdate?: AppStateSelectedCells) { + // If selectedCells is an empty object we clear any existing selection, + // otherwise we save the new selection in AppState and update the Explorer. + if (!selectedCellsUpdate) { + this.props.onCellsSelection(); + } else { + this.props.onCellsSelection(selectedCellsUpdate); + } + } + + /** + * Listens to render updates of the swim lanes to update dragSelect + */ + swimlaneRenderDoneListener() { + this.dragSelect.clearSelection(); + this.dragSelect.setSelectables(document.querySelectorAll(`#${this.rootNodeId} .sl-cell`)); + } + + setSwimlaneSelectActive(active: boolean) { + if (this.isSwimlaneSelectActive && !active && this.disableDragSelectOnMouseLeave) { + this.dragSelect.stop(); + this.isSwimlaneSelectActive = active; + return; + } + if (!this.isSwimlaneSelectActive && active) { + this.dragSelect.start(); + this.dragSelect.clearSelection(); + this.dragSelect.setSelectables(document.querySelectorAll(`#${this.rootNodeId} .sl-cell`)); + this.isSwimlaneSelectActive = active; + } + } + render() { const { swimlaneType } = this.props; - return
; + return ( +
+
+
+ ); } } diff --git a/x-pack/plugins/ml/public/application/explorer/explorer_utils.d.ts b/x-pack/plugins/ml/public/application/explorer/explorer_utils.d.ts index 05fdb52e1ccb28..0faa20295996cf 100644 --- a/x-pack/plugins/ml/public/application/explorer/explorer_utils.d.ts +++ b/x-pack/plugins/ml/public/application/explorer/explorer_utils.d.ts @@ -7,6 +7,7 @@ import { Moment } from 'moment'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; +import { SwimlaneType } from './explorer_constants'; interface ClearedSelectedAnomaliesState { selectedCells: undefined; @@ -182,9 +183,9 @@ export declare interface FilterData { } export declare interface AppStateSelectedCells { - type: string; + type: SwimlaneType; lanes: string[]; times: number[]; - showTopFieldValues: boolean; - viewByFieldName: string; + showTopFieldValues?: boolean; + viewByFieldName?: string; } diff --git a/x-pack/plugins/ml/public/application/explorer/reducers/explorer_reducer/job_selection_change.ts b/x-pack/plugins/ml/public/application/explorer/reducers/explorer_reducer/job_selection_change.ts index 49f5794273a04d..4d5ad65065fc3e 100644 --- a/x-pack/plugins/ml/public/application/explorer/reducers/explorer_reducer/job_selection_change.ts +++ b/x-pack/plugins/ml/public/application/explorer/reducers/explorer_reducer/job_selection_change.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { isEqual } from 'lodash'; import { ActionPayload } from '../../explorer_dashboard_service'; import { getDefaultSwimlaneData, getInfluencers } from '../../explorer_utils'; @@ -17,7 +18,11 @@ export const jobSelectionChange = (state: ExplorerState, payload: ActionPayload) noInfluencersConfigured: getInfluencers(selectedJobs).length === 0, overallSwimlaneData: getDefaultSwimlaneData(), selectedJobs, - viewByFromPage: 1, + // currently job selection set asynchronously so + // we want to preserve the pagination from the url state + // on initial load + viewByFromPage: + !state.selectedJobs || isEqual(state.selectedJobs, selectedJobs) ? state.viewByFromPage : 1, }; // clear filter if selected jobs have no influencers diff --git a/x-pack/plugins/ml/public/application/explorer/swimlane_container.tsx b/x-pack/plugins/ml/public/application/explorer/swimlane_container.tsx index e34e1d26c9cab2..51ea0f00d5f6ac 100644 --- a/x-pack/plugins/ml/public/application/explorer/swimlane_container.tsx +++ b/x-pack/plugins/ml/public/application/explorer/swimlane_container.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import React, { FC, useCallback, useState } from 'react'; +import React, { FC, useCallback, useRef, useState } from 'react'; import { EuiText, EuiLoadingChart, @@ -49,7 +49,7 @@ export function isViewBySwimLaneData(arg: any): arg is ViewBySwimLaneData { * @constructor */ export const SwimlaneContainer: FC< - Omit & { + Omit & { onResize: (width: number) => void; fromPage?: number; perPage?: number; @@ -70,6 +70,7 @@ export const SwimlaneContainer: FC< ...props }) => { const [chartWidth, setChartWidth] = useState(0); + const wrapperRef = useRef(null); const resizeHandler = useCallback( throttle((e: { width: number; height: number }) => { @@ -111,36 +112,40 @@ export const SwimlaneContainer: FC< data-test-subj="mlSwimLaneContainer" > - - {showSwimlane && !isLoading && ( - - {(tooltipService) => ( - + + {showSwimlane && !isLoading && ( + + {(tooltipService) => ( + + )} + + )} + {isLoading && ( + + - )} - - )} - {isLoading && ( - - + )} + {!isLoading && !showSwimlane && ( + {noDataWarning}} /> - - )} - {!isLoading && !showSwimlane && ( - {noDataWarning}} - /> - )} - + )} + +
+ {isPaginationVisible && ( = ({ jobsWithTim const [lastRefresh, setLastRefresh] = useState(0); const timefilter = useTimefilter({ timeRangeSelector: true, autoRefreshSelector: true }); - const { jobIds } = useJobSelection(jobsWithTimeRange, getDateFormatTz()); + const { jobIds } = useJobSelection(jobsWithTimeRange); const refresh = useRefresh(); useEffect(() => { @@ -109,6 +109,14 @@ const ExplorerUrlStateManager: FC = ({ jobsWithTim } }, [globalState?.time?.from, globalState?.time?.to]); + useEffect(() => { + if (jobIds.length > 0) { + explorerService.updateJobSelection(jobIds); + } else { + explorerService.clearJobs(); + } + }, [JSON.stringify(jobIds)]); + useEffect(() => { const viewByFieldName = appState?.mlExplorerSwimlane?.viewByFieldName; if (viewByFieldName !== undefined) { @@ -119,15 +127,17 @@ const ExplorerUrlStateManager: FC = ({ jobsWithTim if (filterData !== undefined) { explorerService.setFilterData(filterData); } - }, []); - useEffect(() => { - if (jobIds.length > 0) { - explorerService.updateJobSelection(jobIds); - } else { - explorerService.clearJobs(); + const viewByPerPage = (appState as ExplorerAppState)?.mlExplorerSwimlane?.viewByPerPage; + if (viewByPerPage) { + explorerService.setViewByPerPage(viewByPerPage); } - }, [JSON.stringify(jobIds)]); + + const viewByFromPage = (appState as ExplorerAppState)?.mlExplorerSwimlane?.viewByFromPage; + if (viewByFromPage) { + explorerService.setViewByFromPage(viewByFromPage); + } + }, []); const [explorerData, loadExplorerData] = useExplorerData(); useEffect(() => { diff --git a/x-pack/plugins/ml/public/application/routing/use_refresh.ts b/x-pack/plugins/ml/public/application/routing/use_refresh.ts index c247fd9765e966..539ce6f88a421e 100644 --- a/x-pack/plugins/ml/public/application/routing/use_refresh.ts +++ b/x-pack/plugins/ml/public/application/routing/use_refresh.ts @@ -6,7 +6,7 @@ import { useObservable } from 'react-use'; import { merge } from 'rxjs'; -import { map, skip } from 'rxjs/operators'; +import { map } from 'rxjs/operators'; import { useMemo } from 'react'; import { annotationsRefresh$ } from '../services/annotations_service'; @@ -29,9 +29,7 @@ export const useRefresh = () => { return merge( mlTimefilterRefresh$, timefilter.getTimeUpdate$().pipe( - // skip initially emitted value - skip(1), - map((_) => { + map(() => { const { from, to } = timefilter.getTime(); return { lastRefresh: Date.now(), timeRange: { start: from, end: to } }; }) diff --git a/x-pack/plugins/ml/public/application/services/anomaly_timeline_service.ts b/x-pack/plugins/ml/public/application/services/anomaly_timeline_service.ts index f2e362f754f2b9..2bdb758be874c2 100644 --- a/x-pack/plugins/ml/public/application/services/anomaly_timeline_service.ts +++ b/x-pack/plugins/ml/public/application/services/anomaly_timeline_service.ts @@ -5,7 +5,6 @@ */ import { IUiSettingsClient } from 'kibana/public'; -import { i18n } from '@kbn/i18n'; import { TimefilterContract, TimeRange, @@ -18,7 +17,7 @@ import { SwimlaneData, ViewBySwimLaneData, } from '../explorer/explorer_utils'; -import { VIEW_BY_JOB_LABEL } from '../explorer/explorer_constants'; +import { OVERALL_LABEL, VIEW_BY_JOB_LABEL } from '../explorer/explorer_constants'; import { MlResultsService } from './results_service'; /** @@ -288,9 +287,7 @@ export class AnomalyTimelineService { searchBounds: Required, interval: number ): OverallSwimlaneData { - const overallLabel = i18n.translate('xpack.ml.explorer.overallLabel', { - defaultMessage: 'Overall', - }); + const overallLabel = OVERALL_LABEL; const 
dataset: OverallSwimlaneData = { laneLabels: [overallLabel], points: [], @@ -302,7 +299,7 @@ export class AnomalyTimelineService { // Store the earliest and latest times of the data returned by the ES aggregations, // These will be used for calculating the earliest and latest times for the swim lane charts. Object.entries(scoresByTime).forEach(([timeMs, score]) => { - const time = Number(timeMs) / 1000; + const time = +timeMs / 1000; dataset.points.push({ laneLabel: overallLabel, time, @@ -346,7 +343,7 @@ export class AnomalyTimelineService { maxScoreByLaneLabel[influencerFieldValue] = 0; Object.entries(influencerData).forEach(([timeMs, anomalyScore]) => { - const time = Number(timeMs) / 1000; + const time = +timeMs / 1000; dataset.points.push({ laneLabel: influencerFieldValue, time, diff --git a/x-pack/plugins/ml/public/application/services/ml_api_service/index.ts b/x-pack/plugins/ml/public/application/services/ml_api_service/index.ts index d1b6f95f32bed5..599e4d4bb8a10e 100644 --- a/x-pack/plugins/ml/public/application/services/ml_api_service/index.ts +++ b/x-pack/plugins/ml/public/application/services/ml_api_service/index.ts @@ -27,7 +27,10 @@ import { ModelSnapshot, } from '../../../../common/types/anomaly_detection_jobs'; import { ES_AGGREGATION } from '../../../../common/constants/aggregation_types'; -import { FieldRequestConfig } from '../../datavisualizer/index_based/common'; +import { + FieldHistogramRequestConfig, + FieldRequestConfig, +} from '../../datavisualizer/index_based/common'; import { DataRecognizerConfigResponse, Module } from '../../../../common/types/modules'; import { getHttp } from '../../util/dependency_cache'; @@ -494,6 +497,30 @@ export function mlApiServicesProvider(httpService: HttpService) { }); }, + getVisualizerFieldHistograms({ + indexPatternTitle, + query, + fields, + samplerShardSize, + }: { + indexPatternTitle: string; + query: any; + fields: FieldHistogramRequestConfig[]; + samplerShardSize?: number; + }) { + const body = JSON.stringify({ + query, + fields, + samplerShardSize, + }); + + return httpService.http({ + path: `${basePath()}/data_visualizer/get_field_histograms/${indexPatternTitle}`, + method: 'POST', + body, + }); + }, + getVisualizerOverallStats({ indexPatternTitle, query, diff --git a/x-pack/plugins/ml/public/application/services/new_job_capabilities_service.ts b/x-pack/plugins/ml/public/application/services/new_job_capabilities_service.ts index bc65ebe7a5fac7..e2313de5c88b04 100644 --- a/x-pack/plugins/ml/public/application/services/new_job_capabilities_service.ts +++ b/x-pack/plugins/ml/public/application/services/new_job_capabilities_service.ts @@ -20,7 +20,7 @@ import { import { ml } from './ml_api_service'; import { getIndexPatternAndSavedSearch } from '../util/index_utils'; -// called in the angular routing resolve block to initialize the +// called in the routing resolve block to initialize the // newJobCapsService with the currently selected index pattern export function loadNewJobCapabilities( indexPatternId: string, diff --git a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable.tsx b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable.tsx index 83070a5d94ba09..9f96b73d67c578 100644 --- a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable.tsx +++ b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable.tsx @@ -14,8 +14,8 @@ import { EmbeddableInput, EmbeddableOutput, IContainer, + IEmbeddable, } from 
'../../../../../../src/plugins/embeddable/public'; -import { MlStartDependencies } from '../../plugin'; import { EmbeddableSwimLaneContainer } from './embeddable_swim_lane_container'; import { AnomalyDetectorService } from '../../application/services/anomaly_detector_service'; import { JobId } from '../../../common/types/anomaly_detection_jobs'; @@ -27,6 +27,9 @@ import { TimeRange, } from '../../../../../../src/plugins/data/common'; import { SwimlaneType } from '../../application/explorer/explorer_constants'; +import { MlDependencies } from '../../application/app'; +import { AppStateSelectedCells } from '../../application/explorer/explorer_utils'; +import { SWIM_LANE_SELECTION_TRIGGER } from '../../ui_actions/triggers'; export const ANOMALY_SWIMLANE_EMBEDDABLE_TYPE = 'ml_anomaly_swimlane'; @@ -49,16 +52,26 @@ export interface AnomalySwimlaneEmbeddableCustomInput { timeRange: TimeRange; } +export interface EditSwimlanePanelContext { + embeddable: IEmbeddable; +} + +export interface SwimLaneDrilldownContext extends EditSwimlanePanelContext { + /** + * Optional data provided by swim lane selection + */ + data?: AppStateSelectedCells; +} + export type AnomalySwimlaneEmbeddableInput = EmbeddableInput & AnomalySwimlaneEmbeddableCustomInput; export type AnomalySwimlaneEmbeddableOutput = EmbeddableOutput & AnomalySwimlaneEmbeddableCustomOutput; export interface AnomalySwimlaneEmbeddableCustomOutput { - jobIds: JobId[]; - swimlaneType: SwimlaneType; - viewBy?: string; perPage?: number; + fromPage?: number; + interval?: number; } export interface AnomalySwimlaneServices { @@ -68,7 +81,7 @@ export interface AnomalySwimlaneServices { export type AnomalySwimlaneEmbeddableServices = [ CoreStart, - MlStartDependencies, + MlDependencies, AnomalySwimlaneServices ]; @@ -82,16 +95,13 @@ export class AnomalySwimlaneEmbeddable extends Embeddable< constructor( initialInput: AnomalySwimlaneEmbeddableInput, - private services: [CoreStart, MlStartDependencies, AnomalySwimlaneServices], + public services: [CoreStart, MlDependencies, AnomalySwimlaneServices], parent?: IContainer ) { super( initialInput, { - jobIds: initialInput.jobIds, - swimlaneType: initialInput.swimlaneType, defaultTitle: initialInput.title, - ...(initialInput.viewBy ? 
{ viewBy: initialInput.viewBy } : {}), }, parent ); @@ -107,12 +117,12 @@ export class AnomalySwimlaneEmbeddable extends Embeddable< { - this.updateInput(input); - }} + onInputChange={this.updateInput.bind(this)} + onOutputChange={this.updateOutput.bind(this)} /> , node @@ -129,4 +139,8 @@ export class AnomalySwimlaneEmbeddable extends Embeddable< public reload() { this.reload$.next(); } + + public supportedTriggers() { + return [SWIM_LANE_SELECTION_TRIGGER as typeof SWIM_LANE_SELECTION_TRIGGER]; + } } diff --git a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable_factory.ts b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable_factory.ts index 0d587b428d89b6..14fbf77544b216 100644 --- a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable_factory.ts +++ b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_embeddable_factory.ts @@ -19,19 +19,22 @@ import { AnomalySwimlaneEmbeddableInput, AnomalySwimlaneEmbeddableServices, } from './anomaly_swimlane_embeddable'; -import { MlStartDependencies } from '../../plugin'; import { HttpService } from '../../application/services/http_service'; import { AnomalyDetectorService } from '../../application/services/anomaly_detector_service'; import { AnomalyTimelineService } from '../../application/services/anomaly_timeline_service'; import { mlResultsServiceProvider } from '../../application/services/results_service'; import { resolveAnomalySwimlaneUserInput } from './anomaly_swimlane_setup_flyout'; import { mlApiServicesProvider } from '../../application/services/ml_api_service'; +import { MlPluginStart, MlStartDependencies } from '../../plugin'; +import { MlDependencies } from '../../application/app'; export class AnomalySwimlaneEmbeddableFactory implements EmbeddableFactoryDefinition { public readonly type = ANOMALY_SWIMLANE_EMBEDDABLE_TYPE; - constructor(private getStartServices: StartServicesAccessor) {} + constructor( + private getStartServices: StartServicesAccessor + ) {} public async isEditable() { return true; @@ -64,7 +67,11 @@ export class AnomalySwimlaneEmbeddableFactory mlResultsServiceProvider(mlApiServicesProvider(httpService)) ); - return [coreStart, pluginsStart, { anomalyDetectorService, anomalyTimelineService }]; + return [ + coreStart, + pluginsStart as MlDependencies, + { anomalyDetectorService, anomalyTimelineService }, + ]; } public async create( diff --git a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_initializer.tsx b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_initializer.tsx index be9a332e51dbcc..e5a13adca05db4 100644 --- a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_initializer.tsx +++ b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/anomaly_swimlane_initializer.tsx @@ -17,6 +17,7 @@ import { EuiModalHeaderTitle, EuiSelect, EuiFieldText, + EuiModal, } from '@elastic/eui'; import { FormattedMessage } from '@kbn/i18n/react'; import { i18n } from '@kbn/i18n'; @@ -33,7 +34,6 @@ export interface AnomalySwimlaneInitializerProps { panelTitle: string; swimlaneType: SwimlaneType; viewBy?: string; - limit?: number; }) => void; onCancel: () => void; } @@ -81,7 +81,7 @@ export const AnomalySwimlaneInitializer: FC = ( (swimlaneType === SWIMLANE_TYPE.VIEW_BY && !!viewBySwimlaneFieldName)); return ( -
+ = ( /> -
+ ); }; diff --git a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.test.tsx b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.test.tsx index 846a3f543c2d4d..23045834eae5f0 100644 --- a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.test.tsx +++ b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.test.tsx @@ -6,18 +6,25 @@ import React from 'react'; import { render } from '@testing-library/react'; -import { EmbeddableSwimLaneContainer } from './embeddable_swim_lane_container'; +import { + EmbeddableSwimLaneContainer, + ExplorerSwimlaneContainerProps, +} from './embeddable_swim_lane_container'; import { BehaviorSubject, Observable } from 'rxjs'; import { I18nProvider } from '@kbn/i18n/react'; import { + AnomalySwimlaneEmbeddable, AnomalySwimlaneEmbeddableInput, AnomalySwimlaneServices, } from './anomaly_swimlane_embeddable'; import { CoreStart } from 'kibana/public'; -import { MlStartDependencies } from '../../plugin'; import { useSwimlaneInputResolver } from './swimlane_input_resolver'; import { SWIMLANE_TYPE } from '../../application/explorer/explorer_constants'; import { SwimlaneContainer } from '../../application/explorer/swimlane_container'; +import { MlDependencies } from '../../application/app'; +import { uiActionsPluginMock } from 'src/plugins/ui_actions/public/mocks'; +import { TriggerContract } from 'src/plugins/ui_actions/public/triggers'; +import { TriggerId } from 'src/plugins/ui_actions/public'; jest.mock('./swimlane_input_resolver', () => ({ useSwimlaneInputResolver: jest.fn(() => { @@ -37,13 +44,30 @@ const defaultOptions = { wrapper: I18nProvider }; describe('ExplorerSwimlaneContainer', () => { let embeddableInput: BehaviorSubject>; let refresh: BehaviorSubject; - let services: [CoreStart, MlStartDependencies, AnomalySwimlaneServices]; + let services: jest.Mocked<[CoreStart, MlDependencies, AnomalySwimlaneServices]>; + let embeddableContext: AnomalySwimlaneEmbeddable; + let trigger: jest.Mocked>; + const onInputChange = jest.fn(); + const onOutputChange = jest.fn(); beforeEach(() => { + embeddableContext = { id: 'test-id' } as AnomalySwimlaneEmbeddable; embeddableInput = new BehaviorSubject({ id: 'test-swimlane-embeddable', } as Partial); + + trigger = ({ exec: jest.fn() } as unknown) as jest.Mocked>; + + const uiActionsMock = uiActionsPluginMock.createStartContract(); + uiActionsMock.getTrigger.mockReturnValue(trigger); + + services = ([ + {}, + { + uiActions: uiActionsMock, + }, + ] as unknown) as ExplorerSwimlaneContainerProps['services']; }); test('should render a swimlane with a valid embeddable input', async () => { @@ -74,12 +98,14 @@ describe('ExplorerSwimlaneContainer', () => { render( } services={services} refresh={refresh} onInputChange={onInputChange} + onOutputChange={onOutputChange} />, defaultOptions ); @@ -110,6 +136,7 @@ describe('ExplorerSwimlaneContainer', () => { const { findByText } = render( @@ -117,6 +144,7 @@ describe('ExplorerSwimlaneContainer', () => { services={services} refresh={refresh} onInputChange={onInputChange} + onOutputChange={onOutputChange} />, defaultOptions ); diff --git a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.tsx b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.tsx index 5d91bdb41df6af..8ee4e391fcddee 100644 --- a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.tsx +++ 
b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/embeddable_swim_lane_container.tsx @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import React, { FC, useState } from 'react'; +import React, { FC, useCallback, useState, useEffect } from 'react'; import { EuiCallOut } from '@elastic/eui'; import { Observable } from 'rxjs'; import { CoreStart } from 'kibana/public'; import { FormattedMessage } from '@kbn/i18n/react'; -import { MlStartDependencies } from '../../plugin'; import { + AnomalySwimlaneEmbeddable, AnomalySwimlaneEmbeddableInput, AnomalySwimlaneEmbeddableOutput, AnomalySwimlaneServices, @@ -22,25 +22,36 @@ import { isViewBySwimLaneData, SwimlaneContainer, } from '../../application/explorer/swimlane_container'; +import { AppStateSelectedCells } from '../../application/explorer/explorer_utils'; +import { MlDependencies } from '../../application/app'; +import { SWIM_LANE_SELECTION_TRIGGER } from '../../ui_actions/triggers'; export interface ExplorerSwimlaneContainerProps { id: string; + embeddableContext: AnomalySwimlaneEmbeddable; embeddableInput: Observable; - services: [CoreStart, MlStartDependencies, AnomalySwimlaneServices]; + services: [CoreStart, MlDependencies, AnomalySwimlaneServices]; refresh: Observable; - onInputChange: (output: Partial) => void; + onInputChange: (input: Partial) => void; + onOutputChange: (output: Partial) => void; } export const EmbeddableSwimLaneContainer: FC = ({ id, + embeddableContext, embeddableInput, services, refresh, onInputChange, + onOutputChange, }) => { const [chartWidth, setChartWidth] = useState(0); const [fromPage, setFromPage] = useState(1); + const [{}, { uiActions }] = services; + + const [selectedCells, setSelectedCells] = useState(); + const [ swimlaneType, swimlaneData, @@ -58,6 +69,28 @@ export const EmbeddableSwimLaneContainer: FC = ( fromPage ); + useEffect(() => { + onOutputChange({ + perPage, + fromPage, + interval: swimlaneData?.interval, + }); + }, [perPage, fromPage, swimlaneData]); + + const onCellsSelection = useCallback( + (update?: AppStateSelectedCells) => { + setSelectedCells(update); + + if (update) { + uiActions.getTrigger(SWIM_LANE_SELECTION_TRIGGER).exec({ + embeddable: embeddableContext, + data: update, + }); + } + }, + [swimlaneData, perPage, fromPage] + ); + if (error) { return ( = ( data-test-subj="mlAnomalySwimlaneEmbeddableWrapper" > { - setChartWidth(width); - }} + onResize={setChartWidth} + selection={selectedCells} + onCellsSelection={onCellsSelection} onPaginationChange={(update) => { if (update.fromPage) { setFromPage(update.fromPage); diff --git a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/swimlane_input_resolver.ts b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/swimlane_input_resolver.ts index 9ed6f88150f68d..f17c779a002527 100644 --- a/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/swimlane_input_resolver.ts +++ b/x-pack/plugins/ml/public/embeddables/anomaly_swimlane/swimlane_input_resolver.ts @@ -40,6 +40,7 @@ import { parseInterval } from '../../../common/util/parse_interval'; import { AnomalyDetectorService } from '../../application/services/anomaly_detector_service'; import { isViewBySwimLaneData } from '../../application/explorer/swimlane_container'; import { ViewMode } from '../../../../../../src/plugins/embeddable/public'; +import { CONTROLLED_BY_SWIM_LANE_FILTER } from '../../ui_actions/apply_influencer_filters_action'; const FETCH_RESULTS_DEBOUNCE_MS = 500; @@ -240,7 +241,9 @@ export function 
processFilters(filters: Filter[], query: Query) { const must = [inputQuery]; const mustNot = []; for (const filter of filters) { - if (filter.meta.disabled) continue; + // ignore disabled filters as well as created by swim lane selection + if (filter.meta.disabled || filter.meta.controlledBy === CONTROLLED_BY_SWIM_LANE_FILTER) + continue; const { meta: { negate, type, key: fieldName }, diff --git a/x-pack/plugins/ml/public/embeddables/index.ts b/x-pack/plugins/ml/public/embeddables/index.ts index 5e9d54645b5168..db9f094d5721e0 100644 --- a/x-pack/plugins/ml/public/embeddables/index.ts +++ b/x-pack/plugins/ml/public/embeddables/index.ts @@ -4,15 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { CoreSetup } from 'kibana/public'; import { AnomalySwimlaneEmbeddableFactory } from './anomaly_swimlane'; -import { MlPluginStart, MlStartDependencies } from '../plugin'; +import { MlCoreSetup } from '../plugin'; import { EmbeddableSetup } from '../../../../../src/plugins/embeddable/public'; -export function registerEmbeddables( - embeddable: EmbeddableSetup, - core: CoreSetup -) { +export function registerEmbeddables(embeddable: EmbeddableSetup, core: MlCoreSetup) { const anomalySwimlaneEmbeddableFactory = new AnomalySwimlaneEmbeddableFactory( core.getStartServices ); diff --git a/x-pack/plugins/ml/public/plugin.ts b/x-pack/plugins/ml/public/plugin.ts index 7f7544a44efa7f..449d8baa2a1847 100644 --- a/x-pack/plugins/ml/public/plugin.ts +++ b/x-pack/plugins/ml/public/plugin.ts @@ -13,7 +13,7 @@ import { PluginInitializerContext, } from 'kibana/public'; import { ManagementSetup } from 'src/plugins/management/public'; -import { SharePluginStart } from 'src/plugins/share/public'; +import { SharePluginSetup, SharePluginStart, UrlGeneratorState } from 'src/plugins/share/public'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; import { DataPublicPluginStart } from 'src/plugins/data/public'; @@ -28,14 +28,16 @@ import { PLUGIN_ID, PLUGIN_ICON } from '../common/constants/app'; import { registerFeature } from './register_feature'; import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/public'; import { registerEmbeddables } from './embeddables'; -import { UiActionsSetup } from '../../../../src/plugins/ui_actions/public'; +import { UiActionsSetup, UiActionsStart } from '../../../../src/plugins/ui_actions/public'; import { registerMlUiActions } from './ui_actions'; import { KibanaLegacyStart } from '../../../../src/plugins/kibana_legacy/public'; +import { MlUrlGenerator, MlUrlGeneratorState, ML_APP_URL_GENERATOR } from './url_generator'; export interface MlStartDependencies { data: DataPublicPluginStart; share: SharePluginStart; kibanaLegacy: KibanaLegacyStart; + uiActions: UiActionsStart; } export interface MlSetupDependencies { security?: SecurityPluginSetup; @@ -47,13 +49,30 @@ export interface MlSetupDependencies { embeddable: EmbeddableSetup; uiActions: UiActionsSetup; kibanaVersion: string; - share: SharePluginStart; + share: SharePluginSetup; +} + +declare module '../../../../src/plugins/share/public' { + export interface UrlGeneratorStateMapping { + [ML_APP_URL_GENERATOR]: UrlGeneratorState; + } } +export type MlCoreSetup = CoreSetup; + export class MlPlugin implements Plugin { constructor(private initializerContext: PluginInitializerContext) {} - setup(core: CoreSetup, pluginsSetup: MlSetupDependencies) { + setup(core: MlCoreSetup, pluginsSetup: MlSetupDependencies) { + const baseUrl = core.http.basePath.prepend('/app/ml'); 
+ + pluginsSetup.share.urlGenerators.registerUrlGenerator( + new MlUrlGenerator({ + appBasePath: baseUrl, + useHash: core.uiSettings.get('state:storeInSessionStorage'), + }) + ); + core.application.register({ id: PLUGIN_ID, title: i18n.translate('xpack.ml.plugin.title', { @@ -80,7 +99,7 @@ export class MlPlugin implements Plugin { licenseManagement: pluginsSetup.licenseManagement, home: pluginsSetup.home, embeddable: pluginsSetup.embeddable, - uiActions: pluginsSetup.uiActions, + uiActions: pluginsStart.uiActions, kibanaVersion, }, { @@ -96,10 +115,8 @@ export class MlPlugin implements Plugin { registerFeature(pluginsSetup.home); initManagementSection(pluginsSetup, core); - - registerMlUiActions(pluginsSetup.uiActions, core); - registerEmbeddables(pluginsSetup.embeddable, core); + registerMlUiActions(pluginsSetup.uiActions, core); return {}; } @@ -113,6 +130,7 @@ export class MlPlugin implements Plugin { }); return {}; } + public stop() {} } diff --git a/x-pack/plugins/ml/public/ui_actions/apply_influencer_filters_action.tsx b/x-pack/plugins/ml/public/ui_actions/apply_influencer_filters_action.tsx new file mode 100644 index 00000000000000..3af39993d39fdd --- /dev/null +++ b/x-pack/plugins/ml/public/ui_actions/apply_influencer_filters_action.tsx @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { i18n } from '@kbn/i18n'; +import { ActionContextMapping, createAction } from '../../../../../src/plugins/ui_actions/public'; +import { + AnomalySwimlaneEmbeddable, + SwimLaneDrilldownContext, +} from '../embeddables/anomaly_swimlane/anomaly_swimlane_embeddable'; +import { MlCoreSetup } from '../plugin'; +import { SWIMLANE_TYPE, VIEW_BY_JOB_LABEL } from '../application/explorer/explorer_constants'; +import { Filter, FilterStateStore } from '../../../../../src/plugins/data/common'; + +export const APPLY_INFLUENCER_FILTERS_ACTION = 'applyInfluencerFiltersAction'; + +export const CONTROLLED_BY_SWIM_LANE_FILTER = 'anomaly-swim-lane'; + +export function createApplyInfluencerFiltersAction( + getStartServices: MlCoreSetup['getStartServices'] +) { + return createAction({ + id: 'apply-to-current-view', + type: APPLY_INFLUENCER_FILTERS_ACTION, + getIconType(context: ActionContextMapping[typeof APPLY_INFLUENCER_FILTERS_ACTION]): string { + return 'filter'; + }, + getDisplayName() { + return i18n.translate('xpack.ml.actions.applyInfluencersFiltersTitle', { + defaultMessage: 'Filter for value', + }); + }, + async execute({ data }: SwimLaneDrilldownContext) { + if (!data) { + throw new Error('No swim lane selection data provided'); + } + const [, pluginStart] = await getStartServices(); + const filterManager = pluginStart.data.query.filterManager; + + filterManager.addFilters( + data.lanes.map((influencerValue) => { + return { + $state: { + store: FilterStateStore.APP_STATE, + }, + meta: { + alias: i18n.translate('xpack.ml.actions.influencerFilterAliasLabel', { + defaultMessage: 'Influencer {labelValue}', + values: { + labelValue: `${data.viewByFieldName}:${influencerValue}`, + }, + }), + controlledBy: CONTROLLED_BY_SWIM_LANE_FILTER, + disabled: false, + key: data.viewByFieldName, + negate: false, + params: { + query: influencerValue, + }, + type: 'phrase', + }, + query: { + match_phrase: { + [data.viewByFieldName!]: influencerValue, + }, + }, + }; + }) + ); + }, + async isCompatible({ 
embeddable, data }: SwimLaneDrilldownContext) { + // Only compatible with view by influencer swim lanes and single selection + return ( + embeddable instanceof AnomalySwimlaneEmbeddable && + data !== undefined && + data.type === SWIMLANE_TYPE.VIEW_BY && + data.viewByFieldName !== VIEW_BY_JOB_LABEL && + data.lanes.length === 1 + ); + }, + }); +} diff --git a/x-pack/plugins/ml/public/ui_actions/apply_time_range_action.tsx b/x-pack/plugins/ml/public/ui_actions/apply_time_range_action.tsx new file mode 100644 index 00000000000000..ec59ba20acf98c --- /dev/null +++ b/x-pack/plugins/ml/public/ui_actions/apply_time_range_action.tsx @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { i18n } from '@kbn/i18n'; +import moment from 'moment'; +import { ActionContextMapping, createAction } from '../../../../../src/plugins/ui_actions/public'; +import { + AnomalySwimlaneEmbeddable, + SwimLaneDrilldownContext, +} from '../embeddables/anomaly_swimlane/anomaly_swimlane_embeddable'; +import { MlCoreSetup } from '../plugin'; + +export const APPLY_TIME_RANGE_SELECTION_ACTION = 'applyTimeRangeSelectionAction'; + +export function createApplyTimeRangeSelectionAction( + getStartServices: MlCoreSetup['getStartServices'] +) { + return createAction({ + id: 'apply-time-range-selection', + type: APPLY_TIME_RANGE_SELECTION_ACTION, + getIconType(context: ActionContextMapping[typeof APPLY_TIME_RANGE_SELECTION_ACTION]): string { + return 'timeline'; + }, + getDisplayName: () => + i18n.translate('xpack.ml.actions.applyTimeRangeSelectionTitle', { + defaultMessage: 'Apply time range selection', + }), + async execute({ embeddable, data }: SwimLaneDrilldownContext) { + if (!data) { + throw new Error('No swim lane selection data provided'); + } + const [, pluginStart] = await getStartServices(); + const timefilter = pluginStart.data.query.timefilter.timefilter; + const { interval } = embeddable.getOutput(); + + if (!interval) { + throw new Error('Interval is required to set a time range'); + } + + let [from, to] = data.times; + from = from * 1000; + // extend bounds with the interval + to = to * 1000 + interval * 1000; + + timefilter.setTime({ + from: moment(from), + to: moment(to), + mode: 'absolute', + }); + }, + async isCompatible({ embeddable, data }: SwimLaneDrilldownContext) { + return embeddable instanceof AnomalySwimlaneEmbeddable && data !== undefined; + }, + }); +} diff --git a/x-pack/plugins/ml/public/ui_actions/edit_swimlane_panel_action.tsx b/x-pack/plugins/ml/public/ui_actions/edit_swimlane_panel_action.tsx index 0db41c1ed104e0..cfd90f92e32380 100644 --- a/x-pack/plugins/ml/public/ui_actions/edit_swimlane_panel_action.tsx +++ b/x-pack/plugins/ml/public/ui_actions/edit_swimlane_panel_action.tsx @@ -4,24 +4,19 @@ * you may not use this file except in compliance with the Elastic License. 
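For reference, a minimal sketch of the time range conversion performed by `createApplyTimeRangeSelectionAction` above. The selection times and interval below are made-up sample values; only the scaling from epoch seconds to milliseconds and the one-interval extension of the end bound mirror the action's logic.

```typescript
import moment from 'moment';

// Hypothetical swim lane selection (epoch seconds) and bucket interval (seconds).
const times: [number, number] = [1588230000, 1588233600];
const interval = 3600;

// Scale to milliseconds and extend the end bound by one bucket interval,
// as done in createApplyTimeRangeSelectionAction before calling timefilter.setTime.
const from = times[0] * 1000;
const to = times[1] * 1000 + interval * 1000;

const absoluteTimeRange = {
  from: moment(from),
  to: moment(to),
  mode: 'absolute' as const,
};
```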
*/ -import { CoreSetup } from 'kibana/public'; import { i18n } from '@kbn/i18n'; import { ActionContextMapping, createAction } from '../../../../../src/plugins/ui_actions/public'; -import { IEmbeddable } from '../../../../../src/plugins/embeddable/public'; import { AnomalySwimlaneEmbeddable, - AnomalySwimlaneEmbeddableInput, - AnomalySwimlaneEmbeddableOutput, + EditSwimlanePanelContext, } from '../embeddables/anomaly_swimlane/anomaly_swimlane_embeddable'; import { resolveAnomalySwimlaneUserInput } from '../embeddables/anomaly_swimlane/anomaly_swimlane_setup_flyout'; +import { ViewMode } from '../../../../../src/plugins/embeddable/public'; +import { MlCoreSetup } from '../plugin'; export const EDIT_SWIMLANE_PANEL_ACTION = 'editSwimlanePanelAction'; -export interface EditSwimlanePanelContext { - embeddable: IEmbeddable; -} - -export function createEditSwimlanePanelAction(getStartServices: CoreSetup['getStartServices']) { +export function createEditSwimlanePanelAction(getStartServices: MlCoreSetup['getStartServices']) { return createAction({ id: 'edit-anomaly-swimlane', type: EDIT_SWIMLANE_PANEL_ACTION, @@ -48,7 +43,8 @@ export function createEditSwimlanePanelAction(getStartServices: CoreSetup['getSt }, isCompatible: async ({ embeddable }: EditSwimlanePanelContext) => { return ( - embeddable instanceof AnomalySwimlaneEmbeddable && embeddable.getInput().viewMode === 'edit' + embeddable instanceof AnomalySwimlaneEmbeddable && + embeddable.getInput().viewMode === ViewMode.EDIT ); }, }); diff --git a/x-pack/plugins/ml/public/ui_actions/index.ts b/x-pack/plugins/ml/public/ui_actions/index.ts index 4a1535c4e8c2ec..b7262a330b3107 100644 --- a/x-pack/plugins/ml/public/ui_actions/index.ts +++ b/x-pack/plugins/ml/public/ui_actions/index.ts @@ -8,23 +8,65 @@ import { CoreSetup } from 'kibana/public'; import { createEditSwimlanePanelAction, EDIT_SWIMLANE_PANEL_ACTION, - EditSwimlanePanelContext, } from './edit_swimlane_panel_action'; +import { + createOpenInExplorerAction, + OPEN_IN_ANOMALY_EXPLORER_ACTION, +} from './open_in_anomaly_explorer_action'; +import { EditSwimlanePanelContext } from '../embeddables/anomaly_swimlane/anomaly_swimlane_embeddable'; import { UiActionsSetup } from '../../../../../src/plugins/ui_actions/public'; import { MlPluginStart, MlStartDependencies } from '../plugin'; import { CONTEXT_MENU_TRIGGER } from '../../../../../src/plugins/embeddable/public'; +import { + APPLY_INFLUENCER_FILTERS_ACTION, + createApplyInfluencerFiltersAction, +} from './apply_influencer_filters_action'; +import { SWIM_LANE_SELECTION_TRIGGER, swimLaneSelectionTrigger } from './triggers'; +import { SwimLaneDrilldownContext } from '../embeddables/anomaly_swimlane/anomaly_swimlane_embeddable'; +import { + APPLY_TIME_RANGE_SELECTION_ACTION, + createApplyTimeRangeSelectionAction, +} from './apply_time_range_action'; +/** + * Register ML UI actions + */ export function registerMlUiActions( uiActions: UiActionsSetup, core: CoreSetup ) { + // Initialize actions const editSwimlanePanelAction = createEditSwimlanePanelAction(core.getStartServices); + const openInExplorerAction = createOpenInExplorerAction(core.getStartServices); + const applyInfluencerFiltersAction = createApplyInfluencerFiltersAction(core.getStartServices); + const applyTimeRangeSelectionAction = createApplyTimeRangeSelectionAction(core.getStartServices); + + // Register actions uiActions.registerAction(editSwimlanePanelAction); + uiActions.registerAction(openInExplorerAction); + uiActions.registerAction(applyInfluencerFiltersAction); + 
uiActions.registerAction(applyTimeRangeSelectionAction); + + // Assign triggers uiActions.attachAction(CONTEXT_MENU_TRIGGER, editSwimlanePanelAction.id); + uiActions.attachAction(CONTEXT_MENU_TRIGGER, openInExplorerAction.id); + + uiActions.registerTrigger(swimLaneSelectionTrigger); + + uiActions.addTriggerAction(SWIM_LANE_SELECTION_TRIGGER, applyInfluencerFiltersAction); + uiActions.addTriggerAction(SWIM_LANE_SELECTION_TRIGGER, applyTimeRangeSelectionAction); + uiActions.addTriggerAction(SWIM_LANE_SELECTION_TRIGGER, openInExplorerAction); } declare module '../../../../../src/plugins/ui_actions/public' { export interface ActionContextMapping { [EDIT_SWIMLANE_PANEL_ACTION]: EditSwimlanePanelContext; + [OPEN_IN_ANOMALY_EXPLORER_ACTION]: SwimLaneDrilldownContext; + [APPLY_INFLUENCER_FILTERS_ACTION]: SwimLaneDrilldownContext; + [APPLY_TIME_RANGE_SELECTION_ACTION]: SwimLaneDrilldownContext; + } + + export interface TriggerContextMapping { + [SWIM_LANE_SELECTION_TRIGGER]: SwimLaneDrilldownContext; } } diff --git a/x-pack/plugins/ml/public/ui_actions/open_in_anomaly_explorer_action.tsx b/x-pack/plugins/ml/public/ui_actions/open_in_anomaly_explorer_action.tsx new file mode 100644 index 00000000000000..211840467e38c9 --- /dev/null +++ b/x-pack/plugins/ml/public/ui_actions/open_in_anomaly_explorer_action.tsx @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { i18n } from '@kbn/i18n'; +import { ActionContextMapping, createAction } from '../../../../../src/plugins/ui_actions/public'; +import { + AnomalySwimlaneEmbeddable, + SwimLaneDrilldownContext, +} from '../embeddables/anomaly_swimlane/anomaly_swimlane_embeddable'; +import { MlCoreSetup } from '../plugin'; +import { ML_APP_URL_GENERATOR } from '../url_generator'; + +export const OPEN_IN_ANOMALY_EXPLORER_ACTION = 'openInAnomalyExplorerAction'; + +export function createOpenInExplorerAction(getStartServices: MlCoreSetup['getStartServices']) { + return createAction({ + id: 'open-in-anomaly-explorer', + type: OPEN_IN_ANOMALY_EXPLORER_ACTION, + getIconType(context: ActionContextMapping[typeof OPEN_IN_ANOMALY_EXPLORER_ACTION]): string { + return 'tableOfContents'; + }, + getDisplayName() { + return i18n.translate('xpack.ml.actions.openInAnomalyExplorerTitle', { + defaultMessage: 'Open in Anomaly Explorer', + }); + }, + async getHref({ embeddable, data }: SwimLaneDrilldownContext): Promise { + const [, pluginsStart] = await getStartServices(); + const urlGenerator = pluginsStart.share.urlGenerators.getUrlGenerator(ML_APP_URL_GENERATOR); + const { jobIds, timeRange, viewBy } = embeddable.getInput(); + const { perPage, fromPage } = embeddable.getOutput(); + + return urlGenerator.createUrl({ + page: 'explorer', + jobIds, + timeRange, + mlExplorerSwimlane: { + viewByFromPage: fromPage, + viewByPerPage: perPage, + viewByFieldName: viewBy, + ...(data + ? 
{ + selectedType: data.type, + selectedTimes: data.times, + selectedLanes: data.lanes, + } + : {}), + }, + }); + }, + async execute({ embeddable, data }: SwimLaneDrilldownContext) { + if (!embeddable) { + throw new Error('Not possible to execute an action without the embeddable context'); + } + const [{ application }] = await getStartServices(); + const anomalyExplorerUrl = await this.getHref!({ embeddable, data }); + await application.navigateToUrl(anomalyExplorerUrl!); + }, + async isCompatible({ embeddable }: SwimLaneDrilldownContext) { + return embeddable instanceof AnomalySwimlaneEmbeddable; + }, + }); +} diff --git a/x-pack/plugins/ml/public/ui_actions/triggers.ts b/x-pack/plugins/ml/public/ui_actions/triggers.ts new file mode 100644 index 00000000000000..8a8b2602573a1f --- /dev/null +++ b/x-pack/plugins/ml/public/ui_actions/triggers.ts @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Trigger } from '../../../../../src/plugins/ui_actions/public'; + +export const SWIM_LANE_SELECTION_TRIGGER = 'SWIM_LANE_SELECTION_TRIGGER'; + +export const swimLaneSelectionTrigger: Trigger<'SWIM_LANE_SELECTION_TRIGGER'> = { + id: SWIM_LANE_SELECTION_TRIGGER, + // This is empty string to hide title of ui_actions context menu that appears + // when this trigger is executed. + title: '', + description: 'Swim lane selection triggered', +}; diff --git a/x-pack/plugins/ml/public/url_generator.test.ts b/x-pack/plugins/ml/public/url_generator.test.ts new file mode 100644 index 00000000000000..45e2932b7781a1 --- /dev/null +++ b/x-pack/plugins/ml/public/url_generator.test.ts @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { MlUrlGenerator } from './url_generator'; + +describe('MlUrlGenerator', () => { + const urlGenerator = new MlUrlGenerator({ + appBasePath: '/app/ml', + useHash: false, + }); + + it('should generate valid URL for the Anomaly Explorer page', async () => { + const url = await urlGenerator.createUrl({ + page: 'explorer', + jobIds: ['test-job'], + mlExplorerSwimlane: { viewByFromPage: 2, viewByPerPage: 20 }, + }); + expect(url).toBe( + '/app/ml#/explorer?_g=(ml:(jobIds:!(test-job)))&_a=(mlExplorerFilter:(),mlExplorerSwimlane:(viewByFromPage:2,viewByPerPage:20))' + ); + }); + + it('should throw an error in case the page is not provided', async () => { + expect.assertions(1); + + // @ts-ignore + await urlGenerator.createUrl({ jobIds: ['test-job'] }).catch((e) => { + expect(e.message).toEqual('Page type is not provided or unknown'); + }); + }); +}); diff --git a/x-pack/plugins/ml/public/url_generator.ts b/x-pack/plugins/ml/public/url_generator.ts new file mode 100644 index 00000000000000..65d5077e081a3a --- /dev/null +++ b/x-pack/plugins/ml/public/url_generator.ts @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { UrlGeneratorsDefinition } from '../../../../src/plugins/share/public'; +import { TimeRange } from '../../../../src/plugins/data/public'; +import { setStateToKbnUrl } from '../../../../src/plugins/kibana_utils/public'; +import { JobId } from '../../reporting/common/types'; +import { ExplorerAppState } from './application/explorer/explorer_dashboard_service'; + +export const ML_APP_URL_GENERATOR = 'ML_APP_URL_GENERATOR'; + +export interface ExplorerUrlState { + /** + * ML App Page + */ + page: 'explorer'; + /** + * Job IDs + */ + jobIds: JobId[]; + /** + * Optionally set the time range in the time picker. + */ + timeRange?: TimeRange; + /** + * Optional state for the swim lane + */ + mlExplorerSwimlane?: ExplorerAppState['mlExplorerSwimlane']; + mlExplorerFilter?: ExplorerAppState['mlExplorerFilter']; +} + +/** + * Union type of ML URL state based on page + */ +export type MlUrlGeneratorState = ExplorerUrlState; + +export interface ExplorerQueryState { + ml: { jobIds: JobId[] }; + time?: TimeRange; +} + +interface Params { + appBasePath: string; + useHash: boolean; +} + +export class MlUrlGenerator implements UrlGeneratorsDefinition { + constructor(private readonly params: Params) {} + + public readonly id = ML_APP_URL_GENERATOR; + + public readonly createUrl = async ({ page, ...params }: MlUrlGeneratorState): Promise => { + if (page === 'explorer') { + return this.createExplorerUrl(params); + } + throw new Error('Page type is not provided or unknown'); + }; + + /** + * Creates URL to the Anomaly Explorer page + */ + private createExplorerUrl({ + timeRange, + jobIds, + mlExplorerSwimlane = {}, + mlExplorerFilter = {}, + }: Omit): string { + const appState: ExplorerAppState = { + mlExplorerSwimlane, + mlExplorerFilter, + }; + + const queryState: ExplorerQueryState = { + ml: { + jobIds, + }, + }; + + if (timeRange) queryState.time = timeRange; + + let url = `${this.params.appBasePath}#/explorer`; + url = setStateToKbnUrl('_g', queryState, { useHash: false }, url); + url = setStateToKbnUrl('_a', appState, { useHash: false }, url); + + return url; + } +} diff --git a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts index 8e18b57ac92a8b..21d32813c0d511 100644 --- a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts +++ b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.test.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
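A hedged sketch of how a consumer could resolve the URL generator registered in `plugin.ts` above, mirroring what `createOpenInExplorerAction` does; the `share` start contract wiring, import path, and job ID here are assumptions for illustration only.

```typescript
import { SharePluginStart } from 'src/plugins/share/public';
import { ML_APP_URL_GENERATOR } from './url_generator';

// Resolve the registered generator from the share plugin's registry and build
// an Anomaly Explorer URL, e.g.
// '/app/ml#/explorer?_g=(ml:(jobIds:!(test-job)))&_a=(mlExplorerFilter:(),mlExplorerSwimlane:(viewByFromPage:2,viewByPerPage:20))'
async function buildExplorerUrl(share: SharePluginStart): Promise<string> {
  const urlGenerator = share.urlGenerators.getUrlGenerator(ML_APP_URL_GENERATOR);
  return urlGenerator.createUrl({
    page: 'explorer',
    jobIds: ['test-job'],
    mlExplorerSwimlane: { viewByFromPage: 2, viewByPerPage: 20 },
  });
}
```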
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { getAdminCapabilities, getUserCapabilities } from './__mocks__/ml_capabilities'; import { capabilitiesProvider } from './check_capabilities'; import { MlLicense } from '../../../common/license'; @@ -23,18 +23,23 @@ const mlLicenseBasic = { const mlIsEnabled = async () => true; const mlIsNotEnabled = async () => false; -const callWithRequestNonUpgrade = ((async () => ({ - upgrade_mode: false, -})) as unknown) as LegacyAPICaller; -const callWithRequestUpgrade = ((async () => ({ - upgrade_mode: true, -})) as unknown) as LegacyAPICaller; +const mlClusterClientNonUpgrade = ({ + callAsInternalUser: async () => ({ + upgrade_mode: false, + }), +} as unknown) as ILegacyScopedClusterClient; + +const mlClusterClientUpgrade = ({ + callAsInternalUser: async () => ({ + upgrade_mode: true, + }), +} as unknown) as ILegacyScopedClusterClient; describe('check_capabilities', () => { describe('getCapabilities() - right number of capabilities', () => { test('kibana capabilities count', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestNonUpgrade, + mlClusterClientNonUpgrade, getAdminCapabilities(), mlLicense, mlIsEnabled @@ -49,7 +54,7 @@ describe('check_capabilities', () => { describe('getCapabilities() with security', () => { test('ml_user capabilities only', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestNonUpgrade, + mlClusterClientNonUpgrade, getUserCapabilities(), mlLicense, mlIsEnabled @@ -98,7 +103,7 @@ describe('check_capabilities', () => { test('full capabilities', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestNonUpgrade, + mlClusterClientNonUpgrade, getAdminCapabilities(), mlLicense, mlIsEnabled @@ -147,7 +152,7 @@ describe('check_capabilities', () => { test('upgrade in progress with full capabilities', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestUpgrade, + mlClusterClientUpgrade, getAdminCapabilities(), mlLicense, mlIsEnabled @@ -196,7 +201,7 @@ describe('check_capabilities', () => { test('upgrade in progress with partial capabilities', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestUpgrade, + mlClusterClientUpgrade, getUserCapabilities(), mlLicense, mlIsEnabled @@ -245,7 +250,7 @@ describe('check_capabilities', () => { test('full capabilities, ml disabled in space', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestNonUpgrade, + mlClusterClientNonUpgrade, getDefaultCapabilities(), mlLicense, mlIsNotEnabled @@ -295,7 +300,7 @@ describe('check_capabilities', () => { test('full capabilities, basic license, ml disabled in space', async (done) => { const { getCapabilities } = capabilitiesProvider( - callWithRequestNonUpgrade, + mlClusterClientNonUpgrade, getDefaultCapabilities(), mlLicenseBasic, mlIsNotEnabled diff --git a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts index bdcdf50b983f5d..c976ab598b28c6 100644 --- a/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts +++ b/x-pack/plugins/ml/server/lib/capabilities/check_capabilities.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller, KibanaRequest } from 'kibana/server'; +import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; import { mlLog } from '../../client/log'; import { MlCapabilities, @@ -22,12 +22,12 @@ import { } from './errors'; export function capabilitiesProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, capabilities: MlCapabilities, mlLicense: MlLicense, isMlEnabledInSpace: () => Promise ) { - const { isUpgradeInProgress } = upgradeCheckProvider(callAsCurrentUser); + const { isUpgradeInProgress } = upgradeCheckProvider(mlClusterClient); async function getCapabilities(): Promise { const upgradeInProgress = await isUpgradeInProgress(); const isPlatinumOrTrialLicense = mlLicense.isFullLicense(); diff --git a/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts b/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts index 45f3f3da20c24b..6df4d0c87ecf54 100644 --- a/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts +++ b/x-pack/plugins/ml/server/lib/capabilities/upgrade.ts @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { mlLog } from '../../client/log'; -export function upgradeCheckProvider(callAsCurrentUser: LegacyAPICaller) { +export function upgradeCheckProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { async function isUpgradeInProgress(): Promise { let upgradeInProgress = false; try { - const info = await callAsCurrentUser('ml.info'); + const info = await callAsInternalUser('ml.info'); // if ml indices are currently being migrated, upgrade_mode will be set to true // pass this back with the privileges to allow for the disabling of UI controls. upgradeInProgress = info.upgrade_mode === true; diff --git a/x-pack/plugins/ml/server/lib/check_annotations/index.ts b/x-pack/plugins/ml/server/lib/check_annotations/index.ts index 2c46be394cbb22..fb37917c512cbb 100644 --- a/x-pack/plugins/ml/server/lib/check_annotations/index.ts +++ b/x-pack/plugins/ml/server/lib/check_annotations/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { mlLog } from '../../client/log'; import { @@ -17,7 +17,9 @@ import { // - ML_ANNOTATIONS_INDEX_PATTERN index is present // - ML_ANNOTATIONS_INDEX_ALIAS_READ alias is present // - ML_ANNOTATIONS_INDEX_ALIAS_WRITE alias is present -export async function isAnnotationsFeatureAvailable(callAsCurrentUser: LegacyAPICaller) { +export async function isAnnotationsFeatureAvailable({ + callAsCurrentUser, +}: ILegacyScopedClusterClient) { try { const indexParams = { index: ML_ANNOTATIONS_INDEX_PATTERN }; diff --git a/x-pack/plugins/ml/server/lib/request_authorization.ts b/x-pack/plugins/ml/server/lib/request_authorization.ts new file mode 100644 index 00000000000000..01df0900b96f47 --- /dev/null +++ b/x-pack/plugins/ml/server/lib/request_authorization.ts @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { KibanaRequest } from 'kibana/server'; + +export function getAuthorizationHeader(request: KibanaRequest) { + return { + headers: { 'es-secondary-authorization': request.headers.authorization }, + }; +} diff --git a/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts b/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts index 19db8b7b56aa6a..3bf9bd0232a5d5 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/annotation.test.ts @@ -6,7 +6,6 @@ import getAnnotationsRequestMock from './__mocks__/get_annotations_request.json'; import getAnnotationsResponseMock from './__mocks__/get_annotations_response.json'; -import { LegacyAPICaller } from 'kibana/server'; import { ANNOTATION_TYPE } from '../../../common/constants/annotations'; import { ML_ANNOTATIONS_INDEX_ALIAS_WRITE } from '../../../common/constants/index_patterns'; @@ -20,10 +19,10 @@ const acknowledgedResponseMock = { acknowledged: true }; const jobIdMock = 'jobIdMock'; describe('annotation_service', () => { - let callWithRequestSpy: any; + let mlClusterClientSpy = {} as any; beforeEach(() => { - callWithRequestSpy = (jest.fn((action: string) => { + const callAs = jest.fn((action: string) => { switch (action) { case 'delete': case 'index': @@ -31,13 +30,18 @@ describe('annotation_service', () => { case 'search': return Promise.resolve(getAnnotationsResponseMock); } - }) as unknown) as LegacyAPICaller; + }); + + mlClusterClientSpy = { + callAsCurrentUser: callAs, + callAsInternalUser: callAs, + }; }); describe('deleteAnnotation()', () => { it('should delete annotation', async (done) => { - const { deleteAnnotation } = annotationServiceProvider(callWithRequestSpy); - const mockFunct = callWithRequestSpy; + const { deleteAnnotation } = annotationServiceProvider(mlClusterClientSpy); + const mockFunct = mlClusterClientSpy; const annotationMockId = 'mockId'; const deleteParamsMock: DeleteParams = { @@ -48,8 +52,8 @@ describe('annotation_service', () => { const response = await deleteAnnotation(annotationMockId); - expect(mockFunct.mock.calls[0][0]).toBe('delete'); - expect(mockFunct.mock.calls[0][1]).toEqual(deleteParamsMock); + expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('delete'); + expect(mockFunct.callAsCurrentUser.mock.calls[0][1]).toEqual(deleteParamsMock); expect(response).toBe(acknowledgedResponseMock); done(); }); @@ -57,8 +61,8 @@ describe('annotation_service', () => { describe('getAnnotation()', () => { it('should get annotations for specific job', async (done) => { - const { getAnnotations } = annotationServiceProvider(callWithRequestSpy); - const mockFunct = callWithRequestSpy; + const { getAnnotations } = annotationServiceProvider(mlClusterClientSpy); + const mockFunct = mlClusterClientSpy; const indexAnnotationArgsMock: IndexAnnotationArgs = { jobIds: [jobIdMock], @@ -69,8 +73,8 @@ describe('annotation_service', () => { const response: GetResponse = await getAnnotations(indexAnnotationArgsMock); - expect(mockFunct.mock.calls[0][0]).toBe('search'); - expect(mockFunct.mock.calls[0][1]).toEqual(getAnnotationsRequestMock); + expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('search'); + expect(mockFunct.callAsCurrentUser.mock.calls[0][1]).toEqual(getAnnotationsRequestMock); expect(Object.keys(response.annotations)).toHaveLength(1); expect(response.annotations[jobIdMock]).toHaveLength(2); expect(isAnnotations(response.annotations[jobIdMock])).toBeTruthy(); @@ -84,11 +88,13 @@ 
describe('annotation_service', () => { message: 'mock error message', }; - const callWithRequestSpyError = (jest.fn(() => { - return Promise.resolve(mockEsError); - }) as unknown) as LegacyAPICaller; + const mlClusterClientSpyError: any = { + callAsCurrentUser: jest.fn(() => { + return Promise.resolve(mockEsError); + }), + }; - const { getAnnotations } = annotationServiceProvider(callWithRequestSpyError); + const { getAnnotations } = annotationServiceProvider(mlClusterClientSpyError); const indexAnnotationArgsMock: IndexAnnotationArgs = { jobIds: [jobIdMock], @@ -105,8 +111,8 @@ describe('annotation_service', () => { describe('indexAnnotation()', () => { it('should index annotation', async (done) => { - const { indexAnnotation } = annotationServiceProvider(callWithRequestSpy); - const mockFunct = callWithRequestSpy; + const { indexAnnotation } = annotationServiceProvider(mlClusterClientSpy); + const mockFunct = mlClusterClientSpy; const annotationMock: Annotation = { annotation: 'Annotation text', @@ -118,10 +124,10 @@ describe('annotation_service', () => { const response = await indexAnnotation(annotationMock, usernameMock); - expect(mockFunct.mock.calls[0][0]).toBe('index'); + expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('index'); // test if the annotation has been correctly augmented - const indexParamsCheck = mockFunct.mock.calls[0][1]; + const indexParamsCheck = mockFunct.callAsCurrentUser.mock.calls[0][1]; const annotation = indexParamsCheck.body; expect(annotation.create_username).toBe(usernameMock); expect(annotation.modified_username).toBe(usernameMock); @@ -133,8 +139,8 @@ describe('annotation_service', () => { }); it('should remove ._id and .key before updating annotation', async (done) => { - const { indexAnnotation } = annotationServiceProvider(callWithRequestSpy); - const mockFunct = callWithRequestSpy; + const { indexAnnotation } = annotationServiceProvider(mlClusterClientSpy); + const mockFunct = mlClusterClientSpy; const annotationMock: Annotation = { _id: 'mockId', @@ -148,10 +154,10 @@ describe('annotation_service', () => { const response = await indexAnnotation(annotationMock, usernameMock); - expect(mockFunct.mock.calls[0][0]).toBe('index'); + expect(mockFunct.callAsCurrentUser.mock.calls[0][0]).toBe('index'); // test if the annotation has been correctly augmented - const indexParamsCheck = mockFunct.mock.calls[0][1]; + const indexParamsCheck = mockFunct.callAsCurrentUser.mock.calls[0][1]; const annotation = indexParamsCheck.body; expect(annotation.create_username).toBe(usernameMock); expect(annotation.modified_username).toBe(usernameMock); @@ -165,8 +171,8 @@ describe('annotation_service', () => { }); it('should update annotation text and the username for modified_username', async (done) => { - const { getAnnotations, indexAnnotation } = annotationServiceProvider(callWithRequestSpy); - const mockFunct = callWithRequestSpy; + const { getAnnotations, indexAnnotation } = annotationServiceProvider(mlClusterClientSpy); + const mockFunct = mlClusterClientSpy; const indexAnnotationArgsMock: IndexAnnotationArgs = { jobIds: [jobIdMock], @@ -190,9 +196,9 @@ describe('annotation_service', () => { await indexAnnotation(annotation, modifiedUsernameMock); - expect(mockFunct.mock.calls[1][0]).toBe('index'); + expect(mockFunct.callAsCurrentUser.mock.calls[1][0]).toBe('index'); // test if the annotation has been correctly updated - const indexParamsCheck = mockFunct.mock.calls[1][1]; + const indexParamsCheck = mockFunct.callAsCurrentUser.mock.calls[1][1]; const 
modifiedAnnotation = indexParamsCheck.body; expect(modifiedAnnotation.annotation).toBe(modifiedAnnotationText); expect(modifiedAnnotation.create_username).toBe(originalUsernameMock); diff --git a/x-pack/plugins/ml/server/models/annotation_service/annotation.ts b/x-pack/plugins/ml/server/models/annotation_service/annotation.ts index 2808b06103a757..c2582107062bb0 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/annotation.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/annotation.ts @@ -6,7 +6,7 @@ import Boom from 'boom'; import _ from 'lodash'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ANNOTATION_TYPE } from '../../../common/constants/annotations'; import { @@ -61,14 +61,7 @@ export interface DeleteParams { id: string; } -type annotationProviderParams = DeleteParams | GetParams | IndexParams; - -export type callWithRequestType = ( - action: string, - params: annotationProviderParams -) => Promise; - -export function annotationProvider(callAsCurrentUser: LegacyAPICaller) { +export function annotationProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { async function indexAnnotation(annotation: Annotation, username: string) { if (isAnnotation(annotation) === false) { // No need to translate, this will not be exposed in the UI. diff --git a/x-pack/plugins/ml/server/models/annotation_service/index.ts b/x-pack/plugins/ml/server/models/annotation_service/index.ts index efc42c693c24b6..e17af2a154b876 100644 --- a/x-pack/plugins/ml/server/models/annotation_service/index.ts +++ b/x-pack/plugins/ml/server/models/annotation_service/index.ts @@ -4,11 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { annotationProvider } from './annotation'; -export function annotationServiceProvider(callAsCurrentUser: LegacyAPICaller) { +export function annotationServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { return { - ...annotationProvider(callAsCurrentUser), + ...annotationProvider(mlClusterClient), }; } diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts index 3e80e79705a5c2..eeabb24d9be3b7 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.d.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ES_AGGREGATION } from '../../../common/constants/aggregation_types'; export interface BucketSpanEstimatorData { @@ -20,8 +20,7 @@ export interface BucketSpanEstimatorData { timeField: string | undefined; } -export function estimateBucketSpanFactory( - callAsCurrentUser: LegacyAPICaller, - callAsInternalUser: LegacyAPICaller, - isSecurityDisabled: boolean -): (config: BucketSpanEstimatorData) => Promise; +export function estimateBucketSpanFactory({ + callAsCurrentUser, + callAsInternalUser, +}: ILegacyScopedClusterClient): (config: BucketSpanEstimatorData) => Promise; diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js index 2e03a9532c831c..37585477794039 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.js @@ -12,13 +12,10 @@ import { INTERVALS } from './intervals'; import { singleSeriesCheckerFactory } from './single_series_checker'; import { polledDataCheckerFactory } from './polled_data_checker'; -export function estimateBucketSpanFactory( - callAsCurrentUser, - callAsInternalUser, - isSecurityDisabled -) { - const PolledDataChecker = polledDataCheckerFactory(callAsCurrentUser); - const SingleSeriesChecker = singleSeriesCheckerFactory(callAsCurrentUser); +export function estimateBucketSpanFactory(mlClusterClient) { + const { callAsCurrentUser, callAsInternalUser } = mlClusterClient; + const PolledDataChecker = polledDataCheckerFactory(mlClusterClient); + const SingleSeriesChecker = singleSeriesCheckerFactory(mlClusterClient); class BucketSpanEstimator { constructor( @@ -334,99 +331,65 @@ export function estimateBucketSpanFactory( } return new Promise((resolve, reject) => { - function getBucketSpanEstimation() { - // fetch the `search.max_buckets` cluster setting so we're able to - // adjust aggregations to not exceed that limit. - callAsInternalUser('cluster.getSettings', { - flatSettings: true, - includeDefaults: true, - filterPath: '*.*max_buckets', - }) - .then((settings) => { - if (typeof settings !== 'object') { - reject('Unable to retrieve cluster settings'); - } - - // search.max_buckets could exist in default, persistent or transient cluster settings - const maxBucketsSetting = (settings.defaults || - settings.persistent || - settings.transient || - {})['search.max_buckets']; - - if (maxBucketsSetting === undefined) { - reject('Unable to retrieve cluster setting search.max_buckets'); - } - - const maxBuckets = parseInt(maxBucketsSetting); + // fetch the `search.max_buckets` cluster setting so we're able to + // adjust aggregations to not exceed that limit. 
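> Editor's note: the signature change above is the pattern repeated throughout this patch: providers stop taking a bare `LegacyAPICaller` and instead accept the whole scoped cluster client, destructuring only the caller(s) they need. A condensed sketch follows; `someProvider` and its two methods are hypothetical and only illustrate the shape of the refactor.

```typescript
import { ILegacyScopedClusterClient } from 'kibana/server';

// Before (roughly): export function someProvider(callAsCurrentUser: LegacyAPICaller) { ... }

// After:
export function someProvider({ callAsCurrentUser, callAsInternalUser }: ILegacyScopedClusterClient) {
  return {
    // runs with the end user's credentials, e.g. plain index searches
    search: (params: Record<string, any>) => callAsCurrentUser('search', params),
    // runs as the Kibana internal user, e.g. ML endpoints needing wider privileges
    info: () => callAsInternalUser('ml.info'),
  };
}
```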
+ callAsInternalUser('cluster.getSettings', { + flatSettings: true, + includeDefaults: true, + filterPath: '*.*max_buckets', + }) + .then((settings) => { + if (typeof settings !== 'object') { + reject('Unable to retrieve cluster settings'); + } - const runEstimator = (splitFieldValues = []) => { - const bucketSpanEstimator = new BucketSpanEstimator( - formConfig, - splitFieldValues, - maxBuckets - ); + // search.max_buckets could exist in default, persistent or transient cluster settings + const maxBucketsSetting = (settings.defaults || + settings.persistent || + settings.transient || + {})['search.max_buckets']; - bucketSpanEstimator - .run() - .then((resp) => { - resolve(resp); - }) - .catch((resp) => { - reject(resp); - }); - }; - - // a partition has been selected, so we need to load some field values to use in the - // bucket span tests. - if (formConfig.splitField !== undefined) { - getRandomFieldValues(formConfig.index, formConfig.splitField, formConfig.query) - .then((splitFieldValues) => { - runEstimator(splitFieldValues); - }) - .catch((resp) => { - reject(resp); - }); - } else { - // no partition field selected or we're in the single metric config - runEstimator(); - } - }) - .catch((resp) => { - reject(resp); - }); - } + if (maxBucketsSetting === undefined) { + reject('Unable to retrieve cluster setting search.max_buckets'); + } - if (isSecurityDisabled) { - getBucketSpanEstimation(); - } else { - // if security is enabled, check that the user has permission to - // view jobs before calling getBucketSpanEstimation. - // getBucketSpanEstimation calls the 'cluster.getSettings' endpoint as the internal user - // and so could give the user access to more information than - // they are entitled to. - const body = { - cluster: [ - 'cluster:monitor/xpack/ml/job/get', - 'cluster:monitor/xpack/ml/job/stats/get', - 'cluster:monitor/xpack/ml/datafeeds/get', - 'cluster:monitor/xpack/ml/datafeeds/stats/get', - ], - }; - callAsCurrentUser('ml.privilegeCheck', { body }) - .then((resp) => { - if ( - resp.cluster['cluster:monitor/xpack/ml/job/get'] && - resp.cluster['cluster:monitor/xpack/ml/job/stats/get'] && - resp.cluster['cluster:monitor/xpack/ml/datafeeds/get'] && - resp.cluster['cluster:monitor/xpack/ml/datafeeds/stats/get'] - ) { - getBucketSpanEstimation(); - } else { - reject('Insufficient permissions to call bucket span estimation.'); - } - }) - .catch(reject); - } + const maxBuckets = parseInt(maxBucketsSetting); + + const runEstimator = (splitFieldValues = []) => { + const bucketSpanEstimator = new BucketSpanEstimator( + formConfig, + splitFieldValues, + maxBuckets + ); + + bucketSpanEstimator + .run() + .then((resp) => { + resolve(resp); + }) + .catch((resp) => { + reject(resp); + }); + }; + + // a partition has been selected, so we need to load some field values to use in the + // bucket span tests. 
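> Editor's note: a standalone sketch of the `search.max_buckets` lookup performed in the promise chain above. The simplified `ClusterSettingsResponse` shape and the thrown error (instead of a promise rejection) are assumptions for the sketch; the settings fallback order and message mirror the hunk.

```typescript
interface ClusterSettingsResponse {
  defaults?: Record<string, string>;
  persistent?: Record<string, string>;
  transient?: Record<string, string>;
}

function getMaxBuckets(settings: ClusterSettingsResponse): number {
  // search.max_buckets could exist in default, persistent or transient cluster settings
  const maxBucketsSetting = (settings.defaults || settings.persistent || settings.transient || {})[
    'search.max_buckets'
  ];
  if (maxBucketsSetting === undefined) {
    throw new Error('Unable to retrieve cluster setting search.max_buckets');
  }
  return parseInt(maxBucketsSetting, 10);
}
```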
+ if (formConfig.splitField !== undefined) { + getRandomFieldValues(formConfig.index, formConfig.splitField, formConfig.query) + .then((splitFieldValues) => { + runEstimator(splitFieldValues); + }) + .catch((resp) => { + reject(resp); + }); + } else { + // no partition field selected or we're in the single metric config + runEstimator(); + } + }) + .catch((resp) => { + reject(resp); + }); }); }; } diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts index 8da1fb69eec34f..f7c7dd8172ea5a 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/bucket_span_estimator.test.ts @@ -4,40 +4,21 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ES_AGGREGATION } from '../../../common/constants/aggregation_types'; import { estimateBucketSpanFactory, BucketSpanEstimatorData } from './bucket_span_estimator'; -// Mock callWithRequest with the ability to simulate returning different -// permission settings. On each call using `ml.privilegeCheck` we retrieve -// the last value from `permissions` and pass that to one of the permission -// settings. The tests call `ml.privilegeCheck` two times, the first time -// sufficient permissions should be returned, the second time insufficient -// permissions. -const permissions = [false, true]; -const callWithRequest: LegacyAPICaller = (method: string) => { +const callAs = () => { return new Promise((resolve) => { - if (method === 'ml.privilegeCheck') { - resolve({ - cluster: { - 'cluster:monitor/xpack/ml/job/get': true, - 'cluster:monitor/xpack/ml/job/stats/get': true, - 'cluster:monitor/xpack/ml/datafeeds/get': true, - 'cluster:monitor/xpack/ml/datafeeds/stats/get': permissions.pop(), - }, - }); - return; - } resolve({}); }) as Promise; }; -const callWithInternalUser: LegacyAPICaller = () => { - return new Promise((resolve) => { - resolve({}); - }) as Promise; +const mlClusterClient: ILegacyScopedClusterClient = { + callAsCurrentUser: callAs, + callAsInternalUser: callAs, }; // mock configuration to be passed to the estimator @@ -59,17 +40,13 @@ const formConfig: BucketSpanEstimatorData = { describe('ML - BucketSpanEstimator', () => { it('call factory', () => { expect(function () { - estimateBucketSpanFactory(callWithRequest, callWithInternalUser, false); + estimateBucketSpanFactory(mlClusterClient); }).not.toThrow('Not initialized.'); }); it('call factory and estimator with security disabled', (done) => { expect(function () { - const estimateBucketSpan = estimateBucketSpanFactory( - callWithRequest, - callWithInternalUser, - true - ); + const estimateBucketSpan = estimateBucketSpanFactory(mlClusterClient); estimateBucketSpan(formConfig).catch((catchData) => { expect(catchData).toBe('Unable to retrieve cluster setting search.max_buckets'); @@ -81,11 +58,7 @@ describe('ML - BucketSpanEstimator', () => { it('call factory and estimator with security enabled.', (done) => { expect(function () { - const estimateBucketSpan = estimateBucketSpanFactory( - callWithRequest, - callWithInternalUser, - false - ); + const estimateBucketSpan = estimateBucketSpanFactory(mlClusterClient); estimateBucketSpan(formConfig).catch((catchData) => { expect(catchData).toBe('Unable to retrieve cluster setting 
search.max_buckets'); diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js b/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js index de9fd06c34e6a6..347843e276c368 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/polled_data_checker.js @@ -12,7 +12,7 @@ import _ from 'lodash'; -export function polledDataCheckerFactory(callAsCurrentUser) { +export function polledDataCheckerFactory({ callAsCurrentUser }) { class PolledDataChecker { constructor(index, timeField, duration, query) { this.index = index; diff --git a/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js b/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js index 6ae485fe11307e..a5449395501dcb 100644 --- a/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js +++ b/x-pack/plugins/ml/server/models/bucket_span_estimator/single_series_checker.js @@ -13,7 +13,7 @@ import { mlLog } from '../../client/log'; import { INTERVALS, LONG_INTERVALS } from './intervals'; -export function singleSeriesCheckerFactory(callAsCurrentUser) { +export function singleSeriesCheckerFactory({ callAsCurrentUser }) { const REF_DATA_INTERVAL = { name: '1h', ms: 3600000 }; class SingleSeriesChecker { diff --git a/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts b/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts index 61299aa3ae26df..bc3c326e7d0705 100644 --- a/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts +++ b/x-pack/plugins/ml/server/models/calculate_model_memory_limit/calculate_model_memory_limit.ts @@ -5,7 +5,7 @@ */ import numeral from '@elastic/numeral'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { MLCATEGORY } from '../../../common/constants/field_types'; import { AnalysisConfig } from '../../../common/types/anomaly_detection_jobs'; import { fieldsServiceProvider } from '../fields_service'; @@ -36,8 +36,8 @@ export interface ModelMemoryEstimate { /** * Retrieves overall and max bucket cardinalities. 
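> Editor's note: the test changes above share one mock shape. A minimal sketch, assuming a jest environment, with both callers backed by the same stub so a single spy covers either code path:

```typescript
import { ILegacyScopedClusterClient } from 'kibana/server';

const callAs = jest.fn().mockResolvedValue({});

const mlClusterClientMock = ({
  callAsCurrentUser: callAs,
  callAsInternalUser: callAs,
} as unknown) as ILegacyScopedClusterClient;

// e.g. estimateBucketSpanFactory(mlClusterClientMock)
// or annotationServiceProvider(mlClusterClientMock)
```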
*/ -const cardinalityCheckProvider = (callAsCurrentUser: LegacyAPICaller) => { - const fieldsService = fieldsServiceProvider(callAsCurrentUser); +const cardinalityCheckProvider = (mlClusterClient: ILegacyScopedClusterClient) => { + const fieldsService = fieldsServiceProvider(mlClusterClient); return async ( analysisConfig: AnalysisConfig, @@ -123,8 +123,9 @@ const cardinalityCheckProvider = (callAsCurrentUser: LegacyAPICaller) => { }; }; -export function calculateModelMemoryLimitProvider(callAsCurrentUser: LegacyAPICaller) { - const getCardinalities = cardinalityCheckProvider(callAsCurrentUser); +export function calculateModelMemoryLimitProvider(mlClusterClient: ILegacyScopedClusterClient) { + const { callAsInternalUser } = mlClusterClient; + const getCardinalities = cardinalityCheckProvider(mlClusterClient); /** * Retrieves an estimated size of the model memory limit used in the job config @@ -140,7 +141,7 @@ export function calculateModelMemoryLimitProvider(callAsCurrentUser: LegacyAPICa latestMs: number, allowMMLGreaterThanMax = false ): Promise { - const info = await callAsCurrentUser('ml.info'); + const info = (await callAsInternalUser('ml.info')) as MlInfoResponse; const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase(); const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase(); @@ -153,28 +154,26 @@ export function calculateModelMemoryLimitProvider(callAsCurrentUser: LegacyAPICa latestMs ); - const estimatedModelMemoryLimit = ( - await callAsCurrentUser('ml.estimateModelMemory', { - body: { - analysis_config: analysisConfig, - overall_cardinality: overallCardinality, - max_bucket_cardinality: maxBucketCardinality, - }, - }) - ).model_memory_estimate.toUpperCase(); + const estimatedModelMemoryLimit = ((await callAsInternalUser('ml.estimateModelMemory', { + body: { + analysis_config: analysisConfig, + overall_cardinality: overallCardinality, + max_bucket_cardinality: maxBucketCardinality, + }, + })) as ModelMemoryEstimate).model_memory_estimate.toUpperCase(); let modelMemoryLimit = estimatedModelMemoryLimit; let mmlCappedAtMax = false; // if max_model_memory_limit has been set, // make sure the estimated value is not greater than it. 
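> Editor's note: a condensed sketch of the model-memory-limit capping step performed below. `capModelMemoryLimit` is a hypothetical helper, not a function in the diff; the byte comparison via `@elastic/numeral` mirrors the hunk, which guards the same calls with `@ts-expect-error` because of the library's typings (suppressions omitted here for readability).

```typescript
import numeral from '@elastic/numeral';

// Caps an estimated model memory limit (e.g. '23MB') at maxMml when it exceeds it.
function capModelMemoryLimit(estimatedMml: string, maxMml?: string): string {
  if (maxMml === undefined) {
    return estimatedMml;
  }
  const mmlBytes = numeral(estimatedMml.toUpperCase()).value();
  const maxBytes = numeral(maxMml.toUpperCase()).value();
  if (mmlBytes > maxBytes) {
    // re-express the cap in whole megabytes, as the surrounding code does
    return `${Math.floor(maxBytes / numeral('1MB').value())}MB`;
  }
  return estimatedMml;
}
```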
if (allowMMLGreaterThanMax === false) { - // @ts-ignore + // @ts-expect-error const mmlBytes = numeral(estimatedModelMemoryLimit).value(); if (maxModelMemoryLimit !== undefined) { - // @ts-ignore + // @ts-expect-error const maxBytes = numeral(maxModelMemoryLimit).value(); if (mmlBytes > maxBytes) { - // @ts-ignore + // @ts-expect-error modelMemoryLimit = `${Math.floor(maxBytes / numeral('1MB').value())}MB`; mmlCappedAtMax = true; } @@ -183,10 +182,10 @@ export function calculateModelMemoryLimitProvider(callAsCurrentUser: LegacyAPICa // if we've not already capped the estimated mml at the hard max server setting // ensure that the estimated mml isn't greater than the effective max mml if (mmlCappedAtMax === false && effectiveMaxModelMemoryLimit !== undefined) { - // @ts-ignore + // @ts-expect-error const effectiveMaxMmlBytes = numeral(effectiveMaxModelMemoryLimit).value(); if (mmlBytes > effectiveMaxMmlBytes) { - // @ts-ignore + // @ts-expect-error modelMemoryLimit = `${Math.floor(effectiveMaxMmlBytes / numeral('1MB').value())}MB`; } } diff --git a/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts b/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts index 5df9c037b3f837..43f4dc3cba7e21 100644 --- a/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts +++ b/x-pack/plugins/ml/server/models/calendar/calendar_manager.ts @@ -5,7 +5,7 @@ */ import { difference } from 'lodash'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { EventManager, CalendarEvent } from './event_manager'; interface BasicCalendar { @@ -23,16 +23,16 @@ export interface FormCalendar extends BasicCalendar { } export class CalendarManager { - private _callAsCurrentUser: LegacyAPICaller; + private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; private _eventManager: EventManager; - constructor(callAsCurrentUser: LegacyAPICaller) { - this._callAsCurrentUser = callAsCurrentUser; - this._eventManager = new EventManager(callAsCurrentUser); + constructor(mlClusterClient: ILegacyScopedClusterClient) { + this._callAsInternalUser = mlClusterClient.callAsInternalUser; + this._eventManager = new EventManager(mlClusterClient); } async getCalendar(calendarId: string) { - const resp = await this._callAsCurrentUser('ml.calendars', { + const resp = await this._callAsInternalUser('ml.calendars', { calendarId, }); @@ -43,7 +43,7 @@ export class CalendarManager { } async getAllCalendars() { - const calendarsResp = await this._callAsCurrentUser('ml.calendars'); + const calendarsResp = await this._callAsInternalUser('ml.calendars'); const events: CalendarEvent[] = await this._eventManager.getAllEvents(); const calendars: Calendar[] = calendarsResp.calendars; @@ -74,7 +74,7 @@ export class CalendarManager { const events = calendar.events; delete calendar.calendarId; delete calendar.events; - await this._callAsCurrentUser('ml.addCalendar', { + await this._callAsInternalUser('ml.addCalendar', { calendarId, body: calendar, }); @@ -109,7 +109,7 @@ export class CalendarManager { // add all new jobs if (jobsToAdd.length) { - await this._callAsCurrentUser('ml.addJobToCalendar', { + await this._callAsInternalUser('ml.addJobToCalendar', { calendarId, jobId: jobsToAdd.join(','), }); @@ -117,7 +117,7 @@ export class CalendarManager { // remove all removed jobs if (jobsToRemove.length) { - await this._callAsCurrentUser('ml.removeJobFromCalendar', { + await this._callAsInternalUser('ml.removeJobFromCalendar', { calendarId, jobId: 
jobsToRemove.join(','), }); @@ -140,6 +140,6 @@ export class CalendarManager { } async deleteCalendar(calendarId: string) { - return this._callAsCurrentUser('ml.deleteCalendar', { calendarId }); + return this._callAsInternalUser('ml.deleteCalendar', { calendarId }); } } diff --git a/x-pack/plugins/ml/server/models/calendar/event_manager.ts b/x-pack/plugins/ml/server/models/calendar/event_manager.ts index 57034ab772710f..b670bbe187544c 100644 --- a/x-pack/plugins/ml/server/models/calendar/event_manager.ts +++ b/x-pack/plugins/ml/server/models/calendar/event_manager.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { GLOBAL_CALENDAR } from '../../../common/constants/calendars'; export interface CalendarEvent { @@ -16,10 +16,13 @@ export interface CalendarEvent { } export class EventManager { - constructor(private _callAsCurrentUser: LegacyAPICaller) {} + private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; + constructor({ callAsInternalUser }: ILegacyScopedClusterClient) { + this._callAsInternalUser = callAsInternalUser; + } async getCalendarEvents(calendarId: string) { - const resp = await this._callAsCurrentUser('ml.events', { calendarId }); + const resp = await this._callAsInternalUser('ml.events', { calendarId }); return resp.events; } @@ -27,7 +30,7 @@ export class EventManager { // jobId is optional async getAllEvents(jobId?: string) { const calendarId = GLOBAL_CALENDAR; - const resp = await this._callAsCurrentUser('ml.events', { + const resp = await this._callAsInternalUser('ml.events', { calendarId, jobId, }); @@ -38,14 +41,14 @@ export class EventManager { async addEvents(calendarId: string, events: CalendarEvent[]) { const body = { events }; - return await this._callAsCurrentUser('ml.addEvent', { + return await this._callAsInternalUser('ml.addEvent', { calendarId, body, }); } async deleteEvent(calendarId: string, eventId: string) { - return this._callAsCurrentUser('ml.deleteEvent', { calendarId, eventId }); + return this._callAsInternalUser('ml.deleteEvent', { calendarId, eventId }); } isEqual(ev1: CalendarEvent, ev2: CalendarEvent) { diff --git a/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts b/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts index abe389165182f0..c8471b54622055 100644 --- a/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts +++ b/x-pack/plugins/ml/server/models/data_frame_analytics/analytics_audit_messages.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { callWithRequestType } from '../../../common/types/kibana'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ML_NOTIFICATION_INDEX_PATTERN } from '../../../common/constants/index_patterns'; import { JobMessage } from '../../../common/types/audit_message'; @@ -23,7 +23,7 @@ interface BoolQuery { bool: { [key: string]: any }; } -export function analyticsAuditMessagesProvider(callWithRequest: callWithRequestType) { +export function analyticsAuditMessagesProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { // search for audit messages, // analyticsId is optional. without it, all analytics will be listed. 
async function getAnalyticsAuditMessages(analyticsId: string) { @@ -69,7 +69,7 @@ export function analyticsAuditMessagesProvider(callWithRequest: callWithRequestT } try { - const resp = await callWithRequest('search', { + const resp = await callAsCurrentUser('search', { index: ML_NOTIFICATION_INDEX_PATTERN, ignore_unavailable: true, rest_total_hits_as_int: true, diff --git a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts index ee8598ad338e32..82d7707464308a 100644 --- a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts +++ b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.test.ts @@ -4,17 +4,18 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller, SavedObjectsClientContract } from 'kibana/server'; +import { SavedObjectsClientContract, KibanaRequest } from 'kibana/server'; import { Module } from '../../../common/types/modules'; import { DataRecognizer } from '../data_recognizer'; describe('ML - data recognizer', () => { const dr = new DataRecognizer( - jest.fn() as LegacyAPICaller, + { callAsCurrentUser: jest.fn(), callAsInternalUser: jest.fn() }, ({ find: jest.fn(), bulkCreate: jest.fn(), - } as never) as SavedObjectsClientContract + } as unknown) as SavedObjectsClientContract, + { headers: { authorization: '' } } as KibanaRequest ); describe('jobOverrides', () => { diff --git a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts index ae9a56f00a5c16..521d04159ca7a6 100644 --- a/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts +++ b/x-pack/plugins/ml/server/models/data_recognizer/data_recognizer.ts @@ -7,11 +7,16 @@ import fs from 'fs'; import Boom from 'boom'; import numeral from '@elastic/numeral'; -import { LegacyAPICaller, SavedObjectsClientContract } from 'kibana/server'; +import { + KibanaRequest, + ILegacyScopedClusterClient, + SavedObjectsClientContract, +} from 'kibana/server'; import moment from 'moment'; import { IndexPatternAttributes } from 'src/plugins/data/server'; import { merge } from 'lodash'; import { AnalysisLimits, CombinedJobWithStats } from '../../../common/types/anomaly_detection_jobs'; +import { getAuthorizationHeader } from '../../lib/request_authorization'; import { MlInfoResponse } from '../../../common/types/ml_server_info'; import { KibanaObjects, @@ -104,18 +109,28 @@ interface SaveResults { } export class DataRecognizer { - modulesDir = `${__dirname}/modules`; - indexPatternName: string = ''; - indexPatternId: string | undefined = undefined; + private _callAsCurrentUser: ILegacyScopedClusterClient['callAsCurrentUser']; + private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; + private _mlClusterClient: ILegacyScopedClusterClient; + private _authorizationHeader: object; + private _modulesDir = `${__dirname}/modules`; + private _indexPatternName: string = ''; + private _indexPatternId: string | undefined = undefined; /** * List of the module jobs that require model memory estimation */ jobsForModelMemoryEstimation: Array<{ job: ModuleJob; query: any }> = []; constructor( - private callAsCurrentUser: LegacyAPICaller, - private savedObjectsClient: SavedObjectsClientContract - ) {} + mlClusterClient: ILegacyScopedClusterClient, + private savedObjectsClient: SavedObjectsClientContract, + request: KibanaRequest + ) { + this._mlClusterClient = 
mlClusterClient; + this._callAsCurrentUser = mlClusterClient.callAsCurrentUser; + this._callAsInternalUser = mlClusterClient.callAsInternalUser; + this._authorizationHeader = getAuthorizationHeader(request); + } // list all directories under the given directory async listDirs(dirName: string): Promise { @@ -150,12 +165,12 @@ export class DataRecognizer { async loadManifestFiles(): Promise { const configs: Config[] = []; - const dirs = await this.listDirs(this.modulesDir); + const dirs = await this.listDirs(this._modulesDir); await Promise.all( dirs.map(async (dir) => { let file: string | undefined; try { - file = await this.readFile(`${this.modulesDir}/${dir}/manifest.json`); + file = await this.readFile(`${this._modulesDir}/${dir}/manifest.json`); } catch (error) { mlLog.warn(`Data recognizer skipping folder ${dir} as manifest.json cannot be read`); } @@ -204,7 +219,7 @@ export class DataRecognizer { if (moduleConfig.logoFile) { try { logo = await this.readFile( - `${this.modulesDir}/${i.dirName}/${moduleConfig.logoFile}` + `${this._modulesDir}/${i.dirName}/${moduleConfig.logoFile}` ); logo = JSON.parse(logo); } catch (e) { @@ -236,7 +251,7 @@ export class DataRecognizer { query: moduleConfig.query, }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, rest_total_hits_as_int: true, size, @@ -281,7 +296,7 @@ export class DataRecognizer { manifestJSON.jobs.map(async (job) => { try { const jobConfig = await this.readFile( - `${this.modulesDir}/${dirName}/${ML_DIR}/${job.file}` + `${this._modulesDir}/${dirName}/${ML_DIR}/${job.file}` ); // use the file name for the id jobs.push({ @@ -301,7 +316,7 @@ export class DataRecognizer { manifestJSON.datafeeds.map(async (datafeed) => { try { const datafeedConfig = await this.readFile( - `${this.modulesDir}/${dirName}/${ML_DIR}/${datafeed.file}` + `${this._modulesDir}/${dirName}/${ML_DIR}/${datafeed.file}` ); const config = JSON.parse(datafeedConfig); // use the job id from the manifestFile @@ -329,7 +344,7 @@ export class DataRecognizer { manifestJSON!.kibana[key].map(async (obj) => { try { const kConfig = await this.readFile( - `${this.modulesDir}/${dirName}/${KIBANA_DIR}/${key}/${obj.file}` + `${this._modulesDir}/${dirName}/${KIBANA_DIR}/${key}/${obj.file}` ); // use the file name for the id const kId = obj.file.replace('.json', ''); @@ -385,26 +400,26 @@ export class DataRecognizer { ); } - this.indexPatternName = + this._indexPatternName = indexPatternName === undefined ? 
moduleConfig.defaultIndexPattern : indexPatternName; - this.indexPatternId = await this.getIndexPatternId(this.indexPatternName); + this._indexPatternId = await this.getIndexPatternId(this._indexPatternName); // the module's jobs contain custom URLs which require an index patten id // but there is no corresponding index pattern, throw an error - if (this.indexPatternId === undefined && this.doJobUrlsContainIndexPatternId(moduleConfig)) { + if (this._indexPatternId === undefined && this.doJobUrlsContainIndexPatternId(moduleConfig)) { throw Boom.badRequest( - `Module's jobs contain custom URLs which require a kibana index pattern (${this.indexPatternName}) which cannot be found.` + `Module's jobs contain custom URLs which require a kibana index pattern (${this._indexPatternName}) which cannot be found.` ); } // the module's saved objects require an index patten id // but there is no corresponding index pattern, throw an error if ( - this.indexPatternId === undefined && + this._indexPatternId === undefined && this.doSavedObjectsContainIndexPatternId(moduleConfig) ) { throw Boom.badRequest( - `Module's saved objects contain custom URLs which require a kibana index pattern (${this.indexPatternName}) which cannot be found.` + `Module's saved objects contain custom URLs which require a kibana index pattern (${this._indexPatternName}) which cannot be found.` ); } @@ -495,7 +510,7 @@ export class DataRecognizer { // Add a wildcard at the front of each of the job IDs in the module, // as a prefix may have been supplied when creating the jobs in the module. const jobIds = module.jobs.map((job) => `*${job.id}`); - const { jobsExist } = jobServiceProvider(this.callAsCurrentUser); + const { jobsExist } = jobServiceProvider(this._mlClusterClient); const jobInfo = await jobsExist(jobIds); // Check if the value for any of the jobs is false. @@ -504,11 +519,13 @@ export class DataRecognizer { if (doJobsExist === true) { // Get the IDs of the jobs created from the module, and their earliest / latest timestamps. - const jobStats: MlJobStats = await this.callAsCurrentUser('ml.jobStats', { jobId: jobIds }); + const jobStats: MlJobStats = await this._callAsInternalUser('ml.jobStats', { + jobId: jobIds, + }); const jobStatsJobs: JobStat[] = []; if (jobStats.jobs && jobStats.jobs.length > 0) { const foundJobIds = jobStats.jobs.map((job) => job.job_id); - const { getLatestBucketTimestampByJob } = resultsServiceProvider(this.callAsCurrentUser); + const { getLatestBucketTimestampByJob } = resultsServiceProvider(this._mlClusterClient); const latestBucketTimestampsByJob = await getLatestBucketTimestampByJob(foundJobIds); jobStats.jobs.forEach((job) => { @@ -669,7 +686,7 @@ export class DataRecognizer { async saveJob(job: ModuleJob) { const { id: jobId, config: body } = job; - return this.callAsCurrentUser('ml.addJob', { jobId, body }); + return this._callAsInternalUser('ml.addJob', { jobId, body }); } // save the datafeeds. 
@@ -690,7 +707,11 @@ export class DataRecognizer { async saveDatafeed(datafeed: ModuleDataFeed) { const { id: datafeedId, config: body } = datafeed; - return this.callAsCurrentUser('ml.addDatafeed', { datafeedId, body }); + return this._callAsInternalUser('ml.addDatafeed', { + datafeedId, + body, + ...this._authorizationHeader, + }); } async startDatafeeds( @@ -713,7 +734,7 @@ export class DataRecognizer { const result = { started: false } as DatafeedResponse; let opened = false; try { - const openResult = await this.callAsCurrentUser('ml.openJob', { + const openResult = await this._callAsInternalUser('ml.openJob', { jobId: datafeed.config.job_id, }); opened = openResult.opened; @@ -737,7 +758,10 @@ export class DataRecognizer { duration.end = end; } - await this.callAsCurrentUser('ml.startDatafeed', { datafeedId: datafeed.id, ...duration }); + await this._callAsInternalUser('ml.startDatafeed', { + datafeedId: datafeed.id, + ...duration, + }); result.started = true; } catch (error) { result.started = false; @@ -838,7 +862,7 @@ export class DataRecognizer { updateDatafeedIndices(moduleConfig: Module) { // if the supplied index pattern contains a comma, split into multiple indices and // add each one to the datafeed - const indexPatternNames = splitIndexPatternNames(this.indexPatternName); + const indexPatternNames = splitIndexPatternNames(this._indexPatternName); moduleConfig.datafeeds.forEach((df) => { const newIndices: string[] = []; @@ -876,7 +900,7 @@ export class DataRecognizer { if (url.match(INDEX_PATTERN_ID)) { const newUrl = url.replace( new RegExp(INDEX_PATTERN_ID, 'g'), - this.indexPatternId as string + this._indexPatternId as string ); // update the job's url cUrl.url_value = newUrl; @@ -915,7 +939,7 @@ export class DataRecognizer { if (jsonString.match(INDEX_PATTERN_ID)) { jsonString = jsonString.replace( new RegExp(INDEX_PATTERN_ID, 'g'), - this.indexPatternId as string + this._indexPatternId as string ); item.config.kibanaSavedObjectMeta!.searchSourceJSON = jsonString; } @@ -927,7 +951,7 @@ export class DataRecognizer { if (visStateString !== undefined && visStateString.match(INDEX_PATTERN_NAME)) { visStateString = visStateString.replace( new RegExp(INDEX_PATTERN_NAME, 'g'), - this.indexPatternName + this._indexPatternName ); item.config.visState = visStateString; } @@ -944,10 +968,10 @@ export class DataRecognizer { timeField: string, query?: any ): Promise<{ start: number; end: number }> { - const fieldsService = fieldsServiceProvider(this.callAsCurrentUser); + const fieldsService = fieldsServiceProvider(this._mlClusterClient); const timeFieldRange = await fieldsService.getTimeFieldRange( - this.indexPatternName, + this._indexPatternName, timeField, query ); @@ -974,7 +998,7 @@ export class DataRecognizer { if (estimateMML && this.jobsForModelMemoryEstimation.length > 0) { try { - const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this.callAsCurrentUser); + const calculateModelMemoryLimit = calculateModelMemoryLimitProvider(this._mlClusterClient); // Checks if all jobs in the module have the same time field configured const firstJobTimeField = this.jobsForModelMemoryEstimation[0].job.config.data_description @@ -1009,7 +1033,7 @@ export class DataRecognizer { const { modelMemoryLimit } = await calculateModelMemoryLimit( job.config.analysis_config, - this.indexPatternName, + this._indexPatternName, query, job.config.data_description.time_field, earliestMs, @@ -1027,20 +1051,20 @@ export class DataRecognizer { } } - const { limits } = await 
this.callAsCurrentUser('ml.info'); + const { limits } = (await this._callAsInternalUser('ml.info')) as MlInfoResponse; const maxMml = limits.max_model_memory_limit; if (!maxMml) { return; } - // @ts-ignore + // @ts-expect-error const maxBytes: number = numeral(maxMml.toUpperCase()).value(); for (const job of moduleConfig.jobs) { const mml = job.config?.analysis_limits?.model_memory_limit; if (mml !== undefined) { - // @ts-ignore + // @ts-expect-error const mmlBytes: number = numeral(mml.toUpperCase()).value(); if (mmlBytes > maxBytes) { // if the job's mml is over the max, diff --git a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json index 0f8fa814ac60a6..a4ec84f1fb3f33 100644 --- a/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json +++ b/x-pack/plugins/ml/server/models/data_recognizer/modules/siem_cloudtrail/ml/rare_error_code.json @@ -1,6 +1,6 @@ { "job_type": "anomaly_detector", - "description": "Looks for unsual errors. Rare and unusual errors may simply indicate an impending service failure but they can also be byproducts of attempted or successful persistence, privilege escalation, defense evasion, discovery, lateral movement, or collection activity by a threat actor.", + "description": "Looks for unusual errors. Rare and unusual errors may simply indicate an impending service failure but they can also be byproducts of attempted or successful persistence, privilege escalation, defense evasion, discovery, lateral movement, or collection activity by a threat actor.", "groups": [ "siem", "cloudtrail" diff --git a/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts b/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts index d58c797b446db6..7f19f32373e077 100644 --- a/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts +++ b/x-pack/plugins/ml/server/models/data_visualizer/data_visualizer.ts @@ -4,10 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyCallAPIOptions, LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import _ from 'lodash'; +import { KBN_FIELD_TYPES } from '../../../../../../src/plugins/data/server'; import { ML_JOB_FIELD_TYPES } from '../../../common/constants/field_types'; import { getSafeAggregationName } from '../../../common/util/job_utils'; +import { stringHash } from '../../../common/util/string_utils'; import { buildBaseFilterCriteria, buildSamplerAggregation, @@ -19,6 +21,8 @@ const SAMPLER_TOP_TERMS_SHARD_SIZE = 5000; const AGGREGATABLE_EXISTS_REQUEST_BATCH_SIZE = 200; const FIELDS_REQUEST_BATCH_SIZE = 10; +const MAX_CHART_COLUMNS = 20; + interface FieldData { fieldName: string; existsInDocs: boolean; @@ -35,6 +39,11 @@ export interface Field { cardinality: number; } +export interface HistogramField { + fieldName: string; + type: string; +} + interface Distribution { percentiles: any[]; minPercentile: number; @@ -98,6 +107,70 @@ interface FieldExamples { examples: any[]; } +interface NumericColumnStats { + interval: number; + min: number; + max: number; +} +type NumericColumnStatsMap = Record; + +interface AggHistogram { + histogram: { + field: string; + interval: number; + }; +} + +interface AggCardinality { + cardinality: { + field: string; + }; +} + +interface AggTerms { + terms: { + field: string; + size: number; + }; +} + +interface NumericDataItem { + key: number; + key_as_string?: string; + doc_count: number; +} + +interface NumericChartData { + data: NumericDataItem[]; + id: string; + interval: number; + stats: [number, number]; + type: 'numeric'; +} + +interface OrdinalDataItem { + key: string; + key_as_string?: string; + doc_count: number; +} + +interface OrdinalChartData { + type: 'ordinal' | 'boolean'; + cardinality: number; + data: OrdinalDataItem[]; + id: string; +} + +interface UnsupportedChartData { + id: string; + type: 'unsupported'; +} + +type ChartRequestAgg = AggHistogram | AggCardinality | AggTerms; + +// type ChartDataItem = NumericDataItem | OrdinalDataItem; +type ChartData = NumericChartData | OrdinalChartData | UnsupportedChartData; + type BatchStats = | NumericFieldStats | StringFieldStats @@ -106,15 +179,182 @@ type BatchStats = | DocumentCountStats | FieldExamples; +const getAggIntervals = async ( + { callAsCurrentUser }: ILegacyScopedClusterClient, + indexPatternTitle: string, + query: any, + fields: HistogramField[], + samplerShardSize: number +): Promise => { + const numericColumns = fields.filter((field) => { + return field.type === KBN_FIELD_TYPES.NUMBER || field.type === KBN_FIELD_TYPES.DATE; + }); + + if (numericColumns.length === 0) { + return {}; + } + + const minMaxAggs = numericColumns.reduce((aggs, c) => { + const id = stringHash(c.fieldName); + aggs[id] = { + stats: { + field: c.fieldName, + }, + }; + return aggs; + }, {} as Record); + + const respStats = await callAsCurrentUser('search', { + index: indexPatternTitle, + size: 0, + body: { + query, + aggs: buildSamplerAggregation(minMaxAggs, samplerShardSize), + size: 0, + }, + }); + + const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); + const aggregations = + aggsPath.length > 0 ? 
_.get(respStats.aggregations, aggsPath) : respStats.aggregations; + + return Object.keys(aggregations).reduce((p, aggName) => { + const stats = [aggregations[aggName].min, aggregations[aggName].max]; + if (!stats.includes(null)) { + const delta = aggregations[aggName].max - aggregations[aggName].min; + + let aggInterval = 1; + + if (delta > MAX_CHART_COLUMNS || delta <= 1) { + aggInterval = delta / (MAX_CHART_COLUMNS - 1); + } + + p[aggName] = { interval: aggInterval, min: stats[0], max: stats[1] }; + } + + return p; + }, {} as NumericColumnStatsMap); +}; + +// export for re-use by transforms plugin +export const getHistogramsForFields = async ( + mlClusterClient: ILegacyScopedClusterClient, + indexPatternTitle: string, + query: any, + fields: HistogramField[], + samplerShardSize: number +) => { + const { callAsCurrentUser } = mlClusterClient; + const aggIntervals = await getAggIntervals( + mlClusterClient, + indexPatternTitle, + query, + fields, + samplerShardSize + ); + + const chartDataAggs = fields.reduce((aggs, field) => { + const fieldName = field.fieldName; + const fieldType = field.type; + const id = stringHash(fieldName); + if (fieldType === KBN_FIELD_TYPES.NUMBER || fieldType === KBN_FIELD_TYPES.DATE) { + if (aggIntervals[id] !== undefined) { + aggs[`${id}_histogram`] = { + histogram: { + field: fieldName, + interval: aggIntervals[id].interval !== 0 ? aggIntervals[id].interval : 1, + }, + }; + } + } else if (fieldType === KBN_FIELD_TYPES.STRING || fieldType === KBN_FIELD_TYPES.BOOLEAN) { + if (fieldType === KBN_FIELD_TYPES.STRING) { + aggs[`${id}_cardinality`] = { + cardinality: { + field: fieldName, + }, + }; + } + aggs[`${id}_terms`] = { + terms: { + field: fieldName, + size: MAX_CHART_COLUMNS, + }, + }; + } + return aggs; + }, {} as Record); + + if (Object.keys(chartDataAggs).length === 0) { + return []; + } + + const respChartsData = await callAsCurrentUser('search', { + index: indexPatternTitle, + size: 0, + body: { + query, + aggs: buildSamplerAggregation(chartDataAggs, samplerShardSize), + size: 0, + }, + }); + + const aggsPath = getSamplerAggregationsResponsePath(samplerShardSize); + const aggregations = + aggsPath.length > 0 + ? _.get(respChartsData.aggregations, aggsPath) + : respChartsData.aggregations; + + const chartsData: ChartData[] = fields.map( + (field): ChartData => { + const fieldName = field.fieldName; + const fieldType = field.type; + const id = stringHash(field.fieldName); + + if (fieldType === KBN_FIELD_TYPES.NUMBER || fieldType === KBN_FIELD_TYPES.DATE) { + if (aggIntervals[id] === undefined) { + return { + type: 'numeric', + data: [], + interval: 0, + stats: [0, 0], + id: fieldName, + }; + } + + return { + data: aggregations[`${id}_histogram`].buckets, + interval: aggIntervals[id].interval, + stats: [aggIntervals[id].min, aggIntervals[id].max], + type: 'numeric', + id: fieldName, + }; + } else if (fieldType === KBN_FIELD_TYPES.STRING || fieldType === KBN_FIELD_TYPES.BOOLEAN) { + return { + type: fieldType === KBN_FIELD_TYPES.STRING ? 'ordinal' : 'boolean', + cardinality: + fieldType === KBN_FIELD_TYPES.STRING ? 
aggregations[`${id}_cardinality`].value : 2, + data: aggregations[`${id}_terms`].buckets, + id: fieldName, + }; + } + + return { + type: 'unsupported', + id: fieldName, + }; + } + ); + + return chartsData; +}; + export class DataVisualizer { - callAsCurrentUser: ( - endpoint: string, - clientParams: Record, - options?: LegacyCallAPIOptions - ) => Promise; - - constructor(callAsCurrentUser: LegacyAPICaller) { - this.callAsCurrentUser = callAsCurrentUser; + private _mlClusterClient: ILegacyScopedClusterClient; + private _callAsCurrentUser: ILegacyScopedClusterClient['callAsCurrentUser']; + + constructor(mlClusterClient: ILegacyScopedClusterClient) { + this._callAsCurrentUser = mlClusterClient.callAsCurrentUser; + this._mlClusterClient = mlClusterClient; } // Obtains overall stats on the fields in the supplied index pattern, returning an object @@ -200,6 +440,24 @@ export class DataVisualizer { return stats; } + // Obtains binned histograms for supplied list of fields. The statistics for each field in the + // returned array depend on the type of the field (keyword, number, date etc). + // Sampling will be used if supplied samplerShardSize > 0. + async getHistogramsForFields( + indexPatternTitle: string, + query: any, + fields: HistogramField[], + samplerShardSize: number + ): Promise { + return await getHistogramsForFields( + this._mlClusterClient, + indexPatternTitle, + query, + fields, + samplerShardSize + ); + } + // Obtains statistics for supplied list of fields. The statistics for each field in the // returned array depend on the type of the field (keyword, number, date etc). // Sampling will be used if supplied samplerShardSize > 0. @@ -371,7 +629,7 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, rest_total_hits_as_int: true, size, @@ -438,7 +696,7 @@ export class DataVisualizer { }; filterCriteria.push({ exists: { field } }); - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, rest_total_hits_as_int: true, size, @@ -480,7 +738,7 @@ export class DataVisualizer { aggs, }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, size, body, @@ -583,7 +841,7 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, size, body, @@ -704,7 +962,7 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, size, body, @@ -778,7 +1036,7 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, size, body, @@ -845,7 +1103,7 @@ export class DataVisualizer { aggs: buildSamplerAggregation(aggs, samplerShardSize), }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, size, body, @@ -907,7 +1165,7 @@ export class DataVisualizer { }, }; - const resp = await this.callAsCurrentUser('search', { + const resp = await this._callAsCurrentUser('search', { index, rest_total_hits_as_int: true, size, diff 
--git a/x-pack/plugins/ml/server/models/data_visualizer/index.ts b/x-pack/plugins/ml/server/models/data_visualizer/index.ts index ed44e9b12e1d14..ca1df0fe8300c9 100644 --- a/x-pack/plugins/ml/server/models/data_visualizer/index.ts +++ b/x-pack/plugins/ml/server/models/data_visualizer/index.ts @@ -4,4 +4,4 @@ * you may not use this file except in compliance with the Elastic License. */ -export { DataVisualizer } from './data_visualizer'; +export { getHistogramsForFields, DataVisualizer } from './data_visualizer'; diff --git a/x-pack/plugins/ml/server/models/fields_service/fields_service.ts b/x-pack/plugins/ml/server/models/fields_service/fields_service.ts index 661ea6c6fec24b..43a6876f76c49e 100644 --- a/x-pack/plugins/ml/server/models/fields_service/fields_service.ts +++ b/x-pack/plugins/ml/server/models/fields_service/fields_service.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { duration } from 'moment'; import { parseInterval } from '../../../common/util/parse_interval'; import { initCardinalityFieldsCache } from './fields_aggs_cache'; @@ -14,7 +14,7 @@ import { initCardinalityFieldsCache } from './fields_aggs_cache'; * Service for carrying out queries to obtain data * specific to fields in Elasticsearch indices. */ -export function fieldsServiceProvider(callAsCurrentUser: LegacyAPICaller) { +export function fieldsServiceProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { const fieldsAggsCache = initCardinalityFieldsCache(); /** diff --git a/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts b/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts index 978355d098b13a..9cd71c046b66c7 100644 --- a/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts +++ b/x-pack/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { AnalysisResult, FormattedOverrides, @@ -13,9 +13,9 @@ import { export type InputData = any[]; -export function fileDataVisualizerProvider(callAsCurrentUser: LegacyAPICaller) { +export function fileDataVisualizerProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { async function analyzeFile(data: any, overrides: any): Promise { - const results = await callAsCurrentUser('ml.fileStructure', { + const results = await callAsInternalUser('ml.fileStructure', { body: data, ...overrides, }); diff --git a/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts b/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts index e082a7462241a5..fc9b333298c9dd 100644 --- a/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts +++ b/x-pack/plugins/ml/server/models/file_data_visualizer/import_data.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
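> Editor's note: the interval derivation inside `getAggIntervals` (in the data_visualizer.ts hunk above) turns a field's min/max stats into a histogram interval that keeps charts at or under `MAX_CHART_COLUMNS` buckets. A standalone sketch of just that step; `getHistogramInterval` is a hypothetical name for illustration.

```typescript
const MAX_CHART_COLUMNS = 20;

function getHistogramInterval(min: number, max: number): number {
  const delta = max - min;
  // one bucket per unit, unless that would exceed the column limit
  // or the whole range fits inside a single unit
  if (delta > MAX_CHART_COLUMNS || delta <= 1) {
    return delta / (MAX_CHART_COLUMNS - 1);
  }
  return 1;
}

// e.g. getHistogramInterval(0, 100) === 100 / 19 (about 5.26)
```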
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_datavisualizer'; import { ImportResponse, @@ -15,7 +15,7 @@ import { } from '../../../common/types/file_datavisualizer'; import { InputData } from './file_data_visualizer'; -export function importDataProvider(callAsCurrentUser: LegacyAPICaller) { +export function importDataProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { async function importData( id: string, index: string, diff --git a/x-pack/plugins/ml/server/models/filter/filter_manager.ts b/x-pack/plugins/ml/server/models/filter/filter_manager.ts index 40a20030cb635a..20dc95e92a86c7 100644 --- a/x-pack/plugins/ml/server/models/filter/filter_manager.ts +++ b/x-pack/plugins/ml/server/models/filter/filter_manager.ts @@ -5,7 +5,7 @@ */ import Boom from 'boom'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { DetectorRule, DetectorRuleScope } from '../../../common/types/detector_rules'; @@ -58,14 +58,17 @@ interface PartialJob { } export class FilterManager { - constructor(private callAsCurrentUser: LegacyAPICaller) {} + private _callAsInternalUser: ILegacyScopedClusterClient['callAsInternalUser']; + constructor({ callAsInternalUser }: ILegacyScopedClusterClient) { + this._callAsInternalUser = callAsInternalUser; + } async getFilter(filterId: string) { try { const [JOBS, FILTERS] = [0, 1]; const results = await Promise.all([ - this.callAsCurrentUser('ml.jobs'), - this.callAsCurrentUser('ml.filters', { filterId }), + this._callAsInternalUser('ml.jobs'), + this._callAsInternalUser('ml.filters', { filterId }), ]); if (results[FILTERS] && results[FILTERS].filters.length) { @@ -87,7 +90,7 @@ export class FilterManager { async getAllFilters() { try { - const filtersResp = await this.callAsCurrentUser('ml.filters'); + const filtersResp = await this._callAsInternalUser('ml.filters'); return filtersResp.filters; } catch (error) { throw Boom.badRequest(error); @@ -98,8 +101,8 @@ export class FilterManager { try { const [JOBS, FILTERS] = [0, 1]; const results = await Promise.all([ - this.callAsCurrentUser('ml.jobs'), - this.callAsCurrentUser('ml.filters'), + this._callAsInternalUser('ml.jobs'), + this._callAsInternalUser('ml.filters'), ]); // Build a map of filter_ids against jobs and detectors using that filter. @@ -137,7 +140,7 @@ export class FilterManager { delete filter.filterId; try { // Returns the newly created filter. - return await this.callAsCurrentUser('ml.addFilter', { filterId, body: filter }); + return await this._callAsInternalUser('ml.addFilter', { filterId, body: filter }); } catch (error) { throw Boom.badRequest(error); } @@ -157,7 +160,7 @@ export class FilterManager { } // Returns the newly updated filter. 
- return await this.callAsCurrentUser('ml.updateFilter', { + return await this._callAsInternalUser('ml.updateFilter', { filterId, body, }); @@ -167,7 +170,7 @@ export class FilterManager { } async deleteFilter(filterId: string) { - return this.callAsCurrentUser('ml.deleteFilter', { filterId }); + return this._callAsInternalUser('ml.deleteFilter', { filterId }); } buildFiltersInUse(jobsList: PartialJob[]) { diff --git a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts index f11771a88c5c63..d72552b548b827 100644 --- a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts +++ b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.d.ts @@ -4,10 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; export function jobAuditMessagesProvider( - callAsCurrentUser: LegacyAPICaller + mlClusterClient: ILegacyScopedClusterClient ): { getJobAuditMessages: (jobId?: string, from?: string) => any; getAuditMessagesSummary: (jobIds?: string[]) => any; diff --git a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js index 6b782f86523632..dcbabd879b47a3 100644 --- a/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js +++ b/x-pack/plugins/ml/server/models/job_audit_messages/job_audit_messages.js @@ -34,14 +34,14 @@ const anomalyDetectorTypeFilter = { }, }; -export function jobAuditMessagesProvider(callAsCurrentUser) { +export function jobAuditMessagesProvider({ callAsCurrentUser, callAsInternalUser }) { // search for audit messages, // jobId is optional. without it, all jobs will be listed. // from is optional and should be a string formatted in ES time units. e.g. 12h, 1d, 7d async function getJobAuditMessages(jobId, from) { let gte = null; if (jobId !== undefined && from === undefined) { - const jobs = await callAsCurrentUser('ml.jobs', { jobId }); + const jobs = await callAsInternalUser('ml.jobs', { jobId }); if (jobs.count > 0 && jobs.jobs !== undefined) { gte = moment(jobs.jobs[0].create_time).valueOf(); } diff --git a/x-pack/plugins/ml/server/models/job_service/datafeeds.ts b/x-pack/plugins/ml/server/models/job_service/datafeeds.ts index 0f64f5e0e7b4f2..98e1be48bb766e 100644 --- a/x-pack/plugins/ml/server/models/job_service/datafeeds.ts +++ b/x-pack/plugins/ml/server/models/job_service/datafeeds.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { i18n } from '@kbn/i18n'; import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states'; import { fillResultsWithTimeouts, isRequestTimeout } from './error_utils'; @@ -26,7 +26,7 @@ interface Results { }; } -export function datafeedsProvider(callAsCurrentUser: LegacyAPICaller) { +export function datafeedsProvider({ callAsInternalUser }: ILegacyScopedClusterClient) { async function forceStartDatafeeds(datafeedIds: string[], start?: number, end?: number) { const jobIds = await getJobIdsByDatafeedId(); const doStartsCalled = datafeedIds.reduce((acc, cur) => { @@ -84,7 +84,7 @@ export function datafeedsProvider(callAsCurrentUser: LegacyAPICaller) { async function openJob(jobId: string) { let opened = false; try { - const resp = await callAsCurrentUser('ml.openJob', { jobId }); + const resp = await callAsInternalUser('ml.openJob', { jobId }); opened = resp.opened; } catch (error) { if (error.statusCode === 409) { @@ -97,7 +97,7 @@ export function datafeedsProvider(callAsCurrentUser: LegacyAPICaller) { } async function startDatafeed(datafeedId: string, start?: number, end?: number) { - return callAsCurrentUser('ml.startDatafeed', { datafeedId, start, end }); + return callAsInternalUser('ml.startDatafeed', { datafeedId, start, end }); } async function stopDatafeeds(datafeedIds: string[]) { @@ -105,7 +105,7 @@ export function datafeedsProvider(callAsCurrentUser: LegacyAPICaller) { for (const datafeedId of datafeedIds) { try { - results[datafeedId] = await callAsCurrentUser('ml.stopDatafeed', { datafeedId }); + results[datafeedId] = await callAsInternalUser('ml.stopDatafeed', { datafeedId }); } catch (error) { if (isRequestTimeout(error)) { return fillResultsWithTimeouts(results, datafeedId, datafeedIds, DATAFEED_STATE.STOPPED); @@ -117,11 +117,11 @@ export function datafeedsProvider(callAsCurrentUser: LegacyAPICaller) { } async function forceDeleteDatafeed(datafeedId: string) { - return callAsCurrentUser('ml.deleteDatafeed', { datafeedId, force: true }); + return callAsInternalUser('ml.deleteDatafeed', { datafeedId, force: true }); } async function getDatafeedIdsByJobId() { - const { datafeeds } = await callAsCurrentUser('ml.datafeeds'); + const { datafeeds } = (await callAsInternalUser('ml.datafeeds')) as MlDatafeedsResponse; return datafeeds.reduce((acc, cur) => { acc[cur.job_id] = cur.datafeed_id; return acc; @@ -129,7 +129,7 @@ export function datafeedsProvider(callAsCurrentUser: LegacyAPICaller) { } async function getJobIdsByDatafeedId() { - const { datafeeds } = await callAsCurrentUser('ml.datafeeds'); + const { datafeeds } = (await callAsInternalUser('ml.datafeeds')) as MlDatafeedsResponse; return datafeeds.reduce((acc, cur) => { acc[cur.datafeed_id] = cur.job_id; return acc; diff --git a/x-pack/plugins/ml/server/models/job_service/groups.ts b/x-pack/plugins/ml/server/models/job_service/groups.ts index ab5707ab29e65c..c4ea854c14f872 100644 --- a/x-pack/plugins/ml/server/models/job_service/groups.ts +++ b/x-pack/plugins/ml/server/models/job_service/groups.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { CalendarManager } from '../calendar'; import { GLOBAL_CALENDAR } from '../../../common/constants/calendars'; import { Job } from '../../../common/types/anomaly_detection_jobs'; @@ -23,14 +23,15 @@ interface Results { }; } -export function groupsProvider(callAsCurrentUser: LegacyAPICaller) { - const calMngr = new CalendarManager(callAsCurrentUser); +export function groupsProvider(mlClusterClient: ILegacyScopedClusterClient) { + const calMngr = new CalendarManager(mlClusterClient); + const { callAsInternalUser } = mlClusterClient; async function getAllGroups() { const groups: { [id: string]: Group } = {}; const jobIds: { [id: string]: undefined | null } = {}; const [{ jobs }, calendars] = await Promise.all([ - callAsCurrentUser('ml.jobs'), + callAsInternalUser('ml.jobs') as Promise, calMngr.getAllCalendars(), ]); @@ -79,7 +80,7 @@ export function groupsProvider(callAsCurrentUser: LegacyAPICaller) { for (const job of jobs) { const { job_id: jobId, groups } = job; try { - await callAsCurrentUser('ml.updateJob', { jobId, body: { groups } }); + await callAsInternalUser('ml.updateJob', { jobId, body: { groups } }); results[jobId] = { success: true }; } catch (error) { results[jobId] = { success: false, error }; diff --git a/x-pack/plugins/ml/server/models/job_service/index.ts b/x-pack/plugins/ml/server/models/job_service/index.ts index 5d053c1be73e42..1ff33a7b00f0b5 100644 --- a/x-pack/plugins/ml/server/models/job_service/index.ts +++ b/x-pack/plugins/ml/server/models/job_service/index.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { datafeedsProvider } from './datafeeds'; import { jobsProvider } from './jobs'; import { groupsProvider } from './groups'; @@ -12,14 +12,14 @@ import { newJobCapsProvider } from './new_job_caps'; import { newJobChartsProvider, topCategoriesProvider } from './new_job'; import { modelSnapshotProvider } from './model_snapshots'; -export function jobServiceProvider(callAsCurrentUser: LegacyAPICaller) { +export function jobServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { return { - ...datafeedsProvider(callAsCurrentUser), - ...jobsProvider(callAsCurrentUser), - ...groupsProvider(callAsCurrentUser), - ...newJobCapsProvider(callAsCurrentUser), - ...newJobChartsProvider(callAsCurrentUser), - ...topCategoriesProvider(callAsCurrentUser), - ...modelSnapshotProvider(callAsCurrentUser), + ...datafeedsProvider(mlClusterClient), + ...jobsProvider(mlClusterClient), + ...groupsProvider(mlClusterClient), + ...newJobCapsProvider(mlClusterClient), + ...newJobChartsProvider(mlClusterClient), + ...topCategoriesProvider(mlClusterClient), + ...modelSnapshotProvider(mlClusterClient), }; } diff --git a/x-pack/plugins/ml/server/models/job_service/jobs.ts b/x-pack/plugins/ml/server/models/job_service/jobs.ts index 2d26b2150edf33..aca0c5d72a9f5a 100644 --- a/x-pack/plugins/ml/server/models/job_service/jobs.ts +++ b/x-pack/plugins/ml/server/models/job_service/jobs.ts @@ -7,7 +7,7 @@ import { i18n } from '@kbn/i18n'; import { uniq } from 'lodash'; import Boom from 'boom'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { JOB_STATE, DATAFEED_STATE } from '../../../common/constants/states'; import { MlSummaryJob, @@ -46,14 +46,16 @@ 
interface Results { }; } -export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { - const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(callAsCurrentUser); - const { getAuditMessagesSummary } = jobAuditMessagesProvider(callAsCurrentUser); - const { getLatestBucketTimestampByJob } = resultsServiceProvider(callAsCurrentUser); - const calMngr = new CalendarManager(callAsCurrentUser); +export function jobsProvider(mlClusterClient: ILegacyScopedClusterClient) { + const { callAsCurrentUser, callAsInternalUser } = mlClusterClient; + + const { forceDeleteDatafeed, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient); + const { getAuditMessagesSummary } = jobAuditMessagesProvider(mlClusterClient); + const { getLatestBucketTimestampByJob } = resultsServiceProvider(mlClusterClient); + const calMngr = new CalendarManager(mlClusterClient); async function forceDeleteJob(jobId: string) { - return callAsCurrentUser('ml.deleteJob', { jobId, force: true }); + return callAsInternalUser('ml.deleteJob', { jobId, force: true }); } async function deleteJobs(jobIds: string[]) { @@ -97,7 +99,7 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { const results: Results = {}; for (const jobId of jobIds) { try { - await callAsCurrentUser('ml.closeJob', { jobId }); + await callAsInternalUser('ml.closeJob', { jobId }); results[jobId] = { closed: true }; } catch (error) { if (isRequestTimeout(error)) { @@ -113,7 +115,7 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { // if the job has failed we want to attempt a force close. // however, if we received a 409 due to the datafeed being started we should not attempt a force close. try { - await callAsCurrentUser('ml.closeJob', { jobId, force: true }); + await callAsInternalUser('ml.closeJob', { jobId, force: true }); results[jobId] = { closed: true }; } catch (error2) { if (isRequestTimeout(error)) { @@ -136,12 +138,12 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { throw Boom.notFound(`Cannot find datafeed for job ${jobId}`); } - const dfResult = await callAsCurrentUser('ml.stopDatafeed', { datafeedId, force: true }); + const dfResult = await callAsInternalUser('ml.stopDatafeed', { datafeedId, force: true }); if (!dfResult || dfResult.stopped !== true) { return { success: false }; } - await callAsCurrentUser('ml.closeJob', { jobId, force: true }); + await callAsInternalUser('ml.closeJob', { jobId, force: true }); return { success: true }; } @@ -257,13 +259,13 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { Promise<{ [id: string]: number | undefined }> ] = [ jobIds.length > 0 - ? callAsCurrentUser('ml.jobs', { jobId: jobIds }) // move length check in side call - : callAsCurrentUser('ml.jobs'), + ? (callAsInternalUser('ml.jobs', { jobId: jobIds }) as Promise) // move length check in side call + : (callAsInternalUser('ml.jobs') as Promise), jobIds.length > 0 - ? callAsCurrentUser('ml.jobStats', { jobId: jobIds }) - : callAsCurrentUser('ml.jobStats'), - callAsCurrentUser('ml.datafeeds'), - callAsCurrentUser('ml.datafeedStats'), + ? 
(callAsInternalUser('ml.jobStats', { jobId: jobIds }) as Promise) + : (callAsInternalUser('ml.jobStats') as Promise), + callAsInternalUser('ml.datafeeds') as Promise, + callAsInternalUser('ml.datafeedStats') as Promise, calMngr.getAllCalendars(), getLatestBucketTimestampByJob(), ]; @@ -402,7 +404,7 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { } catch (e) { // if the user doesn't have permission to load the task list, // use the jobs list to get the ids of deleting jobs - const { jobs } = await callAsCurrentUser('ml.jobs'); + const { jobs } = (await callAsInternalUser('ml.jobs')) as MlJobsResponse; jobIds.push(...jobs.filter((j) => j.deleting === true).map((j) => j.job_id)); } return { jobIds }; @@ -413,9 +415,9 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { // e.g. *_low_request_rate_ecs async function jobsExist(jobIds: string[] = []) { // Get the list of job IDs. - const jobsInfo = await callAsCurrentUser('ml.jobs', { + const jobsInfo = (await callAsInternalUser('ml.jobs', { jobId: jobIds, - }); + })) as MlJobsResponse; const results: { [id: string]: boolean } = {}; if (jobsInfo.count > 0) { @@ -438,8 +440,8 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { } async function getAllJobAndGroupIds() { - const { getAllGroups } = groupsProvider(callAsCurrentUser); - const jobs = await callAsCurrentUser('ml.jobs'); + const { getAllGroups } = groupsProvider(mlClusterClient); + const jobs = (await callAsInternalUser('ml.jobs')) as MlJobsResponse; const jobIds = jobs.jobs.map((job) => job.job_id); const groups = await getAllGroups(); const groupIds = groups.map((group) => group.id); @@ -453,7 +455,7 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { async function getLookBackProgress(jobId: string, start: number, end: number) { const datafeedId = `datafeed-${jobId}`; const [jobStats, isRunning] = await Promise.all([ - callAsCurrentUser('ml.jobStats', { jobId: [jobId] }), + callAsInternalUser('ml.jobStats', { jobId: [jobId] }) as Promise, isDatafeedRunning(datafeedId), ]); @@ -472,9 +474,9 @@ export function jobsProvider(callAsCurrentUser: LegacyAPICaller) { } async function isDatafeedRunning(datafeedId: string) { - const stats = await callAsCurrentUser('ml.datafeedStats', { + const stats = (await callAsInternalUser('ml.datafeedStats', { datafeedId: [datafeedId], - }); + })) as MlDatafeedsStatsResponse; if (stats.datafeeds.length) { const state = stats.datafeeds[0].state; return ( diff --git a/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts b/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts index 136d4f47c7facc..576d6f8cbb1600 100644 --- a/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts +++ b/x-pack/plugins/ml/server/models/job_service/model_snapshots.ts @@ -6,10 +6,9 @@ import Boom from 'boom'; import { i18n } from '@kbn/i18n'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ModelSnapshot } from '../../../common/types/anomaly_detection_jobs'; -import { datafeedsProvider, MlDatafeedsResponse } from './datafeeds'; -import { MlJobsResponse } from './jobs'; +import { datafeedsProvider } from './datafeeds'; import { FormCalendar, CalendarManager } from '../calendar'; export interface ModelSnapshotsResponse { @@ -20,8 +19,9 @@ export interface RevertModelSnapshotResponse { model: ModelSnapshot; } -export function modelSnapshotProvider(callAsCurrentUser: LegacyAPICaller) { - const { forceStartDatafeeds, 
getDatafeedIdsByJobId } = datafeedsProvider(callAsCurrentUser); +export function modelSnapshotProvider(mlClusterClient: ILegacyScopedClusterClient) { + const { callAsInternalUser } = mlClusterClient; + const { forceStartDatafeeds, getDatafeedIdsByJobId } = datafeedsProvider(mlClusterClient); async function revertModelSnapshot( jobId: string, @@ -33,12 +33,12 @@ export function modelSnapshotProvider(callAsCurrentUser: LegacyAPICaller) { ) { let datafeedId = `datafeed-${jobId}`; // ensure job exists - await callAsCurrentUser('ml.jobs', { jobId: [jobId] }); + await callAsInternalUser('ml.jobs', { jobId: [jobId] }); try { // ensure the datafeed exists // the datafeed is probably called datafeed- - await callAsCurrentUser('ml.datafeeds', { + await callAsInternalUser('ml.datafeeds', { datafeedId: [datafeedId], }); } catch (e) { @@ -52,22 +52,19 @@ export function modelSnapshotProvider(callAsCurrentUser: LegacyAPICaller) { } // ensure the snapshot exists - const snapshot = await callAsCurrentUser('ml.modelSnapshots', { + const snapshot = (await callAsInternalUser('ml.modelSnapshots', { jobId, snapshotId, - }); + })) as ModelSnapshotsResponse; // apply the snapshot revert - const { model } = await callAsCurrentUser( - 'ml.revertModelSnapshot', - { - jobId, - snapshotId, - body: { - delete_intervening_results: deleteInterveningResults, - }, - } - ); + const { model } = (await callAsInternalUser('ml.revertModelSnapshot', { + jobId, + snapshotId, + body: { + delete_intervening_results: deleteInterveningResults, + }, + })) as RevertModelSnapshotResponse; // create calendar (if specified) and replay datafeed if (replay && model.snapshot_id === snapshotId && snapshot.model_snapshots.length) { @@ -88,7 +85,7 @@ export function modelSnapshotProvider(callAsCurrentUser: LegacyAPICaller) { end_time: s.end, })), }; - const cm = new CalendarManager(callAsCurrentUser); + const cm = new CalendarManager(mlClusterClient); await cm.newCalendar(calendar); } diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts index bf0d79b3ec0725..ca3e0cef21049a 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/examples.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { ILegacyScopedClusterClient } from 'kibana/server'; import { chunk } from 'lodash'; import { SearchResponse } from 'elasticsearch'; import { CATEGORY_EXAMPLES_SAMPLE_SIZE } from '../../../../../common/constants/categorization_job'; @@ -12,15 +13,14 @@ import { CategorizationAnalyzer, CategoryFieldExample, } from '../../../../../common/types/categories'; -import { callWithRequestType } from '../../../../../common/types/kibana'; import { ValidationResults } from './validation_results'; const CHUNK_SIZE = 100; -export function categorizationExamplesProvider( - callWithRequest: callWithRequestType, - callWithInternalUser: callWithRequestType -) { +export function categorizationExamplesProvider({ + callAsCurrentUser, + callAsInternalUser, +}: ILegacyScopedClusterClient) { const validationResults = new ValidationResults(); async function categorizationExamples( @@ -57,7 +57,7 @@ export function categorizationExamplesProvider( } } - const results: SearchResponse<{ [id: string]: string }> = await callWithRequest('search', { + const results: SearchResponse<{ [id: string]: string }> = await callAsCurrentUser('search', { index: indexPatternTitle, size, body: { @@ -112,7 +112,7 @@ export function categorizationExamplesProvider( } async function loadTokens(examples: string[], analyzer: CategorizationAnalyzer) { - const { tokens }: { tokens: Token[] } = await callWithInternalUser('indices.analyze', { + const { tokens }: { tokens: Token[] } = await callAsInternalUser('indices.analyze', { body: { ...getAnalyzer(analyzer), text: examples, diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts index 13c5f107972ebb..4f97238a4a0b5c 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/categorization/top_categories.ts @@ -5,13 +5,13 @@ */ import { SearchResponse } from 'elasticsearch'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ML_RESULTS_INDEX_PATTERN } from '../../../../../common/constants/index_patterns'; import { CategoryId, Category } from '../../../../../common/types/categories'; -import { callWithRequestType } from '../../../../../common/types/kibana'; -export function topCategoriesProvider(callWithRequest: callWithRequestType) { +export function topCategoriesProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { async function getTotalCategories(jobId: string): Promise<{ total: number }> { - const totalResp = await callWithRequest('search', { + const totalResp = await callAsCurrentUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -37,7 +37,7 @@ export function topCategoriesProvider(callWithRequest: callWithRequestType) { } async function getTopCategoryCounts(jobId: string, numberOfCategories: number) { - const top: SearchResponse = await callWithRequest('search', { + const top: SearchResponse = await callAsCurrentUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { @@ -99,7 +99,7 @@ export function topCategoriesProvider(callWithRequest: callWithRequestType) { field: 'category_id', }, }; - const result: SearchResponse = await callWithRequest('search', { + const result: SearchResponse = await callAsCurrentUser('search', { index: ML_RESULTS_INDEX_PATTERN, size, body: { diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts 
b/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts index 88ae8caa91e4a1..63ae2c624ac381 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/charts.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ILegacyScopedClusterClient } from 'kibana/server'; import { newJobLineChartProvider } from './line_chart'; import { newJobPopulationChartProvider } from './population_chart'; -import { callWithRequestType } from '../../../../common/types/kibana'; -export function newJobChartsProvider(callWithRequest: callWithRequestType) { - const { newJobLineChart } = newJobLineChartProvider(callWithRequest); - const { newJobPopulationChart } = newJobPopulationChartProvider(callWithRequest); +export function newJobChartsProvider(mlClusterClient: ILegacyScopedClusterClient) { + const { newJobLineChart } = newJobLineChartProvider(mlClusterClient); + const { newJobPopulationChart } = newJobPopulationChartProvider(mlClusterClient); return { newJobLineChart, diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts b/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts index 4872f0f5e0ea48..3080b37867de52 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/line_chart.ts @@ -5,8 +5,8 @@ */ import { get } from 'lodash'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { AggFieldNamePair, EVENT_RATE_FIELD_ID } from '../../../../common/types/fields'; -import { callWithRequestType } from '../../../../common/types/kibana'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; type DtrIndex = number; @@ -23,7 +23,7 @@ interface ProcessedResults { totalResults: number; } -export function newJobLineChartProvider(callWithRequest: callWithRequestType) { +export function newJobLineChartProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { async function newJobLineChart( indexPatternTitle: string, timeField: string, @@ -47,7 +47,7 @@ export function newJobLineChartProvider(callWithRequest: callWithRequestType) { splitFieldValue ); - const results = await callWithRequest('search', json); + const results = await callAsCurrentUser('search', json); return processSearchResults( results, aggFieldNamePairs.map((af) => af.field) diff --git a/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts b/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts index 26609bdcc8f7d4..a9a2ce57f966cc 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job/population_chart.ts @@ -5,8 +5,8 @@ */ import { get } from 'lodash'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { AggFieldNamePair, EVENT_RATE_FIELD_ID } from '../../../../common/types/fields'; -import { callWithRequestType } from '../../../../common/types/kibana'; import { ML_MEDIAN_PERCENTS } from '../../../../common/util/job_utils'; const OVER_FIELD_EXAMPLES_COUNT = 40; @@ -29,7 +29,7 @@ interface ProcessedResults { totalResults: number; } -export function newJobPopulationChartProvider(callWithRequest: callWithRequestType) { +export function newJobPopulationChartProvider({ callAsCurrentUser }: ILegacyScopedClusterClient) { async function newJobPopulationChart( indexPatternTitle: string, timeField: string, @@ -52,7 +52,7 @@ export function 
newJobPopulationChartProvider(callWithRequest: callWithRequestTy ); try { - const results = await callWithRequest('search', json); + const results = await callAsCurrentUser('search', json); return processSearchResults( results, aggFieldNamePairs.map((af) => af.field) diff --git a/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts index a5ed4a18bf51c0..fd20610450cc17 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/field_service.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ILegacyScopedClusterClient } from 'kibana/server'; import { cloneDeep } from 'lodash'; import { SavedObjectsClientContract } from 'kibana/server'; import { @@ -39,32 +40,32 @@ const supportedTypes: string[] = [ export function fieldServiceProvider( indexPattern: string, isRollup: boolean, - callWithRequest: any, + mlClusterClient: ILegacyScopedClusterClient, savedObjectsClient: SavedObjectsClientContract ) { - return new FieldsService(indexPattern, isRollup, callWithRequest, savedObjectsClient); + return new FieldsService(indexPattern, isRollup, mlClusterClient, savedObjectsClient); } class FieldsService { private _indexPattern: string; private _isRollup: boolean; - private _callWithRequest: any; + private _mlClusterClient: ILegacyScopedClusterClient; private _savedObjectsClient: SavedObjectsClientContract; constructor( indexPattern: string, isRollup: boolean, - callWithRequest: any, - savedObjectsClient: any + mlClusterClient: ILegacyScopedClusterClient, + savedObjectsClient: SavedObjectsClientContract ) { this._indexPattern = indexPattern; this._isRollup = isRollup; - this._callWithRequest = callWithRequest; + this._mlClusterClient = mlClusterClient; this._savedObjectsClient = savedObjectsClient; } private async loadFieldCaps(): Promise { - return this._callWithRequest('fieldCaps', { + return this._mlClusterClient.callAsCurrentUser('fieldCaps', { index: this._indexPattern, fields: '*', }); @@ -108,7 +109,7 @@ class FieldsService { if (this._isRollup) { const rollupService = await rollupServiceProvider( this._indexPattern, - this._callWithRequest, + this._mlClusterClient, this._savedObjectsClient ); const rollupConfigs: RollupJob[] | null = await rollupService.getRollupJobs(); diff --git a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts index 02fef16a384d0d..38d6481e02a742 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.test.ts @@ -16,19 +16,23 @@ import farequoteJobCapsEmpty from './__mocks__/results/farequote_job_caps_empty. 
import cloudwatchJobCaps from './__mocks__/results/cloudwatch_rollup_job_caps.json'; describe('job_service - job_caps', () => { - let callWithRequestNonRollupMock: jest.Mock; - let callWithRequestRollupMock: jest.Mock; + let mlClusterClientNonRollupMock: any; + let mlClusterClientRollupMock: any; let savedObjectsClientMock: any; beforeEach(() => { - callWithRequestNonRollupMock = jest.fn((action: string) => { + const callAsNonRollupMock = jest.fn((action: string) => { switch (action) { case 'fieldCaps': return farequoteFieldCaps; } }); + mlClusterClientNonRollupMock = { + callAsCurrentUser: callAsNonRollupMock, + callAsInternalUser: callAsNonRollupMock, + }; - callWithRequestRollupMock = jest.fn((action: string) => { + const callAsRollupMock = jest.fn((action: string) => { switch (action) { case 'fieldCaps': return cloudwatchFieldCaps; @@ -36,6 +40,10 @@ describe('job_service - job_caps', () => { return Promise.resolve(rollupCaps); } }); + mlClusterClientRollupMock = { + callAsCurrentUser: callAsRollupMock, + callAsInternalUser: callAsRollupMock, + }; savedObjectsClientMock = { async find() { @@ -48,7 +56,7 @@ describe('job_service - job_caps', () => { it('can get job caps for index pattern', async (done) => { const indexPattern = 'farequote-*'; const isRollup = false; - const { newJobCaps } = newJobCapsProvider(callWithRequestNonRollupMock); + const { newJobCaps } = newJobCapsProvider(mlClusterClientNonRollupMock); const response = await newJobCaps(indexPattern, isRollup, savedObjectsClientMock); expect(response).toEqual(farequoteJobCaps); done(); @@ -57,7 +65,7 @@ describe('job_service - job_caps', () => { it('can get rollup job caps for non rollup index pattern', async (done) => { const indexPattern = 'farequote-*'; const isRollup = true; - const { newJobCaps } = newJobCapsProvider(callWithRequestNonRollupMock); + const { newJobCaps } = newJobCapsProvider(mlClusterClientNonRollupMock); const response = await newJobCaps(indexPattern, isRollup, savedObjectsClientMock); expect(response).toEqual(farequoteJobCapsEmpty); done(); @@ -68,7 +76,7 @@ describe('job_service - job_caps', () => { it('can get rollup job caps for rollup index pattern', async (done) => { const indexPattern = 'cloud_roll_index'; const isRollup = true; - const { newJobCaps } = newJobCapsProvider(callWithRequestRollupMock); + const { newJobCaps } = newJobCapsProvider(mlClusterClientRollupMock); const response = await newJobCaps(indexPattern, isRollup, savedObjectsClientMock); expect(response).toEqual(cloudwatchJobCaps); done(); @@ -77,7 +85,7 @@ describe('job_service - job_caps', () => { it('can get non rollup job caps for rollup index pattern', async (done) => { const indexPattern = 'cloud_roll_index'; const isRollup = false; - const { newJobCaps } = newJobCapsProvider(callWithRequestRollupMock); + const { newJobCaps } = newJobCapsProvider(mlClusterClientRollupMock); const response = await newJobCaps(indexPattern, isRollup, savedObjectsClientMock); expect(response).not.toEqual(cloudwatchJobCaps); done(); diff --git a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts index a0ab4b5cf4e3e6..5616dade53a787 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/new_job_caps.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { SavedObjectsClientContract } from 'kibana/server'; +import { ILegacyScopedClusterClient, SavedObjectsClientContract } from 'kibana/server'; import { Aggregation, Field, NewJobCaps } from '../../../../common/types/fields'; import { fieldServiceProvider } from './field_service'; @@ -12,7 +12,7 @@ interface NewJobCapsResponse { [indexPattern: string]: NewJobCaps; } -export function newJobCapsProvider(callWithRequest: any) { +export function newJobCapsProvider(mlClusterClient: ILegacyScopedClusterClient) { async function newJobCaps( indexPattern: string, isRollup: boolean = false, @@ -21,7 +21,7 @@ export function newJobCapsProvider(callWithRequest: any) { const fieldService = fieldServiceProvider( indexPattern, isRollup, - callWithRequest, + mlClusterClient, savedObjectsClient ); const { aggs, fields } = await fieldService.getData(); diff --git a/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts b/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts index f7d846839503dc..f3a9bd49c27d6e 100644 --- a/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts +++ b/x-pack/plugins/ml/server/models/job_service/new_job_caps/rollup.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ILegacyScopedClusterClient } from 'kibana/server'; import { SavedObject } from 'kibana/server'; import { IndexPatternAttributes } from 'src/plugins/data/server'; import { SavedObjectsClientContract } from 'kibana/server'; @@ -21,7 +22,7 @@ export interface RollupJob { export async function rollupServiceProvider( indexPattern: string, - callWithRequest: any, + { callAsCurrentUser }: ILegacyScopedClusterClient, savedObjectsClient: SavedObjectsClientContract ) { const rollupIndexPatternObject = await loadRollupIndexPattern(indexPattern, savedObjectsClient); @@ -31,7 +32,7 @@ export async function rollupServiceProvider( if (rollupIndexPatternObject !== null) { const parsedTypeMetaData = JSON.parse(rollupIndexPatternObject.attributes.typeMeta); const rollUpIndex: string = parsedTypeMetaData.params.rollup_index; - const rollupCaps = await callWithRequest('ml.rollupIndexCapabilities', { + const rollupCaps = await callAsCurrentUser('ml.rollupIndexCapabilities', { indexPattern: rollUpIndex, }); diff --git a/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts b/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts index 8deaae823e8b3a..1c74953e4dda94 100644 --- a/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/job_validation.test.ts @@ -4,28 +4,48 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { validateJob, ValidateJobPayload } from './job_validation'; import { JobValidationMessage } from '../../../common/constants/messages'; -// mock callWithRequest -const callWithRequest: LegacyAPICaller = (method: string) => { - return new Promise((resolve) => { - if (method === 'fieldCaps') { - resolve({ fields: [] }); - return; - } else if (method === 'ml.info') { - resolve({ - limits: { - effective_max_model_memory_limit: '100MB', - max_model_memory_limit: '1GB', - }, - }); - } - resolve({}); - }) as Promise; -}; +const mlClusterClient = ({ + // mock callAsCurrentUser + callAsCurrentUser: (method: string) => { + return new Promise((resolve) => { + if (method === 'fieldCaps') { + resolve({ fields: [] }); + return; + } else if (method === 'ml.info') { + resolve({ + limits: { + effective_max_model_memory_limit: '100MB', + max_model_memory_limit: '1GB', + }, + }); + } + resolve({}); + }) as Promise; + }, + + // mock callAsInternalUser + callAsInternalUser: (method: string) => { + return new Promise((resolve) => { + if (method === 'fieldCaps') { + resolve({ fields: [] }); + return; + } else if (method === 'ml.info') { + resolve({ + limits: { + effective_max_model_memory_limit: '100MB', + max_model_memory_limit: '1GB', + }, + }); + } + resolve({}); + }) as Promise; + }, +} as unknown) as ILegacyScopedClusterClient; // Note: The tests cast `payload` as any // so we can simulate possible runtime payloads @@ -36,7 +56,7 @@ describe('ML - validateJob', () => { job: { analysis_config: { detectors: [] } }, } as unknown) as ValidateJobPayload; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ @@ -56,7 +76,7 @@ describe('ML - validateJob', () => { job_id: id, }, } as unknown) as ValidateJobPayload; - return validateJob(callWithRequest, payload).catch(() => { + return validateJob(mlClusterClient, payload).catch(() => { new Error('Promise should not fail for jobIdTests.'); }); }); @@ -77,7 +97,7 @@ describe('ML - validateJob', () => { job: { analysis_config: { detectors: [] }, groups: testIds }, } as unknown) as ValidateJobPayload; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids.includes(messageId)).toBe(true); }); @@ -117,7 +137,7 @@ describe('ML - validateJob', () => { const payload = ({ job: { analysis_config: { bucket_span: format, detectors: [] } }, } as unknown) as ValidateJobPayload; - return validateJob(callWithRequest, payload).catch(() => { + return validateJob(mlClusterClient, payload).catch(() => { new Error('Promise should not fail for bucketSpanFormatTests.'); }); }); @@ -152,11 +172,11 @@ describe('ML - validateJob', () => { function: '', }); payload.job.analysis_config.detectors.push({ - // @ts-ignore + // @ts-expect-error function: undefined, }); - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids.includes('detectors_function_empty')).toBe(true); }); @@ -170,7 +190,7 @@ describe('ML - validateJob', () => { function: 'count', }); - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, 
payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids.includes('detectors_function_not_empty')).toBe(true); }); @@ -182,7 +202,7 @@ describe('ML - validateJob', () => { fields: {}, } as unknown) as ValidateJobPayload; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids.includes('index_fields_invalid')).toBe(true); }); @@ -194,7 +214,7 @@ describe('ML - validateJob', () => { fields: { testField: {} }, } as unknown) as ValidateJobPayload; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids.includes('index_fields_valid')).toBe(true); }); @@ -222,7 +242,7 @@ describe('ML - validateJob', () => { const payload = getBasicPayload() as any; delete payload.job.analysis_config.influencers; - validateJob(callWithRequest, payload).then( + validateJob(mlClusterClient, payload).then( () => done( new Error('Promise should not resolve for this test when influencers is not an Array.') @@ -234,7 +254,7 @@ describe('ML - validateJob', () => { it('detect duplicate detectors', () => { const payload = getBasicPayload() as any; payload.job.analysis_config.detectors.push({ function: 'count' }); - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ 'job_id_valid', @@ -257,7 +277,7 @@ describe('ML - validateJob', () => { { function: 'count', by_field_name: 'airline' }, { function: 'count', partition_field_name: 'airline' }, ]; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ 'job_id_valid', @@ -272,7 +292,7 @@ describe('ML - validateJob', () => { // Failing https://github.com/elastic/kibana/issues/65865 it('basic validation passes, extended checks return some messages', () => { const payload = getBasicPayload(); - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ 'job_id_valid', @@ -305,7 +325,7 @@ describe('ML - validateJob', () => { fields: { testField: {} }, }; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ 'job_id_valid', @@ -338,7 +358,7 @@ describe('ML - validateJob', () => { fields: { testField: {} }, }; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ 'job_id_valid', @@ -381,7 +401,7 @@ describe('ML - validateJob', () => { fields: { testField: {} }, }; - return validateJob(callWithRequest, payload).then((messages) => { + return validateJob(mlClusterClient, payload).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([ 'job_id_valid', @@ -400,7 +420,7 @@ describe('ML - validateJob', () => { const docsTestPayload = getBasicPayload() as any; docsTestPayload.job.analysis_config.detectors = [{ function: 'count', by_field_name: 'airline' }]; 
it('creates a docs url pointing to the current docs version', () => { - return validateJob(callWithRequest, docsTestPayload).then((messages) => { + return validateJob(mlClusterClient, docsTestPayload).then((messages) => { const message = messages[ messages.findIndex((m) => m.id === 'field_not_aggregatable') ] as JobValidationMessage; @@ -409,7 +429,7 @@ describe('ML - validateJob', () => { }); it('creates a docs url pointing to the master docs version', () => { - return validateJob(callWithRequest, docsTestPayload, 'master').then((messages) => { + return validateJob(mlClusterClient, docsTestPayload, 'master').then((messages) => { const message = messages[ messages.findIndex((m) => m.id === 'field_not_aggregatable') ] as JobValidationMessage; diff --git a/x-pack/plugins/ml/server/models/job_validation/job_validation.ts b/x-pack/plugins/ml/server/models/job_validation/job_validation.ts index 6e65e5e64f3b75..118e923283b3f5 100644 --- a/x-pack/plugins/ml/server/models/job_validation/job_validation.ts +++ b/x-pack/plugins/ml/server/models/job_validation/job_validation.ts @@ -6,7 +6,7 @@ import { i18n } from '@kbn/i18n'; import Boom from 'boom'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { fieldsServiceProvider } from '../fields_service'; @@ -19,7 +19,7 @@ import { import { VALIDATION_STATUS } from '../../../common/constants/validation'; import { basicJobValidation, uniqWithIsEqual } from '../../../common/util/job_utils'; -// @ts-ignore +// @ts-expect-error import { validateBucketSpan } from './validate_bucket_span'; import { validateCardinality } from './validate_cardinality'; import { validateInfluencers } from './validate_influencers'; @@ -35,10 +35,9 @@ export type ValidateJobPayload = TypeOf; * @kbn/config-schema has checked the payload {@link validateJobSchema}. 
*/ export async function validateJob( - callWithRequest: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, payload: ValidateJobPayload, kbnVersion = 'current', - callAsInternalUser?: LegacyAPICaller, isSecurityDisabled?: boolean ) { const messages = getMessages(); @@ -65,8 +64,8 @@ export async function validateJob( // if no duration was part of the request, fall back to finding out // the time range of the time field of the index, but also check first // if the time field is a valid field of type 'date' using isValidTimeField() - if (typeof duration === 'undefined' && (await isValidTimeField(callWithRequest, job))) { - const fs = fieldsServiceProvider(callWithRequest); + if (typeof duration === 'undefined' && (await isValidTimeField(mlClusterClient, job))) { + const fs = fieldsServiceProvider(mlClusterClient); const index = job.datafeed_config.indices.join(','); const timeField = job.data_description.time_field; const timeRange = await fs.getTimeFieldRange(index, timeField, job.datafeed_config.query); @@ -81,29 +80,23 @@ export async function validateJob( // next run only the cardinality tests to find out if they trigger an error // so we can decide later whether certain additional tests should be run - const cardinalityMessages = await validateCardinality(callWithRequest, job); + const cardinalityMessages = await validateCardinality(mlClusterClient, job); validationMessages.push(...cardinalityMessages); const cardinalityError = cardinalityMessages.some((m) => { return messages[m.id as MessageId].status === VALIDATION_STATUS.ERROR; }); validationMessages.push( - ...(await validateBucketSpan( - callWithRequest, - job, - duration, - callAsInternalUser, - isSecurityDisabled - )) + ...(await validateBucketSpan(mlClusterClient, job, duration, isSecurityDisabled)) ); - validationMessages.push(...(await validateTimeRange(callWithRequest, job, duration))); + validationMessages.push(...(await validateTimeRange(mlClusterClient, job, duration))); // only run the influencer and model memory limit checks // if cardinality checks didn't return a message with an error level if (cardinalityError === false) { - validationMessages.push(...(await validateInfluencers(callWithRequest, job))); + validationMessages.push(...(await validateInfluencers(job))); validationMessages.push( - ...(await validateModelMemoryLimit(callWithRequest, job, duration)) + ...(await validateModelMemoryLimit(mlClusterClient, job, duration)) ); } } else { diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js index 7dc2ad7ff3b8f3..11f8d8967c4e0b 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js +++ b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.js @@ -45,13 +45,7 @@ const pickBucketSpan = (bucketSpans) => { return bucketSpans[i]; }; -export async function validateBucketSpan( - callWithRequest, - job, - duration, - callAsInternalUser, - isSecurityDisabled -) { +export async function validateBucketSpan(mlClusterClient, job, duration) { validateJobObject(job); // if there is no duration, do not run the estimate test @@ -123,11 +117,7 @@ export async function validateBucketSpan( try { const estimations = estimatorConfigs.map((data) => { return new Promise((resolve) => { - estimateBucketSpanFactory( - callWithRequest, - callAsInternalUser, - isSecurityDisabled - )(data) + estimateBucketSpanFactory(mlClusterClient)(data) .then(resolve) // this catch gets triggered when 
the estimation code runs without error // but isn't able to come up with a bucket span estimation. diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts index 8d77fd5a1fd0e7..f9145ab576d71e 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_bucket_span.test.ts @@ -20,32 +20,36 @@ import mockFareQuoteSearchResponse from './__mocks__/mock_farequote_search_respo // sparse data with a low number of buckets import mockItSearchResponse from './__mocks__/mock_it_search_response.json'; -// mock callWithRequestFactory -const callWithRequestFactory = (mockSearchResponse: any) => { - return () => { +// mock mlClusterClientFactory +const mlClusterClientFactory = (mockSearchResponse: any) => { + const callAs = () => { return new Promise((resolve) => { resolve(mockSearchResponse); }); }; + return { + callAsCurrentUser: callAs, + callAsInternalUser: callAs, + }; }; describe('ML - validateBucketSpan', () => { it('called without arguments', (done) => { - validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse)).then( + validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse)).then( () => done(new Error('Promise should not resolve for this test without job argument.')), () => done() ); }); it('called with non-valid job argument #1, missing datafeed_config', (done) => { - validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), {}).then( + validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse), {}).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); }); it('called with non-valid job argument #2, missing datafeed_config.indices', (done) => { - validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), { + validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse), { datafeed_config: {}, }).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), @@ -55,7 +59,7 @@ describe('ML - validateBucketSpan', () => { it('called with non-valid job argument #3, missing data_description', (done) => { const job = { datafeed_config: { indices: [] } }; - validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), job).then( + validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse), job).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -63,7 +67,7 @@ describe('ML - validateBucketSpan', () => { it('called with non-valid job argument #4, missing data_description.time_field', (done) => { const job = { datafeed_config: { indices: [] }, data_description: {} }; - validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), job).then( + validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse), job).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -74,7 +78,7 @@ describe('ML - validateBucketSpan', () => { datafeed_config: { indices: [] }, data_description: { time_field: '@timestamp' }, }; - validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), job).then( + validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse), job).then( () => done(new Error('Promise should not resolve for this test without valid job 
argument.')), () => done() ); @@ -87,7 +91,7 @@ describe('ML - validateBucketSpan', () => { datafeed_config: { indices: [] }, }; - return validateBucketSpan(callWithRequestFactory(mockFareQuoteSearchResponse), job).then( + return validateBucketSpan(mlClusterClientFactory(mockFareQuoteSearchResponse), job).then( (messages: JobValidationMessage[]) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([]); @@ -110,7 +114,7 @@ describe('ML - validateBucketSpan', () => { const duration = { start: 0, end: 1 }; return validateBucketSpan( - callWithRequestFactory(mockFareQuoteSearchResponse), + mlClusterClientFactory(mockFareQuoteSearchResponse), job, duration ).then((messages: JobValidationMessage[]) => { @@ -124,7 +128,7 @@ describe('ML - validateBucketSpan', () => { const duration = { start: 0, end: 1 }; return validateBucketSpan( - callWithRequestFactory(mockFareQuoteSearchResponse), + mlClusterClientFactory(mockFareQuoteSearchResponse), job, duration ).then((messages: JobValidationMessage[]) => { @@ -147,7 +151,7 @@ describe('ML - validateBucketSpan', () => { function: 'count', }); - return validateBucketSpan(callWithRequestFactory(mockSearchResponse), job, {}).then( + return validateBucketSpan(mlClusterClientFactory(mockSearchResponse), job, {}).then( (messages: JobValidationMessage[]) => { const ids = messages.map((m) => m.id); test(ids); diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts index bcfe4a48a0de00..92933877e28367 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.test.ts @@ -6,7 +6,7 @@ import _ from 'lodash'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; @@ -20,9 +20,12 @@ const mockResponses = { fieldCaps: mockFieldCaps, }; -// mock callWithRequestFactory -const callWithRequestFactory = (responses: Record, fail = false): LegacyAPICaller => { - return (requestName: string) => { +// mock mlClusterClientFactory +const mlClusterClientFactory = ( + responses: Record, + fail = false +): ILegacyScopedClusterClient => { + const callAs = (requestName: string) => { return new Promise((resolve, reject) => { const response = responses[requestName]; if (fail) { @@ -32,25 +35,29 @@ const callWithRequestFactory = (responses: Record, fail = false): L } }) as Promise; }; + return { + callAsCurrentUser: callAs, + callAsInternalUser: callAs, + }; }; describe('ML - validateCardinality', () => { it('called without arguments', (done) => { - validateCardinality(callWithRequestFactory(mockResponses)).then( + validateCardinality(mlClusterClientFactory(mockResponses)).then( () => done(new Error('Promise should not resolve for this test without job argument.')), () => done() ); }); it('called with non-valid job argument #1, missing analysis_config', (done) => { - validateCardinality(callWithRequestFactory(mockResponses), {} as CombinedJob).then( + validateCardinality(mlClusterClientFactory(mockResponses), {} as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); }); it('called with non-valid job argument #2, missing datafeed_config', (done) => { - validateCardinality(callWithRequestFactory(mockResponses), { + 
validateCardinality(mlClusterClientFactory(mockResponses), { analysis_config: {}, } as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), @@ -60,7 +67,7 @@ describe('ML - validateCardinality', () => { it('called with non-valid job argument #3, missing datafeed_config.indices', (done) => { const job = { analysis_config: {}, datafeed_config: {} } as CombinedJob; - validateCardinality(callWithRequestFactory(mockResponses), job).then( + validateCardinality(mlClusterClientFactory(mockResponses), job).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -71,7 +78,7 @@ describe('ML - validateCardinality', () => { analysis_config: {}, datafeed_config: { indices: [] }, } as unknown) as CombinedJob; - validateCardinality(callWithRequestFactory(mockResponses), job).then( + validateCardinality(mlClusterClientFactory(mockResponses), job).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -83,7 +90,7 @@ describe('ML - validateCardinality', () => { data_description: {}, datafeed_config: { indices: [] }, } as unknown) as CombinedJob; - validateCardinality(callWithRequestFactory(mockResponses), job).then( + validateCardinality(mlClusterClientFactory(mockResponses), job).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -95,7 +102,7 @@ describe('ML - validateCardinality', () => { datafeed_config: { indices: [] }, data_description: { time_field: '@timestamp' }, } as unknown) as CombinedJob; - validateCardinality(callWithRequestFactory(mockResponses), job).then( + validateCardinality(mlClusterClientFactory(mockResponses), job).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -110,7 +117,7 @@ describe('ML - validateCardinality', () => { }, } as unknown) as CombinedJob; - return validateCardinality(callWithRequestFactory(mockResponses), job).then((messages) => { + return validateCardinality(mlClusterClientFactory(mockResponses), job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([]); }); @@ -141,7 +148,7 @@ describe('ML - validateCardinality', () => { const mockCardinality = _.cloneDeep(mockResponses); mockCardinality.search.aggregations.airline_cardinality.value = cardinality; return validateCardinality( - callWithRequestFactory(mockCardinality), + mlClusterClientFactory(mockCardinality), (job as unknown) as CombinedJob ).then((messages) => { const ids = messages.map((m) => m.id); @@ -153,7 +160,7 @@ describe('ML - validateCardinality', () => { const job = getJobConfig('partition_field_name'); job.analysis_config.detectors[0].partition_field_name = '_source'; return validateCardinality( - callWithRequestFactory(mockResponses), + mlClusterClientFactory(mockResponses), (job as unknown) as CombinedJob ).then((messages) => { const ids = messages.map((m) => m.id); @@ -164,7 +171,7 @@ describe('ML - validateCardinality', () => { it(`field 'airline' aggregatable`, () => { const job = getJobConfig('partition_field_name'); return validateCardinality( - callWithRequestFactory(mockResponses), + mlClusterClientFactory(mockResponses), (job as unknown) as CombinedJob ).then((messages) => { const ids = messages.map((m) => m.id); @@ -174,7 +181,7 @@ describe('ML - validateCardinality', () => { it('field not aggregatable', () => { const job = 
getJobConfig('partition_field_name'); - return validateCardinality(callWithRequestFactory({}), (job as unknown) as CombinedJob).then( + return validateCardinality(mlClusterClientFactory({}), (job as unknown) as CombinedJob).then( (messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['field_not_aggregatable']); @@ -189,7 +196,7 @@ describe('ML - validateCardinality', () => { partition_field_name: 'airline', }); return validateCardinality( - callWithRequestFactory({}, true), + mlClusterClientFactory({}, true), (job as unknown) as CombinedJob ).then((messages) => { const ids = messages.map((m) => m.id); @@ -245,7 +252,7 @@ describe('ML - validateCardinality', () => { job.model_plot_config = { enabled: false }; const mockCardinality = _.cloneDeep(mockResponses); mockCardinality.search.aggregations.airline_cardinality.value = cardinality; - return validateCardinality(callWithRequestFactory(mockCardinality), job).then((messages) => { + return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['success_cardinality']); }); @@ -256,7 +263,7 @@ describe('ML - validateCardinality', () => { job.model_plot_config = { enabled: true }; const mockCardinality = _.cloneDeep(mockResponses); mockCardinality.search.aggregations.airline_cardinality.value = cardinality; - return validateCardinality(callWithRequestFactory(mockCardinality), job).then((messages) => { + return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['cardinality_model_plot_high']); }); @@ -267,7 +274,7 @@ describe('ML - validateCardinality', () => { job.model_plot_config = { enabled: false }; const mockCardinality = _.cloneDeep(mockResponses); mockCardinality.search.aggregations.airline_cardinality.value = cardinality; - return validateCardinality(callWithRequestFactory(mockCardinality), job).then((messages) => { + return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['cardinality_by_field']); }); @@ -278,7 +285,7 @@ describe('ML - validateCardinality', () => { job.model_plot_config = { enabled: true }; const mockCardinality = _.cloneDeep(mockResponses); mockCardinality.search.aggregations.airline_cardinality.value = cardinality; - return validateCardinality(callWithRequestFactory(mockCardinality), job).then((messages) => { + return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['cardinality_model_plot_high', 'cardinality_by_field']); }); @@ -289,7 +296,7 @@ describe('ML - validateCardinality', () => { job.model_plot_config = { enabled: true, terms: 'AAL,AAB' }; const mockCardinality = _.cloneDeep(mockResponses); mockCardinality.search.aggregations.airline_cardinality.value = cardinality; - return validateCardinality(callWithRequestFactory(mockCardinality), job).then((messages) => { + return validateCardinality(mlClusterClientFactory(mockCardinality), job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['cardinality_by_field']); }); diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts index d5bc6aa20e32aa..1545c4c0062ec8 100644 --- 
a/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_cardinality.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { DataVisualizer } from '../data_visualizer'; import { validateJobObject } from './validate_job_object'; @@ -43,8 +43,12 @@ type Validator = (obj: { messages: Messages; }>; -const validateFactory = (callWithRequest: LegacyAPICaller, job: CombinedJob): Validator => { - const dv = new DataVisualizer(callWithRequest); +const validateFactory = ( + mlClusterClient: ILegacyScopedClusterClient, + job: CombinedJob +): Validator => { + const { callAsCurrentUser } = mlClusterClient; + const dv = new DataVisualizer(mlClusterClient); const modelPlotConfigTerms = job?.model_plot_config?.terms ?? ''; const modelPlotConfigFieldCount = @@ -73,7 +77,7 @@ const validateFactory = (callWithRequest: LegacyAPICaller, job: CombinedJob): Va ] as string[]; // use fieldCaps endpoint to get data about whether fields are aggregatable - const fieldCaps = await callWithRequest('fieldCaps', { + const fieldCaps = await callAsCurrentUser('fieldCaps', { index: job.datafeed_config.indices.join(','), fields: uniqueFieldNames, }); @@ -150,7 +154,7 @@ const validateFactory = (callWithRequest: LegacyAPICaller, job: CombinedJob): Va }; export async function validateCardinality( - callWithRequest: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, job?: CombinedJob ): Promise | never { const messages: Messages = []; @@ -170,7 +174,7 @@ export async function validateCardinality( } // validate({ type, isInvalid }) asynchronously returns an array of validation messages - const validate = validateFactory(callWithRequest, job); + const validate = validateFactory(mlClusterClient, job); const modelPlotEnabled = job.model_plot_config?.enabled ?? false; diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_influencers.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_influencers.test.ts index 594b51a773adae..39f5b86c44b7f3 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_influencers.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_influencers.test.ts @@ -4,28 +4,20 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller } from 'kibana/server'; - import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; import { validateInfluencers } from './validate_influencers'; describe('ML - validateInfluencers', () => { it('called without arguments throws an error', (done) => { - validateInfluencers( - (undefined as unknown) as LegacyAPICaller, - (undefined as unknown) as CombinedJob - ).then( + validateInfluencers((undefined as unknown) as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without job argument.')), () => done() ); }); it('called with non-valid job argument #1, missing analysis_config', (done) => { - validateInfluencers( - (undefined as unknown) as LegacyAPICaller, - ({} as unknown) as CombinedJob - ).then( + validateInfluencers(({} as unknown) as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -37,10 +29,7 @@ describe('ML - validateInfluencers', () => { datafeed_config: { indices: [] }, data_description: { time_field: '@timestamp' }, }; - validateInfluencers( - (undefined as unknown) as LegacyAPICaller, - (job as unknown) as CombinedJob - ).then( + validateInfluencers((job as unknown) as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -52,10 +41,7 @@ describe('ML - validateInfluencers', () => { datafeed_config: { indices: [] }, data_description: { time_field: '@timestamp' }, }; - validateInfluencers( - (undefined as unknown) as LegacyAPICaller, - (job as unknown) as CombinedJob - ).then( + validateInfluencers((job as unknown) as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), () => done() ); @@ -75,7 +61,7 @@ describe('ML - validateInfluencers', () => { it('success_influencer', () => { const job = getJobConfig(['airline']); - return validateInfluencers((undefined as unknown) as LegacyAPICaller, job).then((messages) => { + return validateInfluencers(job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['success_influencers']); }); @@ -93,7 +79,7 @@ describe('ML - validateInfluencers', () => { ] ); - return validateInfluencers((undefined as unknown) as LegacyAPICaller, job).then((messages) => { + return validateInfluencers(job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual([]); }); @@ -101,7 +87,7 @@ describe('ML - validateInfluencers', () => { it('influencer_low', () => { const job = getJobConfig(); - return validateInfluencers((undefined as unknown) as LegacyAPICaller, job).then((messages) => { + return validateInfluencers(job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['influencer_low']); }); @@ -109,7 +95,7 @@ describe('ML - validateInfluencers', () => { it('influencer_high', () => { const job = getJobConfig(['i1', 'i2', 'i3', 'i4']); - return validateInfluencers((undefined as unknown) as LegacyAPICaller, job).then((messages) => { + return validateInfluencers(job).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toStrictEqual(['influencer_high']); }); @@ -127,7 +113,7 @@ describe('ML - validateInfluencers', () => { }, ] ); - return validateInfluencers((undefined as unknown) as LegacyAPICaller, job).then((messages) => { + return validateInfluencers(job).then((messages) => { const ids = messages.map((m) => m.id); 
expect(ids).toStrictEqual(['influencer_low_suggestion']); }); @@ -157,7 +143,7 @@ describe('ML - validateInfluencers', () => { }, ] ); - return validateInfluencers((undefined as unknown) as LegacyAPICaller, job).then((messages) => { + return validateInfluencers(job).then((messages) => { expect(messages).toStrictEqual([ { id: 'influencer_low_suggestions', diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_influencers.ts b/x-pack/plugins/ml/server/models/job_validation/validate_influencers.ts index 1a77bfaf608115..72995619f6eca3 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_influencers.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_influencers.ts @@ -4,8 +4,6 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; - import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; import { validateJobObject } from './validate_job_object'; @@ -14,7 +12,7 @@ const INFLUENCER_LOW_THRESHOLD = 0; const INFLUENCER_HIGH_THRESHOLD = 4; const DETECTOR_FIELD_NAMES_THRESHOLD = 1; -export async function validateInfluencers(callWithRequest: LegacyAPICaller, job: CombinedJob) { +export async function validateInfluencers(job: CombinedJob) { validateJobObject(job); const messages = []; diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts index d9be8e282e9235..61af960847f7f0 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.test.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { CombinedJob, Detector } from '../../../common/types/anomaly_detection_jobs'; import { ModelMemoryEstimate } from '../calculate_model_memory_limit/calculate_model_memory_limit'; import { validateModelMemoryLimit } from './validate_model_memory_limit'; @@ -73,15 +73,15 @@ describe('ML - validateModelMemoryLimit', () => { 'ml.estimateModelMemory'?: ModelMemoryEstimate; } - // mock callWithRequest + // mock callAsCurrentUser // used in three places: // - to retrieve the info endpoint // - to search for cardinality of split field // - to retrieve field capabilities used in search for split field cardinality - const getMockCallWithRequest = ({ + const getMockMlClusterClient = ({ 'ml.estimateModelMemory': estimateModelMemory, - }: MockAPICallResponse = {}) => - ((call: string) => { + }: MockAPICallResponse = {}): ILegacyScopedClusterClient => { + const callAs = (call: string) => { if (typeof call === undefined) { return Promise.reject(); } @@ -97,7 +97,13 @@ describe('ML - validateModelMemoryLimit', () => { response = estimateModelMemory || modelMemoryEstimateResponse; } return Promise.resolve(response); - }) as LegacyAPICaller; + }; + + return { + callAsCurrentUser: callAs, + callAsInternalUser: callAs, + }; + }; function getJobConfig(influencers: string[] = [], detectors: Detector[] = []) { return ({ @@ -129,7 +135,7 @@ describe('ML - validateModelMemoryLimit', () => { const job = getJobConfig(); const duration = undefined; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual([]); }); @@ -138,10 +144,10 @@ describe('ML - validateModelMemoryLimit', () => { it('Called with no duration or split and mml above limit', () => { const job = getJobConfig(); const duration = undefined; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '31mb'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_greater_than_max_mml']); }); @@ -151,11 +157,11 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(10); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '20mb'; return validateModelMemoryLimit( - getMockCallWithRequest({ 'ml.estimateModelMemory': { model_memory_estimate: '66mb' } }), + getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '66mb' } }), job, duration ).then((messages) => { @@ -168,11 +174,11 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(2); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '30mb'; return validateModelMemoryLimit( - getMockCallWithRequest({ 'ml.estimateModelMemory': { model_memory_estimate: '24mb' } }), + getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '24mb' } }), job, duration ).then((messages) => { @@ -185,11 +191,11 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = 
createDetectors(2); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '10mb'; return validateModelMemoryLimit( - getMockCallWithRequest({ 'ml.estimateModelMemory': { model_memory_estimate: '22mb' } }), + getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '22mb' } }), job, duration ).then((messages) => { @@ -203,10 +209,10 @@ describe('ML - validateModelMemoryLimit', () => { const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; delete mlInfoResponse.limits.max_model_memory_limit; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '10mb'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['half_estimated_mml_greater_than_mml']); }); @@ -215,10 +221,10 @@ describe('ML - validateModelMemoryLimit', () => { it('Called with no duration or split and mml above limit, no max setting', () => { const job = getJobConfig(); const duration = undefined; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '31mb'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual([]); }); @@ -227,10 +233,10 @@ describe('ML - validateModelMemoryLimit', () => { it('Called with no duration or split and mml above limit, no max setting, above effective max mml', () => { const job = getJobConfig(); const duration = undefined; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '41mb'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_greater_than_effective_max_mml']); }); @@ -240,11 +246,11 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '20mb'; return validateModelMemoryLimit( - getMockCallWithRequest({ 'ml.estimateModelMemory': { model_memory_estimate: '19mb' } }), + getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '19mb' } }), job, duration ).then((messages) => { @@ -257,10 +263,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '0mb'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_value_invalid']); }); @@ -270,10 +276,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error 
job.analysis_limits.model_memory_limit = '10mbananas'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_value_invalid']); }); @@ -283,10 +289,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '10'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_value_invalid']); }); @@ -296,10 +302,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = 'mb'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_value_invalid']); }); @@ -309,10 +315,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = 'asdf'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_value_invalid']); }); @@ -322,10 +328,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '1023KB'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['mml_value_invalid']); }); @@ -335,10 +341,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '1024KB'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['half_estimated_mml_greater_than_mml']); }); @@ -348,10 +354,10 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '6MB'; - return validateModelMemoryLimit(getMockCallWithRequest(), job, duration).then((messages) => { + return 
validateModelMemoryLimit(getMockMlClusterClient(), job, duration).then((messages) => { const ids = messages.map((m) => m.id); expect(ids).toEqual(['half_estimated_mml_greater_than_mml']); }); @@ -361,11 +367,11 @@ describe('ML - validateModelMemoryLimit', () => { const dtrs = createDetectors(1); const job = getJobConfig(['instance'], dtrs); const duration = { start: 0, end: 1 }; - // @ts-ignore + // @ts-expect-error job.analysis_limits.model_memory_limit = '20MB'; return validateModelMemoryLimit( - getMockCallWithRequest({ 'ml.estimateModelMemory': { model_memory_estimate: '20mb' } }), + getMockMlClusterClient({ 'ml.estimateModelMemory': { model_memory_estimate: '20mb' } }), job, duration ).then((messages) => { diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts index 2c7d1cc23bbaad..728342294c4247 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_model_memory_limit.ts @@ -5,7 +5,7 @@ */ import numeral from '@elastic/numeral'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; import { validateJobObject } from './validate_job_object'; import { calculateModelMemoryLimitProvider } from '../calculate_model_memory_limit'; @@ -16,10 +16,11 @@ import { MlInfoResponse } from '../../../common/types/ml_server_info'; const MODEL_MEMORY_LIMIT_MINIMUM_BYTES = 1048576; export async function validateModelMemoryLimit( - callWithRequest: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, job: CombinedJob, duration?: { start?: number; end?: number } ) { + const { callAsInternalUser } = mlClusterClient; validateJobObject(job); // retrieve the model memory limit specified by the user in the job config. @@ -51,12 +52,12 @@ export async function validateModelMemoryLimit( // retrieve the max_model_memory_limit value from the server // this will be unset unless the user has set this on their cluster - const info = await callWithRequest('ml.info'); + const info = (await callAsInternalUser('ml.info')) as MlInfoResponse; const maxModelMemoryLimit = info.limits.max_model_memory_limit?.toUpperCase(); const effectiveMaxModelMemoryLimit = info.limits.effective_max_model_memory_limit?.toUpperCase(); if (runCalcModelMemoryTest) { - const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(callWithRequest)( + const { modelMemoryLimit } = await calculateModelMemoryLimitProvider(mlClusterClient)( job.analysis_config, job.datafeed_config.indices.join(','), job.datafeed_config.query, @@ -65,14 +66,14 @@ export async function validateModelMemoryLimit( duration!.end as number, true ); - // @ts-ignore + // @ts-expect-error const mmlEstimateBytes: number = numeral(modelMemoryLimit).value(); let runEstimateGreaterThenMml = true; // if max_model_memory_limit has been set, // make sure the estimated value is not greater than it. 
if (typeof maxModelMemoryLimit !== 'undefined') { - // @ts-ignore + // @ts-expect-error const maxMmlBytes: number = numeral(maxModelMemoryLimit).value(); if (mmlEstimateBytes > maxMmlBytes) { runEstimateGreaterThenMml = false; @@ -89,7 +90,7 @@ export async function validateModelMemoryLimit( // do not run this if we've already found that it's larger than // the max mml if (runEstimateGreaterThenMml && mml !== null) { - // @ts-ignore + // @ts-expect-error const mmlBytes: number = numeral(mml).value(); if (mmlBytes < MODEL_MEMORY_LIMIT_MINIMUM_BYTES) { messages.push({ @@ -116,11 +117,11 @@ export async function validateModelMemoryLimit( // make sure the user defined MML is not greater than it if (mml !== null) { let maxMmlExceeded = false; - // @ts-ignore + // @ts-expect-error const mmlBytes = numeral(mml).value(); if (maxModelMemoryLimit !== undefined) { - // @ts-ignore + // @ts-expect-error const maxMmlBytes = numeral(maxModelMemoryLimit).value(); if (mmlBytes > maxMmlBytes) { maxMmlExceeded = true; @@ -133,7 +134,7 @@ export async function validateModelMemoryLimit( } if (effectiveMaxModelMemoryLimit !== undefined && maxMmlExceeded === false) { - // @ts-ignore + // @ts-expect-error const effectiveMaxMmlBytes = numeral(effectiveMaxModelMemoryLimit).value(); if (mmlBytes > effectiveMaxMmlBytes) { messages.push({ diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts index d4e1f0cc379fb9..f74d8a26ef3704 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.test.ts @@ -6,7 +6,7 @@ import _ from 'lodash'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; @@ -21,12 +21,16 @@ const mockSearchResponse = { search: mockTimeRange, }; -const callWithRequestFactory = (resp: any): LegacyAPICaller => { - return (path: string) => { +const mlClusterClientFactory = (resp: any): ILegacyScopedClusterClient => { + const callAs = (path: string) => { return new Promise((resolve) => { resolve(resp[path]); }) as Promise; }; + return { + callAsCurrentUser: callAs, + callAsInternalUser: callAs, + }; }; function getMinimalValidJob() { @@ -46,7 +50,7 @@ function getMinimalValidJob() { describe('ML - isValidTimeField', () => { it('called without job config argument triggers Promise rejection', (done) => { isValidTimeField( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), (undefined as unknown) as CombinedJob ).then( () => done(new Error('Promise should not resolve for this test without job argument.')), @@ -55,7 +59,7 @@ describe('ML - isValidTimeField', () => { }); it('time_field `@timestamp`', (done) => { - isValidTimeField(callWithRequestFactory(mockSearchResponse), getMinimalValidJob()).then( + isValidTimeField(mlClusterClientFactory(mockSearchResponse), getMinimalValidJob()).then( (valid) => { expect(valid).toBe(true); done(); @@ -74,7 +78,7 @@ describe('ML - isValidTimeField', () => { }; isValidTimeField( - callWithRequestFactory(mockSearchResponseNestedDate), + mlClusterClientFactory(mockSearchResponseNestedDate), mockJobConfigNestedDate ).then( (valid) => { @@ -89,7 +93,7 @@ describe('ML - isValidTimeField', () => { describe('ML - validateTimeRange', () => { it('called without arguments', (done) => { validateTimeRange( - 
callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), (undefined as unknown) as CombinedJob ).then( () => done(new Error('Promise should not resolve for this test without job argument.')), @@ -98,7 +102,7 @@ describe('ML - validateTimeRange', () => { }); it('called with non-valid job argument #2, missing datafeed_config', (done) => { - validateTimeRange(callWithRequestFactory(mockSearchResponse), ({ + validateTimeRange(mlClusterClientFactory(mockSearchResponse), ({ analysis_config: {}, } as unknown) as CombinedJob).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), @@ -109,7 +113,7 @@ describe('ML - validateTimeRange', () => { it('called with non-valid job argument #3, missing datafeed_config.indices', (done) => { const job = { analysis_config: {}, datafeed_config: {} }; validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), (job as unknown) as CombinedJob ).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), @@ -120,7 +124,7 @@ describe('ML - validateTimeRange', () => { it('called with non-valid job argument #4, missing data_description', (done) => { const job = { analysis_config: {}, datafeed_config: { indices: [] } }; validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), (job as unknown) as CombinedJob ).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), @@ -131,7 +135,7 @@ describe('ML - validateTimeRange', () => { it('called with non-valid job argument #5, missing data_description.time_field', (done) => { const job = { analysis_config: {}, data_description: {}, datafeed_config: { indices: [] } }; validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), (job as unknown) as CombinedJob ).then( () => done(new Error('Promise should not resolve for this test without valid job argument.')), @@ -144,7 +148,7 @@ describe('ML - validateTimeRange', () => { mockSearchResponseInvalid.fieldCaps = undefined; const duration = { start: 0, end: 1 }; return validateTimeRange( - callWithRequestFactory(mockSearchResponseInvalid), + mlClusterClientFactory(mockSearchResponseInvalid), getMinimalValidJob(), duration ).then((messages) => { @@ -158,7 +162,7 @@ describe('ML - validateTimeRange', () => { jobShortTimeRange.analysis_config.bucket_span = '1s'; const duration = { start: 0, end: 1 }; return validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), jobShortTimeRange, duration ).then((messages) => { @@ -170,7 +174,7 @@ describe('ML - validateTimeRange', () => { it('too short time range, 25x bucket span is more than 2h', () => { const duration = { start: 0, end: 1 }; return validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), getMinimalValidJob(), duration ).then((messages) => { @@ -182,7 +186,7 @@ describe('ML - validateTimeRange', () => { it('time range between 2h and 25x bucket span', () => { const duration = { start: 0, end: 8000000 }; return validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), getMinimalValidJob(), duration ).then((messages) => { @@ -194,7 +198,7 @@ describe('ML - validateTimeRange', () => { it('valid time range', () => { const duration = { start: 0, end: 100000000 }; 
return validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), getMinimalValidJob(), duration ).then((messages) => { @@ -206,7 +210,7 @@ describe('ML - validateTimeRange', () => { it('invalid time range, start time is before the UNIX epoch', () => { const duration = { start: -1, end: 100000000 }; return validateTimeRange( - callWithRequestFactory(mockSearchResponse), + mlClusterClientFactory(mockSearchResponse), getMinimalValidJob(), duration ).then((messages) => { diff --git a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts index f47938e059ec04..a94ceffa902732 100644 --- a/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts +++ b/x-pack/plugins/ml/server/models/job_validation/validate_time_range.ts @@ -4,11 +4,10 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ES_FIELD_TYPES } from '../../../../../../src/plugins/data/server'; import { parseInterval } from '../../../common/util/parse_interval'; import { CombinedJob } from '../../../common/types/anomaly_detection_jobs'; -// @ts-ignore import { validateJobObject } from './validate_job_object'; interface ValidateTimeRangeMessage { @@ -27,7 +26,10 @@ const BUCKET_SPAN_COMPARE_FACTOR = 25; const MIN_TIME_SPAN_MS = 7200000; const MIN_TIME_SPAN_READABLE = '2 hours'; -export async function isValidTimeField(callAsCurrentUser: LegacyAPICaller, job: CombinedJob) { +export async function isValidTimeField( + { callAsCurrentUser }: ILegacyScopedClusterClient, + job: CombinedJob +) { const index = job.datafeed_config.indices.join(','); const timeField = job.data_description.time_field; @@ -45,7 +47,7 @@ export async function isValidTimeField(callAsCurrentUser: LegacyAPICaller, job: } export async function validateTimeRange( - callAsCurrentUser: LegacyAPICaller, + mlClientCluster: ILegacyScopedClusterClient, job: CombinedJob, timeRange?: Partial ) { @@ -54,7 +56,7 @@ export async function validateTimeRange( validateJobObject(job); // check if time_field is a date type - if (!(await isValidTimeField(callAsCurrentUser, job))) { + if (!(await isValidTimeField(mlClientCluster, job))) { messages.push({ id: 'time_field_invalid', timeField: job.data_description.time_field, diff --git a/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts b/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts index 99eeaacc8de9cd..d7403c45f1be2e 100644 --- a/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts +++ b/x-pack/plugins/ml/server/models/results_service/get_partition_fields_values.ts @@ -5,8 +5,8 @@ */ import Boom from 'boom'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns'; -import { callWithRequestType } from '../../../common/types/kibana'; import { CriteriaField } from './results_service'; const PARTITION_FIELDS = ['partition_field', 'over_field', 'by_field'] as const; @@ -76,7 +76,10 @@ function getFieldObject(fieldType: PartitionFieldsType, aggs: any) { : {}; } -export const getPartitionFieldsValuesFactory = (callWithRequest: callWithRequestType) => +export const getPartitionFieldsValuesFactory = ({ + callAsCurrentUser, + callAsInternalUser, +}: ILegacyScopedClusterClient) => /** 
* Gets the record of partition fields with possible values that fit the provided queries. * @param jobId - Job ID @@ -92,7 +95,7 @@ export const getPartitionFieldsValuesFactory = (callWithRequest: callWithRequest earliestMs: number, latestMs: number ) { - const jobsResponse = await callWithRequest('ml.jobs', { jobId: [jobId] }); + const jobsResponse = await callAsInternalUser('ml.jobs', { jobId: [jobId] }); if (jobsResponse.count === 0 || jobsResponse.jobs === undefined) { throw Boom.notFound(`Job with the id "${jobId}" not found`); } @@ -101,7 +104,7 @@ export const getPartitionFieldsValuesFactory = (callWithRequest: callWithRequest const isModelPlotEnabled = job?.model_plot_config?.enabled; - const resp = await callWithRequest('search', { + const resp = await callAsCurrentUser('search', { index: ML_RESULTS_INDEX_PATTERN, size: 0, body: { diff --git a/x-pack/plugins/ml/server/models/results_service/results_service.ts b/x-pack/plugins/ml/server/models/results_service/results_service.ts index 8255395000f47d..8e904143263d77 100644 --- a/x-pack/plugins/ml/server/models/results_service/results_service.ts +++ b/x-pack/plugins/ml/server/models/results_service/results_service.ts @@ -7,7 +7,7 @@ import _ from 'lodash'; import moment from 'moment'; import { SearchResponse } from 'elasticsearch'; -import { LegacyAPICaller } from 'kibana/server'; +import { ILegacyScopedClusterClient } from 'kibana/server'; import { buildAnomalyTableItems } from './build_anomaly_table_items'; import { ML_RESULTS_INDEX_PATTERN } from '../../../common/constants/index_patterns'; import { ANOMALIES_TABLE_DEFAULT_QUERY_SIZE } from '../../../common/constants/search'; @@ -30,7 +30,8 @@ interface Influencer { fieldValue: any; } -export function resultsServiceProvider(callAsCurrentUser: LegacyAPICaller) { +export function resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient) { + const { callAsCurrentUser } = mlClusterClient; // Obtains data for the anomalies table, aggregating anomalies by day or hour as requested. // Return an Object with properties 'anomalies' and 'interval' (interval used to aggregate anomalies, // one of day, hour or second. 
Note 'auto' can be provided as the aggregationInterval in the request, @@ -435,6 +436,6 @@ export function resultsServiceProvider(callAsCurrentUser: LegacyAPICaller) { getCategoryExamples, getLatestBucketTimestampByJob, getMaxAnomalyScore, - getPartitionFieldsValues: getPartitionFieldsValuesFactory(callAsCurrentUser), + getPartitionFieldsValues: getPartitionFieldsValuesFactory(mlClusterClient), }; } diff --git a/x-pack/plugins/ml/server/plugin.ts b/x-pack/plugins/ml/server/plugin.ts index 83b14d60fb4160..812db744d1bdaa 100644 --- a/x-pack/plugins/ml/server/plugin.ts +++ b/x-pack/plugins/ml/server/plugin.ts @@ -75,7 +75,7 @@ export class MlServerPlugin implements Plugin { try { - const { getAnnotations } = annotationServiceProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { getAnnotations } = annotationServiceProvider(context.ml!.mlClient); const resp = await getAnnotations(request.body); return response.ok({ @@ -96,19 +94,17 @@ export function annotationRoutes( mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable( - context.ml!.mlClient.callAsCurrentUser + context.ml!.mlClient ); if (annotationsFeatureAvailable === false) { throw getAnnotationsFeatureUnavailableErrorMessage(); } - const { indexAnnotation } = annotationServiceProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { indexAnnotation } = annotationServiceProvider(context.ml!.mlClient); const currentUser = securityPlugin !== undefined ? securityPlugin.authc.getCurrentUser(request) : {}; - // @ts-ignore username doesn't exist on {} + // @ts-expect-error username doesn't exist on {} const username = currentUser?.username ?? ANNOTATION_USER_UNKNOWN; const resp = await indexAnnotation(request.body, username); @@ -143,16 +139,14 @@ export function annotationRoutes( mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const annotationsFeatureAvailable = await isAnnotationsFeatureAvailable( - context.ml!.mlClient.callAsCurrentUser + context.ml!.mlClient ); if (annotationsFeatureAvailable === false) { throw getAnnotationsFeatureUnavailableErrorMessage(); } const annotationId = request.params.annotationId; - const { deleteAnnotation } = annotationServiceProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { deleteAnnotation } = annotationServiceProvider(context.ml!.mlClient); const resp = await deleteAnnotation(annotationId); return response.ok({ diff --git a/x-pack/plugins/ml/server/routes/anomaly_detectors.ts b/x-pack/plugins/ml/server/routes/anomaly_detectors.ts index 78e05c9a6d07b5..8a59c174eb8e74 100644 --- a/x-pack/plugins/ml/server/routes/anomaly_detectors.ts +++ b/x-pack/plugins/ml/server/routes/anomaly_detectors.ts @@ -45,7 +45,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.jobs'); + const results = await context.ml!.mlClient.callAsInternalUser('ml.jobs'); return response.ok({ body: results, }); @@ -77,7 +77,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { jobId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.jobs', { jobId }); + const results = await context.ml!.mlClient.callAsInternalUser('ml.jobs', { jobId }); return response.ok({ body: results, }); @@ 
-107,7 +107,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.jobStats'); + const results = await context.ml!.mlClient.callAsInternalUser('ml.jobStats'); return response.ok({ body: results, }); @@ -139,7 +139,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { jobId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.jobStats', { jobId }); + const results = await context.ml!.mlClient.callAsInternalUser('ml.jobStats', { jobId }); return response.ok({ body: results, }); @@ -175,11 +175,9 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { jobId } = request.params; - const body = request.body; - - const results = await context.ml!.mlClient.callAsCurrentUser('ml.addJob', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.addJob', { jobId, - body, + body: request.body, }); return response.ok({ body: results, @@ -214,7 +212,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { jobId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.updateJob', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.updateJob', { jobId, body: request.body, }); @@ -249,7 +247,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { jobId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.openJob', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.openJob', { jobId, }); return response.ok({ @@ -289,7 +287,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { if (force !== undefined) { options.force = force; } - const results = await context.ml!.mlClient.callAsCurrentUser('ml.closeJob', options); + const results = await context.ml!.mlClient.callAsInternalUser('ml.closeJob', options); return response.ok({ body: results, }); @@ -327,7 +325,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { if (force !== undefined) { options.force = force; } - const results = await context.ml!.mlClient.callAsCurrentUser('ml.deleteJob', options); + const results = await context.ml!.mlClient.callAsInternalUser('ml.deleteJob', options); return response.ok({ body: results, }); @@ -356,7 +354,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.validateDetector', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.validateDetector', { body: request.body, }); return response.ok({ @@ -393,7 +391,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { try { const jobId = request.params.jobId; const duration = request.body.duration; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.forecast', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.forecast', { jobId, duration, }); @@ -432,7 +430,7 @@ export 
function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.records', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.records', { jobId: request.params.jobId, body: request.body, }); @@ -471,7 +469,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.buckets', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.buckets', { jobId: request.params.jobId, timestamp: request.params.timestamp, body: request.body, @@ -511,7 +509,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.overallBuckets', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.overallBuckets', { jobId: request.params.jobId, top_n: request.body.topN, bucket_span: request.body.bucketSpan, @@ -548,7 +546,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.categories', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.categories', { jobId: request.params.jobId, categoryId: request.params.categoryId, }); @@ -582,7 +580,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.modelSnapshots', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.modelSnapshots', { jobId: request.params.jobId, }); return response.ok({ @@ -615,7 +613,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.modelSnapshots', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.modelSnapshots', { jobId: request.params.jobId, snapshotId: request.params.snapshotId, }); @@ -651,7 +649,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.updateModelSnapshot', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.updateModelSnapshot', { jobId: request.params.jobId, snapshotId: request.params.snapshotId, body: request.body, @@ -686,7 +684,7 @@ export function jobRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.deleteModelSnapshot', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.deleteModelSnapshot', { jobId: request.params.jobId, snapshotId: request.params.snapshotId, }); diff --git a/x-pack/plugins/ml/server/routes/calendars.ts b/x-pack/plugins/ml/server/routes/calendars.ts index 9c80651a13999e..f5d129abd515e4 100644 --- a/x-pack/plugins/ml/server/routes/calendars.ts +++ b/x-pack/plugins/ml/server/routes/calendars.ts @@ 
-11,32 +11,32 @@ import { calendarSchema, calendarIdSchema, calendarIdsSchema } from './schemas/c import { CalendarManager, Calendar, FormCalendar } from '../models/calendar'; function getAllCalendars(context: RequestHandlerContext) { - const cal = new CalendarManager(context.ml!.mlClient.callAsCurrentUser); + const cal = new CalendarManager(context.ml!.mlClient); return cal.getAllCalendars(); } function getCalendar(context: RequestHandlerContext, calendarId: string) { - const cal = new CalendarManager(context.ml!.mlClient.callAsCurrentUser); + const cal = new CalendarManager(context.ml!.mlClient); return cal.getCalendar(calendarId); } function newCalendar(context: RequestHandlerContext, calendar: FormCalendar) { - const cal = new CalendarManager(context.ml!.mlClient.callAsCurrentUser); + const cal = new CalendarManager(context.ml!.mlClient); return cal.newCalendar(calendar); } function updateCalendar(context: RequestHandlerContext, calendarId: string, calendar: Calendar) { - const cal = new CalendarManager(context.ml!.mlClient.callAsCurrentUser); + const cal = new CalendarManager(context.ml!.mlClient); return cal.updateCalendar(calendarId, calendar); } function deleteCalendar(context: RequestHandlerContext, calendarId: string) { - const cal = new CalendarManager(context.ml!.mlClient.callAsCurrentUser); + const cal = new CalendarManager(context.ml!.mlClient); return cal.deleteCalendar(calendarId); } function getCalendarsByIds(context: RequestHandlerContext, calendarIds: string) { - const cal = new CalendarManager(context.ml!.mlClient.callAsCurrentUser); + const cal = new CalendarManager(context.ml!.mlClient); return cal.getCalendarsByIds(calendarIds); } diff --git a/x-pack/plugins/ml/server/routes/data_frame_analytics.ts b/x-pack/plugins/ml/server/routes/data_frame_analytics.ts index 24be23332e4cf8..3e6c6f5f6a2f82 100644 --- a/x-pack/plugins/ml/server/routes/data_frame_analytics.ts +++ b/x-pack/plugins/ml/server/routes/data_frame_analytics.ts @@ -19,6 +19,7 @@ import { } from './schemas/data_analytics_schema'; import { IndexPatternHandler } from '../models/data_frame_analytics/index_patterns'; import { DeleteDataFrameAnalyticsWithIndexStatus } from '../../common/types/data_frame_analytics'; +import { getAuthorizationHeader } from '../lib/request_authorization'; function getIndexPatternId(context: RequestHandlerContext, patternName: string) { const iph = new IndexPatternHandler(context.core.savedObjects.client); @@ -77,7 +78,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser('ml.getDataFrameAnalytics'); + const results = await context.ml!.mlClient.callAsInternalUser('ml.getDataFrameAnalytics'); return response.ok({ body: results, }); @@ -109,7 +110,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { analyticsId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.getDataFrameAnalytics', { + const results = await context.ml!.mlClient.callAsInternalUser('ml.getDataFrameAnalytics', { analyticsId, }); return response.ok({ @@ -138,7 +139,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser( + const 
results = await context.ml!.mlClient.callAsInternalUser( 'ml.getDataFrameAnalyticsStats' ); return response.ok({ @@ -172,7 +173,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { analyticsId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser( + const results = await context.ml!.mlClient.callAsInternalUser( 'ml.getDataFrameAnalyticsStats', { analyticsId, @@ -212,11 +213,12 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { analyticsId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser( + const results = await context.ml!.mlClient.callAsInternalUser( 'ml.createDataFrameAnalytics', { body: request.body, analyticsId, + ...getAuthorizationHeader(request), } ); return response.ok({ @@ -249,10 +251,11 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser( + const results = await context.ml!.mlClient.callAsInternalUser( 'ml.evaluateDataFrameAnalytics', { body: request.body, + ...getAuthorizationHeader(request), } ); return response.ok({ @@ -286,7 +289,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const results = await context.ml!.mlClient.callAsCurrentUser( + const results = await context.ml!.mlClient.callAsInternalUser( 'ml.explainDataFrameAnalytics', { body: request.body, @@ -335,7 +338,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat // Check if analyticsId is valid and get destination index if (deleteDestIndex || deleteDestIndexPattern) { try { - const dfa = await context.ml!.mlClient.callAsCurrentUser('ml.getDataFrameAnalytics', { + const dfa = await context.ml!.mlClient.callAsInternalUser('ml.getDataFrameAnalytics', { analyticsId, }); if (Array.isArray(dfa.data_frame_analytics) && dfa.data_frame_analytics.length > 0) { @@ -381,7 +384,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat // Delete the data frame analytics try { - await context.ml!.mlClient.callAsCurrentUser('ml.deleteDataFrameAnalytics', { + await context.ml!.mlClient.callAsInternalUser('ml.deleteDataFrameAnalytics', { analyticsId, }); analyticsJobDeleted.success = true; @@ -427,9 +430,12 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { analyticsId } = request.params; - const results = await context.ml!.mlClient.callAsCurrentUser('ml.startDataFrameAnalytics', { - analyticsId, - }); + const results = await context.ml!.mlClient.callAsInternalUser( + 'ml.startDataFrameAnalytics', + { + analyticsId, + } + ); return response.ok({ body: results, }); @@ -465,13 +471,13 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat const options: { analyticsId: string; force?: boolean | undefined } = { analyticsId: request.params.analyticsId, }; - // @ts-ignore TODO: update types + // @ts-expect-error TODO: update types if (request.url?.query?.force !== undefined) { - // @ts-ignore TODO: update types + // @ts-expect-error TODO: update types 
options.force = request.url.query.force; } - const results = await context.ml!.mlClient.callAsCurrentUser( + const results = await context.ml!.mlClient.callAsInternalUser( 'ml.stopDataFrameAnalytics', options ); @@ -545,9 +551,7 @@ export function dataFrameAnalyticsRoutes({ router, mlLicense }: RouteInitializat mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { analyticsId } = request.params; - const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { getAnalyticsAuditMessages } = analyticsAuditMessagesProvider(context.ml!.mlClient); const results = await getAnalyticsAuditMessages(analyticsId); return response.ok({ diff --git a/x-pack/plugins/ml/server/routes/data_visualizer.ts b/x-pack/plugins/ml/server/routes/data_visualizer.ts index 04008a896a1a22..818e981835ced2 100644 --- a/x-pack/plugins/ml/server/routes/data_visualizer.ts +++ b/x-pack/plugins/ml/server/routes/data_visualizer.ts @@ -7,8 +7,9 @@ import { RequestHandlerContext } from 'kibana/server'; import { wrapError } from '../client/error_wrapper'; import { DataVisualizer } from '../models/data_visualizer'; -import { Field } from '../models/data_visualizer/data_visualizer'; +import { Field, HistogramField } from '../models/data_visualizer/data_visualizer'; import { + dataVisualizerFieldHistogramsSchema, dataVisualizerFieldStatsSchema, dataVisualizerOverallStatsSchema, indexPatternTitleSchema, @@ -26,7 +27,7 @@ function getOverallStats( earliestMs: number, latestMs: number ) { - const dv = new DataVisualizer(context.ml!.mlClient.callAsCurrentUser); + const dv = new DataVisualizer(context.ml!.mlClient); return dv.getOverallStats( indexPatternTitle, query, @@ -51,7 +52,7 @@ function getStatsForFields( interval: number, maxExamples: number ) { - const dv = new DataVisualizer(context.ml!.mlClient.callAsCurrentUser); + const dv = new DataVisualizer(context.ml!.mlClient); return dv.getStatsForFields( indexPatternTitle, query, @@ -65,10 +66,68 @@ function getStatsForFields( ); } +function getHistogramsForFields( + context: RequestHandlerContext, + indexPatternTitle: string, + query: any, + fields: HistogramField[], + samplerShardSize: number +) { + const dv = new DataVisualizer(context.ml!.mlClient); + return dv.getHistogramsForFields(indexPatternTitle, query, fields, samplerShardSize); +} + /** * Routes for the index data visualizer. */ export function dataVisualizerRoutes({ router, mlLicense }: RouteInitialization) { + /** + * @apiGroup DataVisualizer + * + * @api {post} /api/ml/data_visualizer/get_field_histograms/:indexPatternTitle Get histograms for fields + * @apiName GetHistogramsForFields + * @apiDescription Returns the histograms for a list of fields in the specified index pattern. + * + * @apiSchema (params) indexPatternTitleSchema + * @apiSchema (body) dataVisualizerFieldHistogramsSchema + * + * @apiSuccess {Object} fieldName histograms by field, keyed on the name of the field. 
+ */ + router.post( + { + path: '/api/ml/data_visualizer/get_field_histograms/{indexPatternTitle}', + validate: { + params: indexPatternTitleSchema, + body: dataVisualizerFieldHistogramsSchema, + }, + options: { + tags: ['access:ml:canAccessML'], + }, + }, + mlLicense.basicLicenseAPIGuard(async (context, request, response) => { + try { + const { + params: { indexPatternTitle }, + body: { query, fields, samplerShardSize }, + } = request; + + const results = await getHistogramsForFields( + context, + indexPatternTitle, + query, + fields, + samplerShardSize + ); + + return response.ok({ + body: results, + }); + } catch (e) { + return response.customError(wrapError(e)); + } + }) + ); + /** * @apiGroup DataVisualizer * diff --git a/x-pack/plugins/ml/server/routes/datafeeds.ts b/x-pack/plugins/ml/server/routes/datafeeds.ts index 1fa1d408372da3..855b64b0ffed06 100644 --- a/x-pack/plugins/ml/server/routes/datafeeds.ts +++ b/x-pack/plugins/ml/server/routes/datafeeds.ts @@ -12,6 +12,7 @@ import { datafeedIdSchema, deleteDatafeedQuerySchema, } from './schemas/datafeeds_schema'; +import { getAuthorizationHeader } from '../lib/request_authorization'; /** * Routes for datafeed service @@ -34,7 +35,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.datafeeds'); + const resp = await context.ml!.mlClient.callAsInternalUser('ml.datafeeds'); return response.ok({ body: resp, @@ -67,7 +68,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const datafeedId = request.params.datafeedId; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.datafeeds', { datafeedId }); + const resp = await context.ml!.mlClient.callAsInternalUser('ml.datafeeds', { datafeedId }); return response.ok({ body: resp, @@ -95,7 +96,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.datafeedStats'); + const resp = await context.ml!.mlClient.callAsInternalUser('ml.datafeedStats'); return response.ok({ body: resp, @@ -128,7 +129,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const datafeedId = request.params.datafeedId; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.datafeedStats', { + const resp = await context.ml!.mlClient.callAsInternalUser('ml.datafeedStats', { datafeedId, }); @@ -165,9 +166,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const datafeedId = request.params.datafeedId; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.addDatafeed', { + const resp = await context.ml!.mlClient.callAsInternalUser('ml.addDatafeed', { datafeedId, body: request.body, + ...getAuthorizationHeader(request), }); return response.ok({ @@ -203,9 +205,10 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const datafeedId = request.params.datafeedId; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.updateDatafeed', { + const resp = 
await context.ml!.mlClient.callAsInternalUser('ml.updateDatafeed', { datafeedId, body: request.body, + ...getAuthorizationHeader(request), }); return response.ok({ @@ -248,7 +251,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { options.force = force; } - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.deleteDatafeed', options); + const resp = await context.ml!.mlClient.callAsInternalUser('ml.deleteDatafeed', options); return response.ok({ body: resp, @@ -285,7 +288,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { const datafeedId = request.params.datafeedId; const { start, end } = request.body; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.startDatafeed', { + const resp = await context.ml!.mlClient.callAsInternalUser('ml.startDatafeed', { datafeedId, start, end, @@ -323,7 +326,7 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { try { const datafeedId = request.params.datafeedId; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.stopDatafeed', { + const resp = await context.ml!.mlClient.callAsInternalUser('ml.stopDatafeed', { datafeedId, }); @@ -358,8 +361,9 @@ export function dataFeedRoutes({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const datafeedId = request.params.datafeedId; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.datafeedPreview', { + const resp = await context.ml!.mlClient.callAsInternalUser('ml.datafeedPreview', { datafeedId, + ...getAuthorizationHeader(request), }); return response.ok({ diff --git a/x-pack/plugins/ml/server/routes/fields_service.ts b/x-pack/plugins/ml/server/routes/fields_service.ts index b0f13df294145e..b83f846b1685d1 100644 --- a/x-pack/plugins/ml/server/routes/fields_service.ts +++ b/x-pack/plugins/ml/server/routes/fields_service.ts @@ -14,13 +14,13 @@ import { import { fieldsServiceProvider } from '../models/fields_service'; function getCardinalityOfFields(context: RequestHandlerContext, payload: any) { - const fs = fieldsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const fs = fieldsServiceProvider(context.ml!.mlClient); const { index, fieldNames, query, timeFieldName, earliestMs, latestMs } = payload; return fs.getCardinalityOfFields(index, fieldNames, query, timeFieldName, earliestMs, latestMs); } function getTimeFieldRange(context: RequestHandlerContext, payload: any) { - const fs = fieldsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const fs = fieldsServiceProvider(context.ml!.mlClient); const { index, timeFieldName, query } = payload; return fs.getTimeFieldRange(index, timeFieldName, query); } diff --git a/x-pack/plugins/ml/server/routes/file_data_visualizer.ts b/x-pack/plugins/ml/server/routes/file_data_visualizer.ts index 0f389f9505943b..b57eda5ad56a19 100644 --- a/x-pack/plugins/ml/server/routes/file_data_visualizer.ts +++ b/x-pack/plugins/ml/server/routes/file_data_visualizer.ts @@ -29,7 +29,7 @@ import { } from './schemas/file_data_visualizer_schema'; function analyzeFiles(context: RequestHandlerContext, data: InputData, overrides: InputOverrides) { - const { analyzeFile } = fileDataVisualizerProvider(context.ml!.mlClient.callAsCurrentUser); + const { analyzeFile } = fileDataVisualizerProvider(context.ml!.mlClient); return analyzeFile(data, overrides); } @@ -42,7 +42,7 @@ function importData( ingestPipeline: IngestPipelineWrapper, data: InputData ) { - const { importData: 
importDataFunc } = importDataProvider(context.ml!.mlClient.callAsCurrentUser); + const { importData: importDataFunc } = importDataProvider(context.ml!.mlClient); return importDataFunc(id, index, settings, mappings, ingestPipeline, data); } diff --git a/x-pack/plugins/ml/server/routes/filters.ts b/x-pack/plugins/ml/server/routes/filters.ts index d5287c349a8fca..dcdb4caa6cd3bd 100644 --- a/x-pack/plugins/ml/server/routes/filters.ts +++ b/x-pack/plugins/ml/server/routes/filters.ts @@ -13,32 +13,32 @@ import { FilterManager, FormFilter } from '../models/filter'; // TODO - add function for returning a list of just the filter IDs. // TODO - add function for returning a list of filter IDs plus item count. function getAllFilters(context: RequestHandlerContext) { - const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + const mgr = new FilterManager(context.ml!.mlClient); return mgr.getAllFilters(); } function getAllFilterStats(context: RequestHandlerContext) { - const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + const mgr = new FilterManager(context.ml!.mlClient); return mgr.getAllFilterStats(); } function getFilter(context: RequestHandlerContext, filterId: string) { - const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + const mgr = new FilterManager(context.ml!.mlClient); return mgr.getFilter(filterId); } function newFilter(context: RequestHandlerContext, filter: FormFilter) { - const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + const mgr = new FilterManager(context.ml!.mlClient); return mgr.newFilter(filter); } function updateFilter(context: RequestHandlerContext, filterId: string, filter: FormFilter) { - const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + const mgr = new FilterManager(context.ml!.mlClient); return mgr.updateFilter(filterId, filter); } function deleteFilter(context: RequestHandlerContext, filterId: string) { - const mgr = new FilterManager(context.ml!.mlClient.callAsCurrentUser); + const mgr = new FilterManager(context.ml!.mlClient); return mgr.deleteFilter(filterId); } diff --git a/x-pack/plugins/ml/server/routes/job_audit_messages.ts b/x-pack/plugins/ml/server/routes/job_audit_messages.ts index 5acc89e7d13be7..d4840ed650a324 100644 --- a/x-pack/plugins/ml/server/routes/job_audit_messages.ts +++ b/x-pack/plugins/ml/server/routes/job_audit_messages.ts @@ -39,9 +39,7 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { getJobAuditMessages } = jobAuditMessagesProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { getJobAuditMessages } = jobAuditMessagesProvider(context.ml!.mlClient); const { jobId } = request.params; const { from } = request.query; const resp = await getJobAuditMessages(jobId, from); @@ -76,9 +74,7 @@ export function jobAuditMessagesRoutes({ router, mlLicense }: RouteInitializatio }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { getJobAuditMessages } = jobAuditMessagesProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { getJobAuditMessages } = jobAuditMessagesProvider(context.ml!.mlClient); const { from } = request.query; const resp = await getJobAuditMessages(undefined, from); diff --git a/x-pack/plugins/ml/server/routes/job_service.ts b/x-pack/plugins/ml/server/routes/job_service.ts index 10d1c9952b540d..e03dbb40d623a4 100644 --- a/x-pack/plugins/ml/server/routes/job_service.ts +++ 
b/x-pack/plugins/ml/server/routes/job_service.ts @@ -50,7 +50,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { forceStartDatafeeds } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { forceStartDatafeeds } = jobServiceProvider(context.ml!.mlClient); const { datafeedIds, start, end } = request.body; const resp = await forceStartDatafeeds(datafeedIds, start, end); @@ -84,7 +84,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { stopDatafeeds } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { stopDatafeeds } = jobServiceProvider(context.ml!.mlClient); const { datafeedIds } = request.body; const resp = await stopDatafeeds(datafeedIds); @@ -118,7 +118,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { deleteJobs } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { deleteJobs } = jobServiceProvider(context.ml!.mlClient); const { jobIds } = request.body; const resp = await deleteJobs(jobIds); @@ -152,7 +152,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { closeJobs } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { closeJobs } = jobServiceProvider(context.ml!.mlClient); const { jobIds } = request.body; const resp = await closeJobs(jobIds); @@ -186,7 +186,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { forceStopAndCloseJob } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { forceStopAndCloseJob } = jobServiceProvider(context.ml!.mlClient); const { jobId } = request.body; const resp = await forceStopAndCloseJob(jobId); @@ -225,7 +225,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { jobsSummary } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { jobsSummary } = jobServiceProvider(context.ml!.mlClient); const { jobIds } = request.body; const resp = await jobsSummary(jobIds); @@ -259,7 +259,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { jobsWithTimerange } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { jobsWithTimerange } = jobServiceProvider(context.ml!.mlClient); const resp = await jobsWithTimerange(); return response.ok({ @@ -292,7 +292,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { createFullJobsList } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { createFullJobsList } = jobServiceProvider(context.ml!.mlClient); const { jobIds } = request.body; const resp = await createFullJobsList(jobIds); @@ -322,7 +322,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, 
response) => { try { - const { getAllGroups } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { getAllGroups } = jobServiceProvider(context.ml!.mlClient); const resp = await getAllGroups(); return response.ok({ @@ -355,7 +355,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { updateGroups } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { updateGroups } = jobServiceProvider(context.ml!.mlClient); const { jobs } = request.body; const resp = await updateGroups(jobs); @@ -385,7 +385,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { deletingJobTasks } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { deletingJobTasks } = jobServiceProvider(context.ml!.mlClient); const resp = await deletingJobTasks(); return response.ok({ @@ -418,7 +418,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { jobsExist } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { jobsExist } = jobServiceProvider(context.ml!.mlClient); const { jobIds } = request.body; const resp = await jobsExist(jobIds); @@ -454,7 +454,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { const { indexPattern } = request.params; const isRollup = request.query.rollup === 'true'; const savedObjectsClient = context.core.savedObjects.client; - const { newJobCaps } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { newJobCaps } = jobServiceProvider(context.ml!.mlClient); const resp = await newJobCaps(indexPattern, isRollup, savedObjectsClient); return response.ok({ @@ -499,7 +499,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { splitFieldValue, } = request.body; - const { newJobLineChart } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { newJobLineChart } = jobServiceProvider(context.ml!.mlClient); const resp = await newJobLineChart( indexPatternTitle, timeField, @@ -553,9 +553,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { splitFieldName, } = request.body; - const { newJobPopulationChart } = jobServiceProvider( - context.ml!.mlClient.callAsCurrentUser - ); + const { newJobPopulationChart } = jobServiceProvider(context.ml!.mlClient); const resp = await newJobPopulationChart( indexPatternTitle, timeField, @@ -593,7 +591,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { getAllJobAndGroupIds } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { getAllJobAndGroupIds } = jobServiceProvider(context.ml!.mlClient); const resp = await getAllJobAndGroupIds(); return response.ok({ @@ -626,7 +624,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { getLookBackProgress } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { getLookBackProgress } = jobServiceProvider(context.ml!.mlClient); const { jobId, start, end } = request.body; const resp = await getLookBackProgress(jobId, start, end); @@ 
-660,10 +658,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { validateCategoryExamples } = categorizationExamplesProvider( - context.ml!.mlClient.callAsCurrentUser, - context.ml!.mlClient.callAsInternalUser - ); + const { validateCategoryExamples } = categorizationExamplesProvider(context.ml!.mlClient); const { indexPatternTitle, timeField, @@ -716,7 +711,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { topCategories } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { topCategories } = jobServiceProvider(context.ml!.mlClient); const { jobId, count } = request.body; const resp = await topCategories(jobId, count); @@ -750,7 +745,7 @@ export function jobServiceRoutes({ router, mlLicense }: RouteInitialization) { }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const { revertModelSnapshot } = jobServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const { revertModelSnapshot } = jobServiceProvider(context.ml!.mlClient); const { jobId, snapshotId, diff --git a/x-pack/plugins/ml/server/routes/job_validation.ts b/x-pack/plugins/ml/server/routes/job_validation.ts index 0af8141a2a6411..e52c6b76e918b3 100644 --- a/x-pack/plugins/ml/server/routes/job_validation.ts +++ b/x-pack/plugins/ml/server/routes/job_validation.ts @@ -32,7 +32,7 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, ) { const { analysisConfig, indexPattern, query, timeFieldName, earliestMs, latestMs } = payload; - return calculateModelMemoryLimitProvider(context.ml!.mlClient.callAsCurrentUser)( + return calculateModelMemoryLimitProvider(context.ml!.mlClient)( analysisConfig as AnalysisConfig, indexPattern, query, @@ -64,11 +64,7 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { let errorResp; - const resp = await estimateBucketSpanFactory( - context.ml!.mlClient.callAsCurrentUser, - context.ml!.mlClient.callAsInternalUser, - mlLicense.isSecurityEnabled() === false - )(request.body) + const resp = await estimateBucketSpanFactory(context.ml!.mlClient)(request.body) // this catch gets triggered when the estimation code runs without error // but isn't able to come up with a bucket span estimation. // this doesn't return a HTTP error but an object with an error message @@ -147,10 +143,7 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, }, mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { - const resp = await validateCardinality( - context.ml!.mlClient.callAsCurrentUser, - request.body - ); + const resp = await validateCardinality(context.ml!.mlClient, request.body); return response.ok({ body: resp, @@ -184,10 +177,9 @@ export function jobValidationRoutes({ router, mlLicense }: RouteInitialization, try { // version corresponds to the version used in documentation links. 
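The change repeated throughout these route files is mechanical: provider factories that used to receive a bare `callAsCurrentUser` (a `LegacyAPICaller`) now receive the whole scoped client from `context.ml!.mlClient`, so each operation can choose the appropriate caller. A minimal sketch of the pattern, with illustrative names rather than the actual Kibana providers:

```typescript
import { ILegacyScopedClusterClient } from 'kibana/server';

// Illustrative provider: taking the scoped client keeps one signature while
// letting each call decide between the end user and the internal user.
export function exampleJobsProvider(mlClusterClient: ILegacyScopedClusterClient) {
  const { callAsCurrentUser, callAsInternalUser } = mlClusterClient;
  return {
    // a read that should stay limited to what the end user is allowed to see
    jobsForCurrentUser: () => callAsCurrentUser('ml.jobs', {}),
    // a system-level read that is safe to run as the Kibana internal user
    mlInfo: () => callAsInternalUser('ml.info'),
  };
}
```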
const resp = await validateJob( - context.ml!.mlClient.callAsCurrentUser, + context.ml!.mlClient, request.body, version, - context.ml!.mlClient.callAsInternalUser, mlLicense.isSecurityEnabled() === false ); diff --git a/x-pack/plugins/ml/server/routes/modules.ts b/x-pack/plugins/ml/server/routes/modules.ts index 88d24a1b86b6d3..463babb86304f3 100644 --- a/x-pack/plugins/ml/server/routes/modules.ts +++ b/x-pack/plugins/ml/server/routes/modules.ts @@ -6,7 +6,7 @@ import { TypeOf } from '@kbn/config-schema'; -import { RequestHandlerContext } from 'kibana/server'; +import { RequestHandlerContext, KibanaRequest } from 'kibana/server'; import { DatafeedOverride, JobOverride } from '../../common/types/modules'; import { wrapError } from '../client/error_wrapper'; import { DataRecognizer } from '../models/data_recognizer'; @@ -18,19 +18,17 @@ import { } from './schemas/modules'; import { RouteInitialization } from '../types'; -function recognize(context: RequestHandlerContext, indexPatternTitle: string) { - const dr = new DataRecognizer( - context.ml!.mlClient.callAsCurrentUser, - context.core.savedObjects.client - ); +function recognize( + context: RequestHandlerContext, + request: KibanaRequest, + indexPatternTitle: string +) { + const dr = new DataRecognizer(context.ml!.mlClient, context.core.savedObjects.client, request); return dr.findMatches(indexPatternTitle); } -function getModule(context: RequestHandlerContext, moduleId: string) { - const dr = new DataRecognizer( - context.ml!.mlClient.callAsCurrentUser, - context.core.savedObjects.client - ); +function getModule(context: RequestHandlerContext, request: KibanaRequest, moduleId: string) { + const dr = new DataRecognizer(context.ml!.mlClient, context.core.savedObjects.client, request); if (moduleId === undefined) { return dr.listModules(); } else { @@ -40,6 +38,7 @@ function getModule(context: RequestHandlerContext, moduleId: string) { function setup( context: RequestHandlerContext, + request: KibanaRequest, moduleId: string, prefix?: string, groups?: string[], @@ -53,10 +52,7 @@ function setup( datafeedOverrides?: DatafeedOverride | DatafeedOverride[], estimateModelMemory?: boolean ) { - const dr = new DataRecognizer( - context.ml!.mlClient.callAsCurrentUser, - context.core.savedObjects.client - ); + const dr = new DataRecognizer(context.ml!.mlClient, context.core.savedObjects.client, request); return dr.setup( moduleId, prefix, @@ -73,11 +69,12 @@ function setup( ); } -function dataRecognizerJobsExist(context: RequestHandlerContext, moduleId: string) { - const dr = new DataRecognizer( - context.ml!.mlClient.callAsCurrentUser, - context.core.savedObjects.client - ); +function dataRecognizerJobsExist( + context: RequestHandlerContext, + request: KibanaRequest, + moduleId: string +) { + const dr = new DataRecognizer(context.ml!.mlClient, context.core.savedObjects.client, request); return dr.dataRecognizerJobsExist(moduleId); } @@ -125,7 +122,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { indexPatternTitle } = request.params; - const results = await recognize(context, indexPatternTitle); + const results = await recognize(context, request, indexPatternTitle); return response.ok({ body: results }); } catch (e) { @@ -260,7 +257,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { // the moduleId will be an empty string. 
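In `modules.ts`, the `recognize`, `getModule`, `setup`, and `dataRecognizerJobsExist` wrappers above now thread the `KibanaRequest` through to `DataRecognizer` alongside the scoped client. A hedged sketch of why the recognizer wants the request (the constructor arguments mirror the calls above; the method body is purely illustrative):

```typescript
import {
  ILegacyScopedClusterClient,
  KibanaRequest,
  SavedObjectsClientContract,
} from 'kibana/server';

// Illustrative recognizer: keeping the request around means module setup can
// forward the end user's credentials (e.g. via a secondary-authorization
// header, as in the earlier sketch) when it creates jobs and datafeeds while
// running as the internal user.
class ExampleRecognizer {
  constructor(
    private readonly mlClusterClient: ILegacyScopedClusterClient,
    readonly savedObjectsClient: SavedObjectsClientContract,
    readonly request: KibanaRequest
  ) {}

  async findMatches(indexPatternTitle: string) {
    // Matching only needs a search, which can run as the internal user.
    return this.mlClusterClient.callAsInternalUser('search', {
      index: indexPatternTitle,
      size: 0,
    });
  }
}
```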
moduleId = undefined; } - const results = await getModule(context, moduleId); + const results = await getModule(context, request, moduleId); return response.ok({ body: results }); } catch (e) { @@ -440,6 +437,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { const result = await setup( context, + request, moduleId, prefix, groups, @@ -526,7 +524,7 @@ export function dataRecognizer({ router, mlLicense }: RouteInitialization) { mlLicense.fullLicenseAPIGuard(async (context, request, response) => { try { const { moduleId } = request.params; - const result = await dataRecognizerJobsExist(context, moduleId); + const result = await dataRecognizerJobsExist(context, request, moduleId); return response.ok({ body: result }); } catch (e) { diff --git a/x-pack/plugins/ml/server/routes/results_service.ts b/x-pack/plugins/ml/server/routes/results_service.ts index 94ca0827ccfa59..c7fcebd2a29a51 100644 --- a/x-pack/plugins/ml/server/routes/results_service.ts +++ b/x-pack/plugins/ml/server/routes/results_service.ts @@ -17,7 +17,7 @@ import { import { resultsServiceProvider } from '../models/results_service'; function getAnomaliesTableData(context: RequestHandlerContext, payload: any) { - const rs = resultsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const rs = resultsServiceProvider(context.ml!.mlClient); const { jobIds, criteriaFields, @@ -47,24 +47,24 @@ function getAnomaliesTableData(context: RequestHandlerContext, payload: any) { } function getCategoryDefinition(context: RequestHandlerContext, payload: any) { - const rs = resultsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const rs = resultsServiceProvider(context.ml!.mlClient); return rs.getCategoryDefinition(payload.jobId, payload.categoryId); } function getCategoryExamples(context: RequestHandlerContext, payload: any) { - const rs = resultsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const rs = resultsServiceProvider(context.ml!.mlClient); const { jobId, categoryIds, maxExamples } = payload; return rs.getCategoryExamples(jobId, categoryIds, maxExamples); } function getMaxAnomalyScore(context: RequestHandlerContext, payload: any) { - const rs = resultsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const rs = resultsServiceProvider(context.ml!.mlClient); const { jobIds, earliestMs, latestMs } = payload; return rs.getMaxAnomalyScore(jobIds, earliestMs, latestMs); } function getPartitionFieldsValues(context: RequestHandlerContext, payload: any) { - const rs = resultsServiceProvider(context.ml!.mlClient.callAsCurrentUser); + const rs = resultsServiceProvider(context.ml!.mlClient); const { jobId, searchTerm, criteriaFields, earliestMs, latestMs } = payload; return rs.getPartitionFieldsValues(jobId, searchTerm, criteriaFields, earliestMs, latestMs); } diff --git a/x-pack/plugins/ml/server/routes/schemas/data_visualizer_schema.ts b/x-pack/plugins/ml/server/routes/schemas/data_visualizer_schema.ts index b2d665954bd4dc..24e45514e1efce 100644 --- a/x-pack/plugins/ml/server/routes/schemas/data_visualizer_schema.ts +++ b/x-pack/plugins/ml/server/routes/schemas/data_visualizer_schema.ts @@ -11,6 +11,15 @@ export const indexPatternTitleSchema = schema.object({ indexPatternTitle: schema.string(), }); +export const dataVisualizerFieldHistogramsSchema = schema.object({ + /** Query to match documents in the index. */ + query: schema.any(), + /** The fields to return histogram data. 
*/ + fields: schema.arrayOf(schema.any()), + /** Number of documents to be collected in the sample processed on each shard, or -1 for no sampling. */ + samplerShardSize: schema.number(), +}); + export const dataVisualizerFieldStatsSchema = schema.object({ /** Query to match documents in the index. */ query: schema.any(), diff --git a/x-pack/plugins/ml/server/routes/system.ts b/x-pack/plugins/ml/server/routes/system.ts index d78c1cf3aa6af3..410d540ecb8f72 100644 --- a/x-pack/plugins/ml/server/routes/system.ts +++ b/x-pack/plugins/ml/server/routes/system.ts @@ -60,9 +60,10 @@ export function systemRoutes( }, mlLicense.basicLicenseAPIGuard(async (context, request, response) => { try { + const { callAsCurrentUser, callAsInternalUser } = context.ml!.mlClient; let upgradeInProgress = false; try { - const info = await context.ml!.mlClient.callAsCurrentUser('ml.info'); + const info = await callAsInternalUser('ml.info'); // if ml indices are currently being migrated, upgrade_mode will be set to true // pass this back with the privileges to allow for the disabling of UI controls. upgradeInProgress = info.upgrade_mode === true; @@ -90,7 +91,7 @@ export function systemRoutes( }); } else { const body = request.body; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.privilegeCheck', { body }); + const resp = await callAsCurrentUser('ml.privilegeCheck', { body }); resp.upgradeInProgress = upgradeInProgress; return response.ok({ body: resp, @@ -128,7 +129,7 @@ export function systemRoutes( } const { getCapabilities } = capabilitiesProvider( - context.ml!.mlClient.callAsCurrentUser, + context.ml!.mlClient, mlCapabilities, mlLicense, isMlEnabledInSpace @@ -154,43 +155,15 @@ export function systemRoutes( path: '/api/ml/ml_node_count', validate: false, options: { - tags: ['access:ml:canGetJobs'], + tags: ['access:ml:canGetJobs', 'access:ml:canGetDatafeeds'], }, }, mlLicense.basicLicenseAPIGuard(async (context, request, response) => { try { - // check for basic license first for consistency with other - // security disabled checks - if (mlLicense.isSecurityEnabled() === false) { - return response.ok({ - body: await getNodeCount(context), - }); - } else { - // if security is enabled, check that the user has permission to - // view jobs before calling getNodeCount. - // getNodeCount calls the _nodes endpoint as the internal user - // and so could give the user access to more information than - // they are entitled to. 
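For the new `get_field_histograms` route registered earlier and the `dataVisualizerFieldHistogramsSchema` added above, a request body would look roughly like the following. The values and the `{ fieldName, type }` shape are illustrative only; the schema itself accepts any query object and any array of field descriptors:

```typescript
// Example payload for
// POST /api/ml/data_visualizer/get_field_histograms/{indexPatternTitle}
// The response is keyed by field name, one histogram per requested field.
const exampleHistogramRequestBody = {
  query: { bool: { must: [{ match_all: {} }] } },
  fields: [
    { fieldName: 'responsetime', type: 'number' },
    { fieldName: 'airline', type: 'string' },
  ],
  // documents sampled per shard, or -1 to disable sampling
  samplerShardSize: 5000,
};
```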
- const requiredPrivileges = [ - 'cluster:monitor/xpack/ml/job/get', - 'cluster:monitor/xpack/ml/job/stats/get', - 'cluster:monitor/xpack/ml/datafeeds/get', - 'cluster:monitor/xpack/ml/datafeeds/stats/get', - ]; - const body = { cluster: requiredPrivileges }; - const resp = await context.ml!.mlClient.callAsCurrentUser('ml.privilegeCheck', { body }); - - if (resp.has_all_requested) { - return response.ok({ - body: await getNodeCount(context), - }); - } else { - // if the user doesn't have permission to create jobs - // return a 403 - return response.forbidden(); - } - } + return response.ok({ + body: await getNodeCount(context), + }); } catch (e) { return response.customError(wrapError(e)); } @@ -214,7 +187,7 @@ export function systemRoutes( }, mlLicense.basicLicenseAPIGuard(async (context, request, response) => { try { - const info = await context.ml!.mlClient.callAsCurrentUser('ml.info'); + const info = await context.ml!.mlClient.callAsInternalUser('ml.info'); const cloudId = cloud && cloud.cloudId; return response.ok({ body: { ...info, cloudId }, diff --git a/x-pack/plugins/ml/server/shared.ts b/x-pack/plugins/ml/server/shared.ts index 3fca8ea1ba0478..100433b23f7d13 100644 --- a/x-pack/plugins/ml/server/shared.ts +++ b/x-pack/plugins/ml/server/shared.ts @@ -8,3 +8,4 @@ export * from '../common/types/anomalies'; export * from '../common/types/anomaly_detection_jobs'; export * from './lib/capabilities/errors'; export { ModuleSetupPayload } from './shared_services/providers/modules'; +export { getHistogramsForFields } from './models/data_visualizer/'; diff --git a/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts b/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts index 3ae05152ae6303..1140af0b764049 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/anomaly_detectors.ts @@ -4,13 +4,13 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller, KibanaRequest } from 'kibana/server'; +import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; import { Job } from '../../../common/types/anomaly_detection_jobs'; import { SharedServicesChecks } from '../shared_services'; export interface AnomalyDetectorsProvider { anomalyDetectorsProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { jobs(jobId?: string): Promise<{ count: number; jobs: Job[] }>; @@ -22,13 +22,16 @@ export function getAnomalyDetectorsProvider({ getHasMlCapabilities, }: SharedServicesChecks): AnomalyDetectorsProvider { return { - anomalyDetectorsProvider(callAsCurrentUser: LegacyAPICaller, request: KibanaRequest) { + anomalyDetectorsProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { const hasMlCapabilities = getHasMlCapabilities(request); return { async jobs(jobId?: string) { isFullLicense(); await hasMlCapabilities(['canGetJobs']); - return callAsCurrentUser('ml.jobs', jobId !== undefined ? { jobId } : {}); + return mlClusterClient.callAsInternalUser( + 'ml.jobs', + jobId !== undefined ? 
{ jobId } : {} + ); }, }; }, diff --git a/x-pack/plugins/ml/server/shared_services/providers/job_service.ts b/x-pack/plugins/ml/server/shared_services/providers/job_service.ts index e5a42090163f87..c734dcc1583a1b 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/job_service.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/job_service.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller, KibanaRequest } from 'kibana/server'; +import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; import { jobServiceProvider } from '../../models/job_service'; import { SharedServicesChecks } from '../shared_services'; @@ -12,7 +12,7 @@ type OrigJobServiceProvider = ReturnType; export interface JobServiceProvider { jobServiceProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { jobsSummary: OrigJobServiceProvider['jobsSummary']; @@ -24,9 +24,9 @@ export function getJobServiceProvider({ getHasMlCapabilities, }: SharedServicesChecks): JobServiceProvider { return { - jobServiceProvider(callAsCurrentUser: LegacyAPICaller, request: KibanaRequest) { + jobServiceProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { // const hasMlCapabilities = getHasMlCapabilities(request); - const { jobsSummary } = jobServiceProvider(callAsCurrentUser); + const { jobsSummary } = jobServiceProvider(mlClusterClient); return { async jobsSummary(...args) { isFullLicense(); diff --git a/x-pack/plugins/ml/server/shared_services/providers/modules.ts b/x-pack/plugins/ml/server/shared_services/providers/modules.ts index 27935fd6fe21d8..33c8d28399a32e 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/modules.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/modules.ts @@ -4,7 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller, KibanaRequest, SavedObjectsClientContract } from 'kibana/server'; +import { + ILegacyScopedClusterClient, + KibanaRequest, + SavedObjectsClientContract, +} from 'kibana/server'; import { TypeOf } from '@kbn/config-schema'; import { DataRecognizer } from '../../models/data_recognizer'; import { SharedServicesChecks } from '../shared_services'; @@ -15,7 +19,7 @@ export type ModuleSetupPayload = TypeOf & export interface ModulesProvider { modulesProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest, savedObjectsClient: SavedObjectsClientContract ): { @@ -32,12 +36,12 @@ export function getModulesProvider({ }: SharedServicesChecks): ModulesProvider { return { modulesProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest, savedObjectsClient: SavedObjectsClientContract ) { const hasMlCapabilities = getHasMlCapabilities(request); - const dr = dataRecognizerFactory(callAsCurrentUser, savedObjectsClient); + const dr = dataRecognizerFactory(mlClusterClient, savedObjectsClient, request); return { async recognize(...args) { isFullLicense(); @@ -82,8 +86,9 @@ export function getModulesProvider({ } function dataRecognizerFactory( - callAsCurrentUser: LegacyAPICaller, - savedObjectsClient: SavedObjectsClientContract + mlClusterClient: ILegacyScopedClusterClient, + savedObjectsClient: SavedObjectsClientContract, + request: KibanaRequest ) { - return new DataRecognizer(callAsCurrentUser, savedObjectsClient); + return new DataRecognizer(mlClusterClient, savedObjectsClient, request); } diff --git a/x-pack/plugins/ml/server/shared_services/providers/results_service.ts b/x-pack/plugins/ml/server/shared_services/providers/results_service.ts index e9448a67cd98a3..366a1f8b8c6f41 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/results_service.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/results_service.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { LegacyAPICaller, KibanaRequest } from 'kibana/server'; +import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; import { resultsServiceProvider } from '../../models/results_service'; import { SharedServicesChecks } from '../shared_services'; @@ -12,7 +12,7 @@ type OrigResultsServiceProvider = ReturnType; export interface ResultsServiceProvider { resultsServiceProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { getAnomaliesTableData: OrigResultsServiceProvider['getAnomaliesTableData']; @@ -24,9 +24,9 @@ export function getResultsServiceProvider({ getHasMlCapabilities, }: SharedServicesChecks): ResultsServiceProvider { return { - resultsServiceProvider(callAsCurrentUser: LegacyAPICaller, request: KibanaRequest) { + resultsServiceProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { const hasMlCapabilities = getHasMlCapabilities(request); - const { getAnomaliesTableData } = resultsServiceProvider(callAsCurrentUser); + const { getAnomaliesTableData } = resultsServiceProvider(mlClusterClient); return { async getAnomaliesTableData(...args) { isFullLicense(); diff --git a/x-pack/plugins/ml/server/shared_services/providers/system.ts b/x-pack/plugins/ml/server/shared_services/providers/system.ts index 00124a67e52372..ec2662014546e2 100644 --- a/x-pack/plugins/ml/server/shared_services/providers/system.ts +++ b/x-pack/plugins/ml/server/shared_services/providers/system.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { LegacyAPICaller, KibanaRequest } from 'kibana/server'; +import { ILegacyScopedClusterClient, KibanaRequest } from 'kibana/server'; import { SearchResponse, SearchParams } from 'elasticsearch'; import { MlServerLicense } from '../../lib/license'; import { CloudSetup } from '../../../../cloud/server'; @@ -18,7 +18,7 @@ import { SharedServicesChecks } from '../shared_services'; export interface MlSystemProvider { mlSystemProvider( - callAsCurrentUser: LegacyAPICaller, + mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest ): { mlCapabilities(): Promise; @@ -35,8 +35,9 @@ export function getMlSystemProvider( resolveMlCapabilities: ResolveMlCapabilities ): MlSystemProvider { return { - mlSystemProvider(callAsCurrentUser: LegacyAPICaller, request: KibanaRequest) { + mlSystemProvider(mlClusterClient: ILegacyScopedClusterClient, request: KibanaRequest) { // const hasMlCapabilities = getHasMlCapabilities(request); + const { callAsCurrentUser, callAsInternalUser } = mlClusterClient; return { async mlCapabilities() { isMinimumLicense(); @@ -52,7 +53,7 @@ export function getMlSystemProvider( } const { getCapabilities } = capabilitiesProvider( - callAsCurrentUser, + mlClusterClient, mlCapabilities, mlLicense, isMlEnabledInSpace @@ -62,7 +63,7 @@ export function getMlSystemProvider( async mlInfo(): Promise { isMinimumLicense(); - const info = await callAsCurrentUser('ml.info'); + const info = await callAsInternalUser('ml.info'); const cloudId = cloud && cloud.cloudId; return { ...info, diff --git a/x-pack/plugins/observability/public/data_handler.ts b/x-pack/plugins/observability/public/data_handler.ts index d7f8c471ad9aa7..73e34f214da288 100644 --- a/x-pack/plugins/observability/public/data_handler.ts +++ b/x-pack/plugins/observability/public/data_handler.ts @@ -31,6 +31,6 @@ export function getDataHandler(appName: T) { export async function fetchHasData() { const apps: ObservabilityApp[] = ['apm', 
'uptime', 'infra_logs', 'infra_metrics']; const promises = apps.map((app) => getDataHandler(app)?.hasData()); - const [apm, uptime, logs, metrics] = await Promise.all(promises); + const [apm, uptime, logs, metrics] = await Promise.allSettled(promises); return { apm, uptime, infra_logs: logs, infra_metrics: metrics }; } diff --git a/x-pack/plugins/observability/public/plugin.ts b/x-pack/plugins/observability/public/plugin.ts index bbda1026606f16..335ce897dce7b3 100644 --- a/x-pack/plugins/observability/public/plugin.ts +++ b/x-pack/plugins/observability/public/plugin.ts @@ -9,8 +9,10 @@ import { DEFAULT_APP_CATEGORIES, Plugin as PluginClass, PluginInitializerContext, + CoreStart, } from '../../../../src/core/public'; import { registerDataHandler } from './data_handler'; +import { toggleOverviewLinkInNav } from './toggle_overview_link_in_nav'; export interface ObservabilityPluginSetup { dashboard: { register: typeof registerDataHandler }; @@ -43,5 +45,7 @@ export class Plugin implements PluginClass { + const update = jest.fn(); + afterEach(() => { + update.mockClear(); + }); + it('hides overview menu', () => { + const core = ({ + application: { + capabilities: { + navLinks: { + apm: false, + logs: false, + metrics: false, + uptime: false, + }, + }, + }, + chrome: { navLinks: { update } }, + } as unknown) as CoreStart; + toggleOverviewLinkInNav(core); + expect(update).toHaveBeenCalledWith('observability-overview', { hidden: true }); + }); + it('shows overview menu', () => { + const core = ({ + application: { + capabilities: { + navLinks: { + apm: true, + logs: false, + metrics: false, + uptime: false, + }, + }, + }, + chrome: { navLinks: { update } }, + } as unknown) as CoreStart; + toggleOverviewLinkInNav(core); + expect(update).not.toHaveBeenCalled(); + }); +}); diff --git a/x-pack/plugins/observability/public/toggle_overview_link_in_nav.tsx b/x-pack/plugins/observability/public/toggle_overview_link_in_nav.tsx new file mode 100644 index 00000000000000..c33ca45e4fcd81 --- /dev/null +++ b/x-pack/plugins/observability/public/toggle_overview_link_in_nav.tsx @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { CoreStart } from 'kibana/public'; + +export function toggleOverviewLinkInNav(core: CoreStart) { + const { apm, logs, metrics, uptime } = core.application.capabilities.navLinks; + const someVisible = Object.values({ apm, logs, metrics, uptime }).some((visible) => visible); + if (!someVisible) { + core.chrome.navLinks.update('observability-overview', { hidden: true }); + } +} diff --git a/x-pack/plugins/security/server/authentication/api_keys.test.ts b/x-pack/plugins/security/server/authentication/api_keys.test.ts index 631a6f9ab213c3..5164099f9ff672 100644 --- a/x-pack/plugins/security/server/authentication/api_keys.test.ts +++ b/x-pack/plugins/security/server/authentication/api_keys.test.ts @@ -162,7 +162,10 @@ describe('API Keys', () => { describe('grantAsInternalUser()', () => { it('returns null when security feature is disabled', async () => { mockLicense.isEnabled.mockReturnValue(false); - const result = await apiKeys.grantAsInternalUser(httpServerMock.createKibanaRequest()); + const result = await apiKeys.grantAsInternalUser(httpServerMock.createKibanaRequest(), { + name: 'test_api_key', + role_descriptors: {}, + }); expect(result).toBeNull(); expect(mockClusterClient.callAsInternalUser).not.toHaveBeenCalled(); @@ -174,21 +177,33 @@ describe('API Keys', () => { id: '123', name: 'key-name', api_key: 'abc123', + expires: '1d', }); const result = await apiKeys.grantAsInternalUser( httpServerMock.createKibanaRequest({ headers: { authorization: `Basic ${encodeToBase64('foo:bar')}`, }, - }) + }), + { + name: 'test_api_key', + role_descriptors: { foo: true }, + expiration: '1d', + } ); expect(result).toEqual({ api_key: 'abc123', id: '123', name: 'key-name', + expires: '1d', }); expect(mockClusterClient.callAsInternalUser).toHaveBeenCalledWith('shield.grantAPIKey', { body: { + api_key: { + name: 'test_api_key', + role_descriptors: { foo: true }, + expiration: '1d', + }, grant_type: 'password', username: 'foo', password: 'bar', @@ -208,7 +223,12 @@ describe('API Keys', () => { headers: { authorization: `Bearer foo-access-token`, }, - }) + }), + { + name: 'test_api_key', + role_descriptors: { foo: true }, + expiration: '1d', + } ); expect(result).toEqual({ api_key: 'abc123', @@ -217,6 +237,11 @@ describe('API Keys', () => { }); expect(mockClusterClient.callAsInternalUser).toHaveBeenCalledWith('shield.grantAPIKey', { body: { + api_key: { + name: 'test_api_key', + role_descriptors: { foo: true }, + expiration: '1d', + }, grant_type: 'access_token', access_token: 'foo-access-token', }, @@ -231,7 +256,12 @@ describe('API Keys', () => { headers: { authorization: `Digest username="foo"`, }, - }) + }), + { + name: 'test_api_key', + role_descriptors: { foo: true }, + expiration: '1d', + } ) ).rejects.toThrowErrorMatchingInlineSnapshot( `"Unsupported scheme \\"Digest\\" for granting API Key"` diff --git a/x-pack/plugins/security/server/authentication/api_keys.ts b/x-pack/plugins/security/server/authentication/api_keys.ts index 3b6aee72651e29..19922ce3c890d0 100644 --- a/x-pack/plugins/security/server/authentication/api_keys.ts +++ b/x-pack/plugins/security/server/authentication/api_keys.ts @@ -29,6 +29,7 @@ export interface CreateAPIKeyParams { } interface GrantAPIKeyParams { + api_key: CreateAPIKeyParams; grant_type: 'password' | 'access_token'; username?: string; password?: string; @@ -188,7 +189,7 @@ export class APIKeys { * Tries to grant an API key for the current user. * @param request Request instance. 
*/ - async grantAsInternalUser(request: KibanaRequest) { + async grantAsInternalUser(request: KibanaRequest, createParams: CreateAPIKeyParams) { if (!this.license.isEnabled()) { return null; } @@ -200,7 +201,7 @@ export class APIKeys { `Unable to grant an API Key, request does not contain an authorization header` ); } - const params = this.getGrantParams(authorizationHeader); + const params = this.getGrantParams(createParams, authorizationHeader); // User needs `manage_api_key` or `grant_api_key` privilege to use this API let result: GrantAPIKeyResult; @@ -281,9 +282,13 @@ export class APIKeys { return disabledFeature === 'api_keys'; } - private getGrantParams(authorizationHeader: HTTPAuthorizationHeader): GrantAPIKeyParams { + private getGrantParams( + createParams: CreateAPIKeyParams, + authorizationHeader: HTTPAuthorizationHeader + ): GrantAPIKeyParams { if (authorizationHeader.scheme.toLowerCase() === 'bearer') { return { + api_key: createParams, grant_type: 'access_token', access_token: authorizationHeader.credentials, }; @@ -294,6 +299,7 @@ export class APIKeys { authorizationHeader.credentials ); return { + api_key: createParams, grant_type: 'password', username: basicCredentials.username, password: basicCredentials.password, diff --git a/x-pack/plugins/security/server/authentication/index.test.ts b/x-pack/plugins/security/server/authentication/index.test.ts index 56d44e6628a872..a125d9a62afb70 100644 --- a/x-pack/plugins/security/server/authentication/index.test.ts +++ b/x-pack/plugins/security/server/authentication/index.test.ts @@ -374,7 +374,10 @@ describe('setupAuthentication()', () => { }); describe('grantAPIKeyAsInternalUser()', () => { - let grantAPIKeyAsInternalUser: (request: KibanaRequest) => Promise; + let grantAPIKeyAsInternalUser: ( + request: KibanaRequest, + params: CreateAPIKeyParams + ) => Promise; beforeEach(async () => { grantAPIKeyAsInternalUser = (await setupAuthentication(mockSetupAuthenticationParams)) .grantAPIKeyAsInternalUser; @@ -384,10 +387,13 @@ describe('setupAuthentication()', () => { const request = httpServerMock.createKibanaRequest(); const apiKeysInstance = jest.requireMock('./api_keys').APIKeys.mock.instances[0]; apiKeysInstance.grantAsInternalUser.mockResolvedValueOnce({ api_key: 'foo' }); - await expect(grantAPIKeyAsInternalUser(request)).resolves.toEqual({ + + const createParams = { name: 'test_key', role_descriptors: {} }; + + await expect(grantAPIKeyAsInternalUser(request, createParams)).resolves.toEqual({ api_key: 'foo', }); - expect(apiKeysInstance.grantAsInternalUser).toHaveBeenCalledWith(request); + expect(apiKeysInstance.grantAsInternalUser).toHaveBeenCalledWith(request, createParams); }); }); diff --git a/x-pack/plugins/security/server/authentication/index.ts b/x-pack/plugins/security/server/authentication/index.ts index 659a378388a13c..ed631e221b7a37 100644 --- a/x-pack/plugins/security/server/authentication/index.ts +++ b/x-pack/plugins/security/server/authentication/index.ts @@ -187,7 +187,8 @@ export async function setupAuthentication({ areAPIKeysEnabled: () => apiKeys.areAPIKeysEnabled(), createAPIKey: (request: KibanaRequest, params: CreateAPIKeyParams) => apiKeys.create(request, params), - grantAPIKeyAsInternalUser: (request: KibanaRequest) => apiKeys.grantAsInternalUser(request), + grantAPIKeyAsInternalUser: (request: KibanaRequest, params: CreateAPIKeyParams) => + apiKeys.grantAsInternalUser(request, params), invalidateAPIKey: (request: KibanaRequest, params: InvalidateAPIKeyParams) => apiKeys.invalidate(request, params), 
invalidateAPIKeyAsInternalUser: (params: InvalidateAPIKeyParams) => diff --git a/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts b/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts index f64462f71a87b0..fcea86be4ae9e1 100644 --- a/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts +++ b/x-pack/plugins/security_solution/common/endpoint/generate_data.test.ts @@ -120,7 +120,7 @@ describe('data generator', () => { it('creates all events with an empty ancestry array', () => { for (const event of tree.allEvents) { - expect(event.process.Ext.ancestry.length).toEqual(0); + expect(event.process.Ext!.ancestry!.length).toEqual(0); } }); }); @@ -188,24 +188,24 @@ describe('data generator', () => { }; const verifyAncestry = (event: Event, genTree: Tree) => { - if (event.process.Ext.ancestry!.length > 0) { - expect(event.process.parent?.entity_id).toBe(event.process.Ext.ancestry![0]); + if (event.process.Ext!.ancestry!.length > 0) { + expect(event.process.parent?.entity_id).toBe(event.process.Ext!.ancestry![0]); } - for (let i = 0; i < event.process.Ext.ancestry!.length; i++) { - const ancestor = event.process.Ext.ancestry![i]; + for (let i = 0; i < event.process.Ext!.ancestry!.length; i++) { + const ancestor = event.process.Ext!.ancestry![i]; const parent = genTree.children.get(ancestor) || genTree.ancestry.get(ancestor); expect(ancestor).toBe(parent?.lifecycle[0].process.entity_id); // the next ancestor should be the grandparent - if (i + 1 < event.process.Ext.ancestry!.length) { - const grandparent = event.process.Ext.ancestry![i + 1]; + if (i + 1 < event.process.Ext!.ancestry!.length) { + const grandparent = event.process.Ext!.ancestry![i + 1]; expect(grandparent).toBe(parent?.lifecycle[0].process.parent?.entity_id); } } }; it('has ancestry array defined', () => { - expect(tree.origin.lifecycle[0].process.Ext.ancestry!.length).toBe(ANCESTRY_LIMIT); + expect(tree.origin.lifecycle[0].process.Ext!.ancestry!.length).toBe(ANCESTRY_LIMIT); for (const event of tree.allEvents) { verifyAncestry(event, tree); } diff --git a/x-pack/plugins/security_solution/common/endpoint/generate_data.ts b/x-pack/plugins/security_solution/common/endpoint/generate_data.ts index 339e5554ccb121..66e786cb02e637 100644 --- a/x-pack/plugins/security_solution/common/endpoint/generate_data.ts +++ b/x-pack/plugins/security_solution/common/endpoint/generate_data.ts @@ -823,7 +823,7 @@ export class EndpointDocGenerator { timestamp, parentEntityID: ancestor.process.entity_id, // add the parent to the ancestry array - ancestry: [ancestor.process.entity_id, ...(ancestor.process.Ext.ancestry ?? [])], + ancestry: [ancestor.process.entity_id, ...(ancestor.process.Ext?.ancestry ?? 
[])], ancestryArrayLimit: opts.ancestryArraySize, parentPid: ancestor.process.pid, pid: this.randomN(5000), @@ -840,7 +840,7 @@ export class EndpointDocGenerator { parentEntityID: ancestor.process.parent?.entity_id, eventCategory: 'process', eventType: 'end', - ancestry: ancestor.process.Ext.ancestry, + ancestry: ancestor.process.Ext?.ancestry, ancestryArrayLimit: opts.ancestryArraySize, }) ); @@ -864,7 +864,7 @@ export class EndpointDocGenerator { timestamp, ancestor.process.entity_id, ancestor.process.parent?.entity_id, - ancestor.process.Ext.ancestry + ancestor.process.Ext?.ancestry ) ); return events; @@ -914,7 +914,7 @@ export class EndpointDocGenerator { parentEntityID: currentState.event.process.entity_id, ancestry: [ currentState.event.process.entity_id, - ...(currentState.event.process.Ext.ancestry ?? []), + ...(currentState.event.process.Ext?.ancestry ?? []), ], ancestryArrayLimit: opts.ancestryArraySize, }); @@ -938,7 +938,7 @@ export class EndpointDocGenerator { parentEntityID: child.process.parent?.entity_id, eventCategory: 'process', eventType: 'end', - ancestry: child.process.Ext.ancestry, + ancestry: child.process.Ext?.ancestry, ancestryArrayLimit: opts.ancestryArraySize, }); } @@ -984,7 +984,7 @@ export class EndpointDocGenerator { parentEntityID: node.process.parent?.entity_id, eventCategory: eventInfo.category, eventType: eventInfo.creationType, - ancestry: node.process.Ext.ancestry, + ancestry: node.process.Ext?.ancestry, }); } } @@ -1007,7 +1007,7 @@ export class EndpointDocGenerator { ts, node.process.entity_id, node.process.parent?.entity_id, - node.process.Ext.ancestry + node.process.Ext?.ancestry ); } } diff --git a/x-pack/plugins/security_solution/common/endpoint/models/event.ts b/x-pack/plugins/security_solution/common/endpoint/models/event.ts index 9b4550f52ff22f..f8a6807196557b 100644 --- a/x-pack/plugins/security_solution/common/endpoint/models/event.ts +++ b/x-pack/plugins/security_solution/common/endpoint/models/event.ts @@ -57,7 +57,9 @@ export function ancestryArray(event: ResolverEvent): string[] | undefined { if (isLegacyEvent(event)) { return undefined; } - return event.process.Ext.ancestry; + // this is to guard against the endpoint accidentally not sending the ancestry array + // otherwise the request will fail when really we should just try using the parent entity id + return event.process.Ext?.ancestry; } export function getAncestryAsArray(event: ResolverEvent | undefined): string[] { diff --git a/x-pack/plugins/security_solution/common/endpoint/types.ts b/x-pack/plugins/security_solution/common/endpoint/types.ts index b75d4b2190fe87..b477207b1c5a3e 100644 --- a/x-pack/plugins/security_solution/common/endpoint/types.ts +++ b/x-pack/plugins/security_solution/common/endpoint/types.ts @@ -334,13 +334,13 @@ export interface AlertEvent { start: number; thread?: ThreadFields[]; uptime: number; - Ext: { + Ext?: { /* * The array has a special format. The entity_ids towards the beginning of the array are closer ancestors and the * values towards the end of the array are more distant ancestors (grandparents). Therefore * ancestry_array[0] == process.parent.entity_id and ancestry_array[1] == process.parent.parent.entity_id */ - ancestry: string[]; + ancestry?: string[]; code_signature: Array<{ subject_name: string; trusted: boolean; @@ -539,8 +539,8 @@ export interface EndpointEvent { * values towards the end of the array are more distant ancestors (grandparents). 
Therefore * ancestry_array[0] == process.parent.entity_id and ancestry_array[1] == process.parent.parent.entity_id */ - Ext: { - ancestry: string[]; + Ext?: { + ancestry?: string[]; }; }; user?: { diff --git a/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx b/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx index 0a1f95d51e3009..a81c5facb07182 100644 --- a/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx +++ b/x-pack/plugins/security_solution/public/common/components/events_viewer/events_viewer.tsx @@ -67,6 +67,8 @@ interface Props { sort: Sort; toggleColumn: (column: ColumnHeaderOptions) => void; utilityBar?: (refetch: inputsModel.Refetch, totalCount: number) => React.ReactNode; + // If truthy, the graph viewer (Resolver) is showing + graphEventId: string | undefined; } const EventsViewerComponent: React.FC = ({ @@ -90,6 +92,7 @@ const EventsViewerComponent: React.FC = ({ sort, toggleColumn, utilityBar, + graphEventId, }) => { const columnsHeader = isEmpty(columns) ? defaultHeaders : columns; const kibana = useKibana(); @@ -191,22 +194,28 @@ const EventsViewerComponent: React.FC = ({ toggleColumn={toggleColumn} /> -