diff --git a/docs/developer/plugin/development-plugin-resources.asciidoc b/docs/developer/plugin/development-plugin-resources.asciidoc index 71c442aaf52e87..a2fd0e23d0be4a 100644 --- a/docs/developer/plugin/development-plugin-resources.asciidoc +++ b/docs/developer/plugin/development-plugin-resources.asciidoc @@ -66,3 +66,8 @@ To enable TypeScript support, create a `tsconfig.json` file at the root of your TypeScript code is automatically converted into JavaScript during development, but not in the distributable version of Kibana. If you use the {repo}blob/{branch}/packages/kbn-plugin-helpers[@kbn/plugin-helpers] to build your plugin, then your `.ts` and `.tsx` files will be permanently transpiled before your plugin is archived. If you have your own build process, make sure to run the TypeScript compiler on your source files and ship the compilation output so that your plugin will work with the distributable version of Kibana. + +==== {kib} platform migration guide + +{repo}blob/{branch}/src/core/MIGRATION.md#migrating-legacy-plugins-to-the-new-platform[This guide] +provides an action plan for moving a legacy plugin to the new platform. diff --git a/package.json b/package.json index 82a43537dc8468..e801f75b976f8f 100644 --- a/package.json +++ b/package.json @@ -120,7 +120,7 @@ "@elastic/charts": "^17.0.2", "@elastic/datemath": "5.0.2", "@elastic/ems-client": "7.6.0", - "@elastic/eui": "18.3.0", + "@elastic/eui": "19.0.0", "@elastic/filesaver": "1.1.2", "@elastic/good": "8.1.1-kibana2", "@elastic/numeral": "2.3.5", diff --git a/packages/kbn-config-schema/src/types/duration_type.test.ts b/packages/kbn-config-schema/src/types/duration_type.test.ts index 09e92ce727f2a2..57e917dc99b2b3 100644 --- a/packages/kbn-config-schema/src/types/duration_type.test.ts +++ b/packages/kbn-config-schema/src/types/duration_type.test.ts @@ -101,7 +101,7 @@ describe('#defaultValue', () => { source: duration({ defaultValue: 600 }), target: duration({ defaultValue: siblingRef('source') }), fromContext: duration({ defaultValue: contextRef('val') }), - }).validate(undefined, { val: momentDuration(700, 'ms') }) + }).validate({}, { val: momentDuration(700, 'ms') }) ).toMatchInlineSnapshot(` Object { "fromContext": "PT0.7S", @@ -115,7 +115,7 @@ Object { source: duration({ defaultValue: '1h' }), target: duration({ defaultValue: siblingRef('source') }), fromContext: duration({ defaultValue: contextRef('val') }), - }).validate(undefined, { val: momentDuration(2, 'hour') }) + }).validate({}, { val: momentDuration(2, 'hour') }) ).toMatchInlineSnapshot(` Object { "fromContext": "PT2H", @@ -129,7 +129,7 @@ Object { source: duration({ defaultValue: momentDuration(1, 'hour') }), target: duration({ defaultValue: siblingRef('source') }), fromContext: duration({ defaultValue: contextRef('val') }), - }).validate(undefined, { val: momentDuration(2, 'hour') }) + }).validate({}, { val: momentDuration(2, 'hour') }) ).toMatchInlineSnapshot(` Object { "fromContext": "PT2H", diff --git a/packages/kbn-config-schema/src/types/maybe_type.test.ts b/packages/kbn-config-schema/src/types/maybe_type.test.ts index ecc1d218e186d4..c35fa18593520a 100644 --- a/packages/kbn-config-schema/src/types/maybe_type.test.ts +++ b/packages/kbn-config-schema/src/types/maybe_type.test.ts @@ -60,3 +60,41 @@ test('includes namespace in failure', () => { const type = schema.maybe(schema.string()); expect(() => type.validate(null, {}, 'foo-namespace')).toThrowErrorMatchingSnapshot(); }); + +describe('maybe + object', () => { + test('returns undefined if undefined 
object', () => { + const type = schema.maybe(schema.object({})); + expect(type.validate(undefined)).toEqual(undefined); + }); + + test('returns undefined if undefined object with no defaults', () => { + const type = schema.maybe( + schema.object({ + type: schema.string(), + id: schema.string(), + }) + ); + + expect(type.validate(undefined)).toEqual(undefined); + }); + + test('returns empty object if maybe keys', () => { + const type = schema.object({ + name: schema.maybe(schema.string()), + }); + expect(type.validate({})).toEqual({}); + }); + + test('returns empty object if maybe nested object', () => { + const type = schema.object({ + name: schema.maybe( + schema.object({ + type: schema.string(), + id: schema.string(), + }) + ), + }); + + expect(type.validate({})).toEqual({}); + }); +}); diff --git a/packages/kbn-config-schema/src/types/maybe_type.ts b/packages/kbn-config-schema/src/types/maybe_type.ts index 06a93691102036..415f6315c57231 100644 --- a/packages/kbn-config-schema/src/types/maybe_type.ts +++ b/packages/kbn-config-schema/src/types/maybe_type.ts @@ -25,7 +25,7 @@ export class MaybeType extends Type { type .getSchema() .optional() - .default() + .default(() => undefined, 'undefined') ); } } diff --git a/packages/kbn-config-schema/src/types/object_type.test.ts b/packages/kbn-config-schema/src/types/object_type.test.ts index 5786984cf7ebdc..64739d7a4c4daa 100644 --- a/packages/kbn-config-schema/src/types/object_type.test.ts +++ b/packages/kbn-config-schema/src/types/object_type.test.ts @@ -30,6 +30,11 @@ test('returns value by default', () => { expect(type.validate(value)).toEqual({ name: 'test' }); }); +test('returns empty object if undefined', () => { + const type = schema.object({}); + expect(type.validate(undefined)).toEqual({}); +}); + test('properly parse the value if input is a string', () => { const type = schema.object({ name: schema.string(), @@ -112,14 +117,26 @@ test('undefined object within object', () => { }), }); + expect(type.validate(undefined)).toEqual({ + foo: { + bar: 'hello world', + }, + }); + expect(type.validate({})).toEqual({ foo: { bar: 'hello world', }, }); + + expect(type.validate({ foo: {} })).toEqual({ + foo: { + bar: 'hello world', + }, + }); }); -test('object within object with required', () => { +test('object within object with key without defaultValue', () => { const type = schema.object({ foo: schema.object({ bar: schema.string(), @@ -127,6 +144,9 @@ test('object within object with required', () => { }); const value = { foo: {} }; + expect(() => type.validate(undefined)).toThrowErrorMatchingInlineSnapshot( + `"[foo.bar]: expected value of type [string] but got [undefined]"` + ); expect(() => type.validate(value)).toThrowErrorMatchingInlineSnapshot( `"[foo.bar]: expected value of type [string] but got [undefined]"` ); diff --git a/packages/kbn-config-schema/src/types/object_type.ts b/packages/kbn-config-schema/src/types/object_type.ts index d2e6c708c263ca..4f3d68a6bac97d 100644 --- a/packages/kbn-config-schema/src/types/object_type.ts +++ b/packages/kbn-config-schema/src/types/object_type.ts @@ -33,23 +33,23 @@ export type ObjectResultType

= Readonly<{ [K in keyof P]: TypeO
 export type ObjectTypeOptions<P extends Props = any> = TypeOptions<
   { [K in keyof P]: TypeOf<P[K]> }
 > & {
+  /** Should unknown keys not defined in the schema be allowed? Defaults to `false` */
   allowUnknowns?: boolean;
 };
 
 export class ObjectType<P extends Props = any> extends Type<ObjectResultType<P>> {
   private props: Record<string, AnySchema>;
 
-  constructor(props: P, options: ObjectTypeOptions<P> = {}) {
+  constructor(props: P, { allowUnknowns = false, ...typeOptions }: ObjectTypeOptions<P>
= {}) { const schemaKeys = {} as Record; for (const [key, value] of Object.entries(props)) { schemaKeys[key] = value.getSchema(); } - const { allowUnknowns, ...typeOptions } = options; const schema = internals .object() .keys(schemaKeys) - .optional() .default() + .optional() .unknown(Boolean(allowUnknowns)); super(schema, typeOptions); diff --git a/packages/kbn-ui-shared-deps/package.json b/packages/kbn-ui-shared-deps/package.json index 0b1a31619fdf92..4b4db9d7f37f37 100644 --- a/packages/kbn-ui-shared-deps/package.json +++ b/packages/kbn-ui-shared-deps/package.json @@ -11,7 +11,7 @@ "devDependencies": { "@elastic/charts": "^17.0.2", "abort-controller": "^3.0.0", - "@elastic/eui": "18.3.0", + "@elastic/eui": "19.0.0", "@kbn/dev-utils": "1.0.0", "@kbn/i18n": "1.0.0", "@yarnpkg/lockfile": "^1.1.0", diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap index 98cb3ccf6dd91e..4126bd9d27ffd5 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/__snapshots__/no_results.test.js.snap @@ -77,12 +77,8 @@ Array [

- - + + 200 @@ -101,12 +97,8 @@ Array [
- - + + status:200 @@ -125,12 +117,8 @@ Array [
- - + + status:[400 TO 499] @@ -149,12 +137,8 @@ Array [
- - + + status:[400 TO 499] AND extension:PHP @@ -173,12 +157,8 @@ Array [
- - + + status:[400 TO 499] AND (extension:php OR extension:html) @@ -291,15 +271,9 @@ Array [
-
-
-              
+          
+
+              
                 {"reason":"Awful error"}
               
             
@@ -320,15 +294,9 @@ Array [
-
-
-              
+          
+
+              
                 {"reason":"Bad error"}
               
             
diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx index 77bbab97d95c7c..8db3c77ba0f472 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/histogram.tsx @@ -41,7 +41,7 @@ import { } from '@elastic/charts'; import { i18n } from '@kbn/i18n'; -import { EuiChartThemeType } from '@elastic/eui/src/themes/charts/themes'; +import { EuiChartThemeType } from '@elastic/eui/dist/eui_charts_theme'; import { Subscription } from 'rxjs'; import { getServices, timezoneProvider } from '../../../kibana_services'; diff --git a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js index 7de792c6129931..98a4a926a282e9 100644 --- a/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js +++ b/src/legacy/core_plugins/kibana/public/discover/np_ready/angular/directives/no_results.test.js @@ -36,6 +36,31 @@ jest.mock('../../../kibana_services', () => { }; }); +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code_block', () => { + const React = require.requireActual('react'); + return { + EuiCodeBlock: ({ children }) => ( +
+
+          {children}
+        
+
+ ), + }; +}); +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + beforeEach(() => { jest.clearAllMocks(); }); diff --git a/src/legacy/server/sass/build.test.js b/src/legacy/server/sass/build.test.js index 7092f6ad129217..46a898c30f84e1 100644 --- a/src/legacy/server/sass/build.test.js +++ b/src/legacy/server/sass/build.test.js @@ -47,28 +47,7 @@ it('builds light themed SASS', async () => { expect(readFileSync(targetPath, 'utf8').replace(/(\/\*# sourceMappingURL=).*( \*\/)/, '$1...$2')) .toMatchInlineSnapshot(` - "/* 1 */ - /* 1 */ - /** - * 1. Extend beta badges to at least 40% of the container's width - * 2. Fix for IE to ensure badges are visible outside of a
+ } + > + xpack.monitoring.collection.enabled + - xpack.monitoring.collection.enabled - + /> @@ -214,15 +221,22 @@ exports[`ExplainCollectionEnabled should explain about xpack.monitoring.collecti paddingSize="l" transparentBackground={false} > + + -1 +
+ } + > + -1 + - -1 - + /> diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap index ac3dce3bfaef6b..3cf35609acd07a 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/collection_interval/__tests__/__snapshots__/collection_interval.test.js.snap @@ -366,15 +366,22 @@ exports[`ExplainCollectionInterval collection interval setting updates should sh paddingSize="l" transparentBackground={false} > + + xpack.monitoring.collection.interval +
+ } + > + xpack.monitoring.collection.interval + - xpack.monitoring.collection.interval - + /> @@ -387,15 +394,22 @@ exports[`ExplainCollectionInterval collection interval setting updates should sh paddingSize="l" transparentBackground={false} > + + -1 +
+ } + > + -1 + - -1 - + /> @@ -682,15 +696,22 @@ exports[`ExplainCollectionInterval should explain about xpack.monitoring.collect paddingSize="l" transparentBackground={false} > + + xpack.monitoring.collection.interval +
+ } + > + xpack.monitoring.collection.interval + - xpack.monitoring.collection.interval - + /> @@ -703,15 +724,22 @@ exports[`ExplainCollectionInterval should explain about xpack.monitoring.collect paddingSize="l" transparentBackground={false} > + + -1 +
+ } + > + -1 + - -1 - + /> diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap index 89cd3e5852f82b..fb06ff2d866bbe 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/__snapshots__/exporters.test.js.snap @@ -26,32 +26,20 @@ Array [ >

We checked the - - + + esProd001 settings for - - + + xpack.monitoring.exporters , and found the reason: - - + + myMonitoringClusterExporter1 @@ -59,32 +47,20 @@ Array [

Using monitoring exporters to ship the monitoring data to a remote monitoring cluster is highly recommended as it keeps the integrity of the monitoring data safe no matter what the state of the production cluster. However, as this instance of Kibana could not find any monitoring data, there seems to be a problem with the - - + + xpack.monitoring.exporters configuration, or the - - + + xpack.monitoring.elasticsearch settings in - - + + kibana.yml @@ -92,22 +68,14 @@ Array [

Check that the intended exporters are enabled for sending statistics to the monitoring cluster, and that the monitoring cluster host matches the - - + + xpack.monitoring.elasticsearch setting in - - + + kibana.yml diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js index bdeb469daee467..c9147037f0022d 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/exporters/__tests__/exporters.test.js @@ -8,6 +8,19 @@ import React from 'react'; import { renderWithIntl } from '../../../../../../../../../test_utils/enzyme_helpers'; import { ExplainExporters, ExplainExportersCloud } from '../exporters'; +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + describe('ExplainExporters', () => { test('should explain about xpack.monitoring.exporters setting', () => { const reason = { diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap index 8871d8caadd1c9..63053c3f7c0cd0 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/__snapshots__/plugin_enabled.test.js.snap @@ -26,32 +26,20 @@ Array [ >

We checked the cluster settings and found that - - + + xpack.monitoring.enabled is set to - - + + false set, which disables monitoring. Removing the - - + + xpack.monitoring.enabled: false diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js index b962d136ba642f..56536a8e4270b1 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/explanations/plugin_enabled/__tests__/plugin_enabled.test.js @@ -8,6 +8,19 @@ import React from 'react'; import { renderWithIntl } from '../../../../../../../../../test_utils/enzyme_helpers'; import { ExplainPluginEnabled } from '../plugin_enabled'; +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + describe('ExplainPluginEnabled', () => { test('should explain about xpack.monitoring.enabled setting', () => { const reason = { diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap index fadf7c5757bf86..898be82b139d13 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/__snapshots__/reason_found.test.js.snap @@ -26,22 +26,14 @@ Array [ >

We checked the cluster settings and found that - - + + xpack.monitoring.collection.interval is set to - - + + -1 @@ -109,32 +101,20 @@ Array [ >

We checked the - - + + node001foo settings for - - + + xpack.monitoring.exporters , and found the reason: - - + + myMonitoringClusterExporter1 @@ -142,32 +122,20 @@ Array [

Using monitoring exporters to ship the monitoring data to a remote monitoring cluster is highly recommended as it keeps the integrity of the monitoring data safe no matter what the state of the production cluster. However, as this instance of Kibana could not find any monitoring data, there seems to be a problem with the - - + + xpack.monitoring.exporters configuration, or the - - + + xpack.monitoring.elasticsearch settings in - - + + kibana.yml @@ -175,22 +143,14 @@ Array [

Check that the intended exporters are enabled for sending statistics to the monitoring cluster, and that the monitoring cluster host matches the - - + + xpack.monitoring.elasticsearch setting in - - + + kibana.yml @@ -277,32 +237,20 @@ Array [ >

We checked the node001foo settings and found that - - + + xpack.monitoring.enabled is set to - - + + false set, which disables monitoring. Removing the - - + + xpack.monitoring.enabled: false diff --git a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js index a51817db324b73..e9b2ff11538abb 100644 --- a/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js +++ b/x-pack/legacy/plugins/monitoring/public/components/no_data/reasons/__tests__/reason_found.test.js @@ -8,6 +8,19 @@ import React from 'react'; import { renderWithIntl } from '../../../../../../../../test_utils/enzyme_helpers'; import { ReasonFound } from '../'; +// Mocking to prevent errors with React portal. +// Temporary until https://github.com/elastic/kibana/pull/55877 provides other alternatives. +jest.mock('@elastic/eui/lib/components/code/code', () => { + const React = require.requireActual('react'); + return { + EuiCode: ({ children }) => ( + + {children} + + ), + }; +}); + const enabler = {}; describe('ReasonFound', () => { diff --git a/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap b/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap index 65fc455417fe38..45751997eb0d50 100644 --- a/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap +++ b/x-pack/legacy/plugins/remote_clusters/public/app/sections/components/remote_cluster_form/__snapshots__/remote_cluster_form.test.js.snap @@ -5,27 +5,28 @@ Array [

-
+
-

- Name -

+ + +
-
-
+
+
-

- Seed nodes for cluster discovery -

+ + +
-
-
+
+
-

- Make remote cluster optional -

+ + +
-
+
,
{ +beforeEach(async () => { + mockReportingPlugin = await createMockReportingCore(); mockServer = createMockServer(''); }); @@ -148,56 +151,76 @@ describe('conditions', () => { }); test('uses basePath from job when creating saved object service', async () => { + const mockGetSavedObjectsClient = jest.fn(); + mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; + const permittedHeaders = { foo: 'bar', baz: 'quix', }; - const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, server: mockServer, }); - - const logo = 'custom-logo'; - mockServer.uiSettingsServiceFactory().get.mockReturnValue(logo); - const jobBasePath = '/sbp/s/marketing'; await getCustomLogo({ + reporting: mockReportingPlugin, job: { basePath: jobBasePath } as JobDocPayloadPDF, conditionalHeaders, server: mockServer, }); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.mock.calls[0][0].getBasePath()).toBe( - jobBasePath - ); + const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; + expect(getBasePath()).toBe(jobBasePath); }); test(`uses basePath from server if job doesn't have a basePath when creating saved object service`, async () => { + const mockGetSavedObjectsClient = jest.fn(); + mockReportingPlugin.getSavedObjectsClient = mockGetSavedObjectsClient; + const permittedHeaders = { foo: 'bar', baz: 'quix', }; - const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayload, filteredHeaders: permittedHeaders, server: mockServer, }); - const logo = 'custom-logo'; - mockServer.uiSettingsServiceFactory().get.mockReturnValue(logo); - await getCustomLogo({ + reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, server: mockServer, }); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.mock.calls[0][0].getBasePath()).toBe( - '/sbp' - ); + const getBasePath = mockGetSavedObjectsClient.mock.calls[0][0].getBasePath; + expect(getBasePath()).toBe(`/sbp`); + expect(mockGetSavedObjectsClient.mock.calls[0]).toMatchInlineSnapshot(` + Array [ + Object { + "getBasePath": [Function], + "headers": Object { + "baz": "quix", + "foo": "bar", + }, + "path": "/", + "raw": Object { + "req": Object { + "url": "/", + }, + }, + "route": Object { + "settings": Object {}, + }, + "url": Object { + "href": "/", + }, + }, + ] + `); }); describe('config formatting', () => { diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts index ff2c44026315dc..fa53f474dfba7b 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.test.ts @@ -4,12 +4,16 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { createMockServer } from '../../../test_helpers/create_mock_server'; -import { getConditionalHeaders, getCustomLogo } from './index'; +import { ReportingCore } from '../../../server'; +import { createMockReportingCore, createMockServer } from '../../../test_helpers'; +import { ServerFacade } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; +import { getConditionalHeaders, getCustomLogo } from './index'; -let mockServer: any; -beforeEach(() => { +let mockReportingPlugin: ReportingCore; +let mockServer: ServerFacade; +beforeEach(async () => { + mockReportingPlugin = await createMockReportingCore(); mockServer = createMockServer(''); }); @@ -19,6 +23,17 @@ test(`gets logo from uiSettings`, async () => { baz: 'quix', }; + const mockGet = jest.fn(); + mockGet.mockImplementationOnce((...args: any[]) => { + if (args[0] === 'xpackReporting:customPdfLogo') { + return 'purple pony'; + } + throw new Error('wrong caller args!'); + }); + mockReportingPlugin.getUiSettingsServiceFactory = jest.fn().mockResolvedValue({ + get: mockGet, + }); + const conditionalHeaders = await getConditionalHeaders({ job: {} as JobDocPayloadPDF, filteredHeaders: permittedHeaders, @@ -26,12 +41,12 @@ test(`gets logo from uiSettings`, async () => { }); const { logo } = await getCustomLogo({ + reporting: mockReportingPlugin, job: {} as JobDocPayloadPDF, conditionalHeaders, server: mockServer, }); - mockServer.uiSettingsServiceFactory().get.mockReturnValue(logo); - - expect(mockServer.uiSettingsServiceFactory().get).toBeCalledWith('xpackReporting:customPdfLogo'); + expect(mockGet).toBeCalledWith('xpackReporting:customPdfLogo'); + expect(logo).toBe('purple pony'); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts index 0059276f6df718..7af5edab41ab77 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_custom_logo.ts @@ -5,14 +5,17 @@ */ import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../common/constants'; +import { ReportingCore } from '../../../server'; import { ConditionalHeaders, ServerFacade } from '../../../types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only export const getCustomLogo = async ({ + reporting, server, job, conditionalHeaders, }: { + reporting: ReportingCore; server: ServerFacade; job: JobDocPayloadPDF; conditionalHeaders: ConditionalHeaders; @@ -27,19 +30,12 @@ export const getCustomLogo = async ({ getBasePath: () => job.basePath || serverBasePath, path: '/', route: { settings: {} }, - url: { - href: '/', - }, - raw: { - req: { - url: '/', - }, - }, + url: { href: '/' }, + raw: { req: { url: '/' } }, }; - const savedObjects = server.savedObjects; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient(fakeRequest); - const uiSettings = server.uiSettingsServiceFactory({ savedObjectsClient }); + const savedObjectsClient = await reporting.getSavedObjectsClient(fakeRequest); + const uiSettings = await reporting.getUiSettingsServiceFactory(savedObjectsClient); const logo: string = await uiSettings.get(UI_SETTINGS_CUSTOM_PDF_LOGO); return { conditionalHeaders, logo }; }; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts index 
9b2a065427f70b..27e772195f7260 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/execute_job/get_full_urls.test.ts @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -import { createMockServer } from '../../../test_helpers/create_mock_server'; +import { createMockServer } from '../../../test_helpers'; import { ServerFacade } from '../../../types'; import { JobDocPayloadPNG } from '../../png/types'; import { JobDocPayloadPDF } from '../../printable_pdf/types'; diff --git a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts index 9fd3ee391ddbbf..62b5e29e88ecfd 100644 --- a/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/index.ts @@ -5,19 +5,19 @@ */ import * as Rx from 'rxjs'; -import { first, concatMap, take, toArray, mergeMap } from 'rxjs/operators'; -import { ServerFacade, CaptureConfig, HeadlessChromiumDriverFactory } from '../../../../types'; -import { ScreenshotResults, ScreenshotObservableOpts } from './types'; -import { injectCustomCss } from './inject_css'; -import { openUrl } from './open_url'; -import { waitForRenderComplete } from './wait_for_render'; -import { getNumberOfItems } from './get_number_of_items'; -import { waitForElementsToBeInDOM } from './wait_for_dom_elements'; -import { getTimeRange } from './get_time_range'; +import { concatMap, first, mergeMap, take, toArray } from 'rxjs/operators'; +import { CaptureConfig, HeadlessChromiumDriverFactory, ServerFacade } from '../../../../types'; import { getElementPositionAndAttributes } from './get_element_position_data'; +import { getNumberOfItems } from './get_number_of_items'; import { getScreenshots } from './get_screenshots'; +import { getTimeRange } from './get_time_range'; +import { injectCustomCss } from './inject_css'; +import { openUrl } from './open_url'; import { scanPage } from './scan_page'; import { skipTelemetry } from './skip_telemetry'; +import { ScreenshotObservableOpts, ScreenshotResults } from './types'; +import { waitForElementsToBeInDOM } from './wait_for_dom_elements'; +import { waitForRenderComplete } from './wait_for_render'; export function screenshotsObservableFactory( server: ServerFacade, diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts index 063ac7f77704cc..7ea67277015ab6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/create_job.ts @@ -4,19 +4,20 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib/crypto'; import { - CreateJobFactory, ConditionalHeaders, - ServerFacade, - RequestFacade, + CreateJobFactory, ESQueueCreateJobFn, + RequestFacade, + ServerFacade, } from '../../../types'; import { JobParamsDiscoverCsv } from '../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { const crypto = cryptoFactory(server); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index b21d6283320274..f12916b734dbf6 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -9,6 +9,7 @@ import sinon from 'sinon'; import nodeCrypto from '@elastic/node-crypto'; import { CancellationToken } from '../../../common/cancellation_token'; import { fieldFormats } from '../../../../../../../src/plugins/data/server'; +import { createMockReportingCore } from '../../../test_helpers'; import { LevelLogger } from '../../../server/lib/level_logger'; import { executeJobFactory } from './execute_job'; import { setFieldFormats } from '../../../server/services'; @@ -36,16 +37,19 @@ describe('CSV Execute Job', function() { let encryptedHeaders; let cancellationToken; + let mockReportingPlugin; let mockServer; let clusterStub; let callAsCurrentUserStub; - let uiSettingsGetStub; const mockElasticsearch = { dataClient: { asScoped: () => clusterStub, }, }; + const mockUiSettingsClient = { + get: sinon.stub(), + }; beforeAll(async function() { const crypto = nodeCrypto({ encryptionKey }); @@ -53,6 +57,8 @@ describe('CSV Execute Job', function() { }); beforeEach(async function() { + mockReportingPlugin = await createMockReportingCore(); + mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; cancellationToken = new CancellationToken(); defaultElasticsearchResponse = { @@ -70,9 +76,8 @@ describe('CSV Execute Job', function() { .resolves(defaultElasticsearchResponse); const configGetStub = sinon.stub(); - uiSettingsGetStub = sinon.stub(); - uiSettingsGetStub.withArgs('csv:separator').returns(','); - uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); + mockUiSettingsClient.get.withArgs('csv:separator').returns(','); + mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); setFieldFormats({ fieldFormatServiceFactory: function() { @@ -90,26 +95,11 @@ describe('CSV Execute Job', function() { }); mockServer = { - expose: function() {}, - plugins: { - elasticsearch: { - getCluster: function() { - return clusterStub; - }, - }, - }, config: function() { return { get: configGetStub, }; }, - savedObjects: { - getScopedSavedObjectsClient: sinon.stub(), - }, - uiSettingsServiceFactory: sinon.stub().returns({ - get: uiSettingsGetStub, - }), - log: function() {}, }; mockServer .config() @@ -125,83 +115,14 @@ describe('CSV Execute Job', function() { .returns({}); }); - describe('calls getScopedSavedObjectsClient with request', function() { - it('containing decrypted headers', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - await executeJob( - 'job456', - { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, - 
cancellationToken - ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].headers).toEqual( - headers - ); - }); - - it(`containing getBasePath() returning server's basePath if the job doesn't have one`, async function() { - const serverBasePath = '/foo-server/basePath/'; - mockServer - .config() - .get.withArgs('server.basePath') - .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - await executeJob( - 'job456', - { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, - cancellationToken - ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); - expect( - mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].getBasePath() - ).toEqual(serverBasePath); - }); - - it(`containing getBasePath() returning job's basePath if the job has one`, async function() { - const serverBasePath = '/foo-server/basePath/'; - mockServer - .config() - .get.withArgs('server.basePath') - .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - const jobBasePath = 'foo-job/basePath/'; - await executeJob( - 'job789', - { - headers: encryptedHeaders, - fields: [], - searchRequest: { index: null, body: null }, - basePath: jobBasePath, - }, - cancellationToken - ); - expect(mockServer.savedObjects.getScopedSavedObjectsClient.calledOnce).toBe(true); - expect( - mockServer.savedObjects.getScopedSavedObjectsClient.firstCall.args[0].getBasePath() - ).toEqual(jobBasePath); - }); - }); - - describe('uiSettings', function() { - it('passed scoped SavedObjectsClient to uiSettingsServiceFactory', async function() { - const returnValue = Symbol(); - mockServer.savedObjects.getScopedSavedObjectsClient.returns(returnValue); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); - await executeJob( - 'job456', - { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, - cancellationToken - ); - expect(mockServer.uiSettingsServiceFactory.calledOnce).toBe(true); - expect(mockServer.uiSettingsServiceFactory.firstCall.args[0].savedObjectsClient).toBe( - returnValue - ); - }); - }); - describe('basic Elasticsearch call behavior', function() { it('should decrypt encrypted headers and pass to callAsCurrentUser', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -217,7 +138,12 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const job = { headers: encryptedHeaders, fields: [], @@ -244,7 +170,12 @@ describe('CSV Execute Job', function() { _scroll_id: scrollId, }); callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: 
encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -258,7 +189,12 @@ describe('CSV Execute Job', function() { }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -288,7 +224,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -323,7 +264,12 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -351,7 +297,12 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -381,7 +332,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -409,7 +365,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -437,7 +398,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -465,7 +431,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -485,7 +456,12 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { callAsCurrentUserStub.rejects(new Error()); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await 
executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -504,7 +480,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); callAsCurrentUserStub.onSecondCall().rejects(new Error()); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -525,7 +506,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -546,7 +532,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -574,7 +565,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -602,7 +598,12 @@ describe('CSV Execute Job', function() { _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: [], @@ -638,7 +639,12 @@ describe('CSV Execute Job', function() { }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -653,7 +659,12 @@ describe('CSV Execute Job', function() { }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -667,7 +678,12 @@ describe('CSV Execute Job', function() { }); it('should call clearScroll if it got a scrollId', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -685,7 +701,12 @@ describe('CSV Execute Job', function() { describe('csv content', function() { it('should write column headers to output, even if there are no results', async 
function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -696,8 +717,13 @@ describe('CSV Execute Job', function() { }); it('should use custom uiSettings csv:separator for header', async function() { - uiSettingsGetStub.withArgs('csv:separator').returns(';'); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + mockUiSettingsClient.get.withArgs('csv:separator').returns(';'); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -708,8 +734,13 @@ describe('CSV Execute Job', function() { }); it('should escape column headers if uiSettings csv:quoteValues is true', async function() { - uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -720,8 +751,13 @@ describe('CSV Execute Job', function() { }); it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { - uiSettingsGetStub.withArgs('csv:quoteValues').returns(false); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -732,7 +768,12 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], @@ -752,7 +793,12 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -773,7 +819,12 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -801,7 +852,12 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async 
function() { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], @@ -846,7 +902,12 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(1); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -879,7 +940,12 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(9); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -919,7 +985,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -948,6 +1019,7 @@ describe('CSV Execute Job', function() { let maxSizeReached; beforeEach(async function() { + mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient; mockServer .config() .get.withArgs('xpack.reporting.csv.maxSizeBytes') @@ -960,7 +1032,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1000,7 +1077,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1029,7 +1111,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1058,7 +1145,12 @@ describe('CSV Execute Job', function() { _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + const executeJob = await executeJobFactory( + mockReportingPlugin, + mockServer, + mockElasticsearch, + mockLogger + ); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index 9f94a755cf6555..15799858910532 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ 
b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -4,20 +4,26 @@ * you may not use this file except in compliance with the Elastic License. */ -import Hapi from 'hapi'; import { i18n } from '@kbn/i18n'; -import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../src/core/server'; +import Hapi from 'hapi'; +import { + ElasticsearchServiceSetup, + IUiSettingsClient, + KibanaRequest, +} from '../../../../../../../src/core/server'; import { CSV_JOB_TYPE } from '../../../common/constants'; +import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; +import { getFieldFormats } from '../../../server/services'; import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types'; import { JobDocPayloadDiscoverCsv } from '../types'; import { fieldFormatMapFactory } from './lib/field_format_map'; import { createGenerateCsv } from './lib/generate_csv'; -import { getFieldFormats } from '../../../server/services'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn( +>> = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger @@ -40,83 +46,78 @@ export const executeJobFactory: ExecuteJobFactory { + let decryptedHeaders; + try { + decryptedHeaders = await crypto.decrypt(headers); + } catch (err) { + logger.error(err); + throw new Error( + i18n.translate( + 'xpack.reporting.exportTypes.csv.executeJob.failedToDecryptReportJobDataErrorMessage', + { + defaultMessage: 'Failed to decrypt report job data. Please ensure that {encryptionKey} is set and re-generate this report. {err}', + values: { encryptionKey: 'xpack.reporting.encryptionKey', err: err.toString() }, + } + ) + ); // prettier-ignore + } + return decryptedHeaders; + }; - const fakeRequest = { - headers: decryptedHeaders, + const fakeRequest = KibanaRequest.from({ + headers: await decryptHeaders(), // This is used by the spaces SavedObjectClientWrapper to determine the existing space. 
// We use the basePath from the saved job, which we'll have post spaces being implemented; // or we use the server base path, which uses the default space getBasePath: () => basePath || serverBasePath, path: '/', route: { settings: {} }, - url: { - href: '/', - }, - raw: { - req: { - url: '/', - }, - }, - }; + url: { href: '/' }, + raw: { req: { url: '/' } }, + } as Hapi.Request); + + const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(fakeRequest); + const callEndpoint = (endpoint: string, clientParams = {}, options = {}) => + callAsCurrentUser(endpoint, clientParams, options); - const { callAsCurrentUser } = elasticsearch.dataClient.asScoped( - KibanaRequest.from(fakeRequest as Hapi.Request) - ); - const callEndpoint = (endpoint: string, clientParams = {}, options = {}) => { - return callAsCurrentUser(endpoint, clientParams, options); + const savedObjectsClient = await reporting.getSavedObjectsClient(fakeRequest); + const uiSettingsClient = await reporting.getUiSettingsServiceFactory(savedObjectsClient); + + const getFormatsMap = async (client: IUiSettingsClient) => { + const fieldFormats = await getFieldFormats().fieldFormatServiceFactory(client); + return fieldFormatMapFactory(indexPatternSavedObject, fieldFormats); }; - const savedObjects = server.savedObjects; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient( - (fakeRequest as unknown) as KibanaRequest - ); - const uiConfig = server.uiSettingsServiceFactory({ - savedObjectsClient, - }); + const getUiSettings = async (client: IUiSettingsClient) => { + const [separator, quoteValues, timezone] = await Promise.all([ + client.get('csv:separator'), + client.get('csv:quoteValues'), + client.get('dateFormat:tz'), + ]); - const [formatsMap, uiSettings] = await Promise.all([ - (async () => { - const fieldFormats = await getFieldFormats().fieldFormatServiceFactory(uiConfig); - return fieldFormatMapFactory(indexPatternSavedObject, fieldFormats); - })(), - (async () => { - const [separator, quoteValues, timezone] = await Promise.all([ - uiConfig.get('csv:separator'), - uiConfig.get('csv:quoteValues'), - uiConfig.get('dateFormat:tz'), - ]); + if (timezone === 'Browser') { + logger.warn( + i18n.translate('xpack.reporting.exportTypes.csv.executeJob.dateFormateSetting', { + defaultMessage: 'Kibana Advanced Setting "{dateFormatTimezone}" is set to "Browser". Dates will be formatted as UTC to avoid ambiguity.', + values: { dateFormatTimezone: 'dateFormat:tz' } + }) + ); // prettier-ignore + } - if (timezone === 'Browser') { - jobLogger.warn( - `Kibana Advanced Setting "dateFormat:tz" is set to "Browser". 
Dates will be formatted as UTC to avoid ambiguity.` - ); - } + return { + separator, + quoteValues, + timezone, + }; + }; - return { - separator, - quoteValues, - timezone, - }; - })(), + const [formatsMap, uiSettings] = await Promise.all([ + getFormatsMap(uiSettingsClient), + getUiSettings(uiSettingsClient), ]); const generateCsv = createGenerateCsv(jobLogger); diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts index e1459e195d9f63..dac963635c469e 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/lib/field_format_map.ts @@ -9,15 +9,7 @@ import { FieldFormatConfig, IFieldFormatsRegistry, } from '../../../../../../../../src/plugins/data/server'; - -interface IndexPatternSavedObject { - attributes: { - fieldFormatMap: string; - }; - id: string; - type: string; - version: string; -} +import { IndexPatternSavedObject } from '../../../../types'; /** * Create a map of FieldFormat instances for index pattern fields diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index ddef2aa0a62688..17072d311b35f3 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -5,9 +5,10 @@ */ import { notFound, notImplemented } from 'boom'; -import { get } from 'lodash'; import { ElasticsearchServiceSetup } from 'kibana/server'; +import { get } from 'lodash'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; +import { ReportingCore } from '../../../../server'; import { cryptoFactory } from '../../../../server/lib'; import { CreateJobFactory, @@ -37,6 +38,7 @@ interface VisData { export const createJobFactory: CreateJobFactory> = function createJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index b1b7b7d818200e..6bb3e73fcfe84a 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -7,6 +7,7 @@ import { i18n } from '@kbn/i18n'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; +import { ReportingCore } from '../../../server'; import { cryptoFactory } from '../../../server/lib'; import { ExecuteJobFactory, @@ -22,13 +23,15 @@ import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn( +>> = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger ) { const crypto = cryptoFactory(server); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); + const generateCsv = createGenerateCsv(reporting, server, elasticsearch, parentLogger); return async function executeJob( 
jobId: string | null, @@ -86,11 +89,8 @@ export const executeJobFactory: ExecuteJobFactory { +const getEsQueryConfig = async (config: IUiSettingsClient) => { const configs = await Promise.all([ config.get('query:allowLeadingWildcards'), config.get('query:queryString:options'), @@ -49,7 +53,7 @@ const getEsQueryConfig = async (config: any) => { } as EsQueryConfig; }; -const getUiSettings = async (config: any) => { +const getUiSettings = async (config: IUiSettingsClient) => { const configs = await Promise.all([config.get('csv:separator'), config.get('csv:quoteValues')]); const [separator, quoteValues] = configs; return { separator, quoteValues }; @@ -57,14 +61,14 @@ const getUiSettings = async (config: any) => { export async function generateCsvSearch( req: RequestFacade, + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, logger: Logger, searchPanel: SearchPanel, jobParams: JobParamsDiscoverCsv ): Promise { - const { savedObjects, uiSettingsServiceFactory } = server; - const savedObjectsClient = savedObjects.getScopedSavedObjectsClient( + const savedObjectsClient = await reporting.getSavedObjectsClient( KibanaRequest.from(req.getRawRequest()) ); const { indexPatternSavedObjectId, timerange } = searchPanel; @@ -73,7 +77,8 @@ export async function generateCsvSearch( savedObjectsClient, indexPatternSavedObjectId ); - const uiConfig = uiSettingsServiceFactory({ savedObjectsClient }); + + const uiConfig = await reporting.getUiSettingsServiceFactory(savedObjectsClient); const esQueryConfig = await getEsQueryConfig(uiConfig); const { diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts index 3f03246106d3ef..a6911e1f147040 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/create_job/index.ts @@ -4,20 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { validateUrls } from '../../../../common/validate_urls'; +import { ReportingCore } from '../../../../server'; +import { cryptoFactory } from '../../../../server/lib/crypto'; import { + ConditionalHeaders, CreateJobFactory, - ServerFacade, - RequestFacade, ESQueueCreateJobFn, - ConditionalHeaders, + RequestFacade, + ServerFacade, } from '../../../../types'; -import { validateUrls } from '../../../../common/validate_urls'; -import { cryptoFactory } from '../../../../server/lib/crypto'; import { JobParamsPNG } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { const crypto = cryptoFactory(server); return async function createJob( diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index bb33ef9c19a1dd..c0c21119e1d53c 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -6,6 +6,7 @@ import * as Rx from 'rxjs'; import { memoize } from 'lodash'; +import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; import { generatePngObservableFactory } from '../lib/generate_png'; @@ -19,7 +20,11 @@ const cancellationToken = { let config; let mockServer; -beforeEach(() => { +let mockReporting; + +beforeEach(async () => { + mockReporting = await createMockReportingCore(); + config = { 'xpack.reporting.encryptionKey': 'testencryptionkey', 'server.basePath': '/sbp', @@ -27,18 +32,11 @@ beforeEach(() => { 'server.port': 5601, }; mockServer = { - expose: () => {}, // NOTE: this is for oncePerServer config: memoize(() => ({ get: jest.fn() })), info: { protocol: 'http', }, - savedObjects: { - getScopedSavedObjectsClient: jest.fn(), - }, - uiSettingsServiceFactory: jest.fn().mockReturnValue({ get: jest.fn() }), - log: jest.fn(), }; - mockServer.config().get.mockImplementation(key => { return config[key]; }); @@ -67,9 +65,12 @@ test(`passes browserTimezone to generatePng`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -87,9 +88,15 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger(), + { + browserDriverFactory: {}, + } + ); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -109,9 +116,15 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const 
executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger(), + { + browserDriverFactory: {}, + } + ); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index c9f370197da662..5cde2450809149 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -7,14 +7,9 @@ import * as Rx from 'rxjs'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; +import { ReportingCore } from '../../../../server'; import { PNG_JOB_TYPE } from '../../../../common/constants'; -import { - ServerFacade, - ExecuteJobFactory, - ESQueueWorkerExecuteFn, - HeadlessChromiumDriverFactory, - Logger, -} from '../../../../types'; +import { ServerFacade, ExecuteJobFactory, ESQueueWorkerExecuteFn, Logger } from '../../../../types'; import { decryptJobHeaders, omitBlacklistedHeaders, @@ -26,12 +21,13 @@ import { generatePngObservableFactory } from '../lib/generate_png'; type QueuedPngExecutorFactory = ExecuteJobFactory>; -export const executeJobFactory: QueuedPngExecutorFactory = function executeJobFactoryFn( +export const executeJobFactory: QueuedPngExecutorFactory = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger, - { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory } + parentLogger: Logger ) { + const browserDriverFactory = await reporting.getBrowserDriverFactory(); const generatePngObservable = generatePngObservableFactory(server, browserDriverFactory); const logger = parentLogger.clone([PNG_JOB_TYPE, 'execute']); diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts index a8cc71175cffeb..656c99991e1f61 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/create_job/index.ts @@ -4,20 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { validateUrls } from '../../../../common/validate_urls'; +import { ReportingCore } from '../../../../server'; +import { cryptoFactory } from '../../../../server/lib/crypto'; import { + ConditionalHeaders, CreateJobFactory, ESQueueCreateJobFn, - ServerFacade, RequestFacade, - ConditionalHeaders, + ServerFacade, } from '../../../../types'; -import { validateUrls } from '../../../../common/validate_urls'; -import { cryptoFactory } from '../../../../server/lib/crypto'; import { JobParamsPDF } from '../../types'; export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade) { +>> = function createJobFactoryFn(reporting: ReportingCore, server: ServerFacade) { const crypto = cryptoFactory(server); return async function createJobFn( diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index c21d39f4922cbd..cc6b298bebdc54 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -6,6 +6,7 @@ import * as Rx from 'rxjs'; import { memoize } from 'lodash'; +import { createMockReportingCore } from '../../../../test_helpers'; import { cryptoFactory } from '../../../../server/lib/crypto'; import { executeJobFactory } from './index'; import { generatePdfObservableFactory } from '../lib/generate_pdf'; @@ -19,7 +20,11 @@ const cancellationToken = { let config; let mockServer; -beforeEach(() => { +let mockReporting; + +beforeEach(async () => { + mockReporting = await createMockReportingCore(); + config = { 'xpack.reporting.encryptionKey': 'testencryptionkey', 'server.basePath': '/sbp', @@ -27,18 +32,11 @@ beforeEach(() => { 'server.port': 5601, }; mockServer = { - expose: jest.fn(), - log: jest.fn(), config: memoize(() => ({ get: jest.fn() })), info: { protocol: 'http', }, - savedObjects: { - getScopedSavedObjectsClient: jest.fn(), - }, - uiSettingsServiceFactory: jest.fn().mockReturnValue({ get: jest.fn() }), }; - mockServer.config().get.mockImplementation(key => { return config[key]; }); @@ -60,38 +58,13 @@ const encryptHeaders = async headers => { return await crypto.encrypt(headers); }; -test(`passes browserTimezone to generatePdf`, async () => { - const encryptedHeaders = await encryptHeaders({}); - - const generatePdfObservable = generatePdfObservableFactory(); - generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(''))); - - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); - const browserTimezone = 'UTC'; - await executeJob( - 'pdfJobId', - { relativeUrls: [], browserTimezone, headers: encryptedHeaders }, - cancellationToken - ); - - expect(mockServer.uiSettingsServiceFactory().get).toBeCalledWith('xpackReporting:customPdfLogo'); - expect(generatePdfObservable).toBeCalledWith( - expect.any(LevelLogger), - undefined, - [], - browserTimezone, - expect.anything(), - undefined, - undefined - ); -}); - test(`returns content_type of application/pdf`, async () => { - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = 
generatePdfObservableFactory(); @@ -111,9 +84,12 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { - browserDriverFactory: {}, - }); + const executeJob = await executeJobFactory( + mockReporting, + mockServer, + mockElasticsearch, + getMockLogger() + ); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts index 162376e31216e0..e8461862bee823 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts @@ -7,13 +7,8 @@ import * as Rx from 'rxjs'; import { ElasticsearchServiceSetup } from 'kibana/server'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; -import { - ServerFacade, - ExecuteJobFactory, - ESQueueWorkerExecuteFn, - HeadlessChromiumDriverFactory, - Logger, -} from '../../../../types'; +import { ReportingCore } from '../../../../server'; +import { ServerFacade, ExecuteJobFactory, ESQueueWorkerExecuteFn, Logger } from '../../../../types'; import { JobDocPayloadPDF } from '../../types'; import { PDF_JOB_TYPE } from '../../../../common/constants'; import { generatePdfObservableFactory } from '../lib/generate_pdf'; @@ -27,12 +22,13 @@ import { type QueuedPdfExecutorFactory = ExecuteJobFactory>; -export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFactoryFn( +export const executeJobFactory: QueuedPdfExecutorFactory = async function executeJobFactoryFn( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger, - { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory } + parentLogger: Logger ) { + const browserDriverFactory = await reporting.getBrowserDriverFactory(); const generatePdfObservable = generatePdfObservableFactory(server, browserDriverFactory); const logger = parentLogger.clone([PDF_JOB_TYPE, 'execute']); @@ -43,7 +39,7 @@ export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFa mergeMap(() => decryptJobHeaders({ server, job, logger })), map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })), map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })), - mergeMap(conditionalHeaders => getCustomLogo({ server, job, conditionalHeaders })), + mergeMap(conditionalHeaders => getCustomLogo({ reporting, server, job, conditionalHeaders })), mergeMap(({ logo, conditionalHeaders }) => { const urls = getFullUrls({ server, job }); diff --git a/x-pack/legacy/plugins/reporting/index.ts b/x-pack/legacy/plugins/reporting/index.ts index cbafc4b1ecc4b3..9ce4e807f8ef86 100644 --- a/x-pack/legacy/plugins/reporting/index.ts +++ b/x-pack/legacy/plugins/reporting/index.ts @@ -10,7 +10,7 @@ import { resolve } from 'path'; import { PLUGIN_ID, UI_SETTINGS_CUSTOM_PDF_LOGO } from './common/constants'; import { config as reportingConfig } from './config'; import { legacyInit } from './server/legacy'; -import { ReportingConfigOptions, ReportingPluginSpecOptions } from './types.d'; +import { 
ReportingConfigOptions, ReportingPluginSpecOptions } from './types'; const kbToBase64Length = (kb: number) => { return Math.floor((kb * 1024 * 8) / 6); diff --git a/x-pack/legacy/plugins/reporting/public/components/__snapshots__/report_listing.test.tsx.snap b/x-pack/legacy/plugins/reporting/public/components/__snapshots__/report_listing.test.tsx.snap index 3b6d7a0b5f0032..b5304c6020c43e 100644 --- a/x-pack/legacy/plugins/reporting/public/components/__snapshots__/report_listing.test.tsx.snap +++ b/x-pack/legacy/plugins/reporting/public/components/__snapshots__/report_listing.test.tsx.snap @@ -91,10 +91,7 @@ Array [ > (); + private readonly pluginStart$ = new Rx.ReplaySubject(); + private exportTypesRegistry = getExportTypesRegistry(); + + constructor(private logger: LevelLogger) {} + + legacySetup( + xpackMainPlugin: XPackMainPlugin, + reporting: ReportingPluginSpecOptions, + __LEGACY: ServerFacade, + plugins: ReportingSetupDeps + ) { + mirrorPluginStatus(xpackMainPlugin, reporting); + const checkLicense = checkLicenseFactory(this.exportTypesRegistry); + (xpackMainPlugin as any).status.once('green', () => { + // Register a function that is called whenever the xpack info changes, + // to re-compute the license check results for this plugin + xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense); + }); + // Reporting routes + registerRoutes(this, __LEGACY, plugins, this.logger); + } + + public pluginSetup(reportingSetupDeps: ReportingInternalSetup) { + this.pluginSetup$.next(reportingSetupDeps); + } + + public pluginStart(reportingStartDeps: ReportingInternalStart) { + this.pluginStart$.next(reportingStartDeps); + } + + public pluginHasStarted(): Promise { + return this.pluginStart$.pipe(first(), mapTo(true)).toPromise(); + } + + /* + * Internal module dependencies + */ + public getExportTypesRegistry() { + return this.exportTypesRegistry; + } + + public async getEsqueue(): Promise { + return (await this.getPluginStartDeps()).esqueue; + } + + public async getEnqueueJob(): Promise { + return (await this.getPluginStartDeps()).enqueueJob; + } + + public async getBrowserDriverFactory(): Promise { + return (await this.getPluginSetupDeps()).browserDriverFactory; + } + + /* + * Kibana core module dependencies + */ + private async getPluginSetupDeps() { + if (this.pluginSetupDeps) { + return this.pluginSetupDeps; + } + return await this.pluginSetup$.pipe(first()).toPromise(); + } + + private async getPluginStartDeps() { + if (this.pluginStartDeps) { + return this.pluginStartDeps; + } + return await this.pluginStart$.pipe(first()).toPromise(); + } + + public async getSavedObjectsClient(fakeRequest: KibanaRequest): Promise { + const { savedObjects } = await this.getPluginStartDeps(); + return savedObjects.getScopedClient(fakeRequest) as SavedObjectsClient; + } + + public async getUiSettingsServiceFactory( + savedObjectsClient: SavedObjectsClient + ): Promise { + const { uiSettings: uiSettingsService } = await this.getPluginStartDeps(); + const scopedUiSettingsService = uiSettingsService.asScopedToClient(savedObjectsClient); + return scopedUiSettingsService; + } +} diff --git a/x-pack/legacy/plugins/reporting/server/index.ts b/x-pack/legacy/plugins/reporting/server/index.ts index 438a3fd595a107..24e2a954415d9c 100644 --- a/x-pack/legacy/plugins/reporting/server/index.ts +++ b/x-pack/legacy/plugins/reporting/server/index.ts @@ -10,3 +10,6 @@ import { ReportingPlugin as Plugin } from './plugin'; export const plugin = (context: PluginInitializerContext) => { return new 
Plugin(context); }; + +export { ReportingCore } from './core'; +export { ReportingPlugin } from './plugin'; diff --git a/x-pack/legacy/plugins/reporting/server/legacy.ts b/x-pack/legacy/plugins/reporting/server/legacy.ts index c80aef06cf270b..336ff5f4d2ee7c 100644 --- a/x-pack/legacy/plugins/reporting/server/legacy.ts +++ b/x-pack/legacy/plugins/reporting/server/legacy.ts @@ -8,7 +8,7 @@ import { PluginInitializerContext } from 'src/core/server'; import { SecurityPluginSetup } from '../../../../plugins/security/server'; import { ReportingPluginSpecOptions } from '../types'; import { plugin } from './index'; -import { LegacySetup, ReportingStartDeps } from './plugin'; +import { LegacySetup, ReportingStartDeps } from './types'; const buildLegacyDependencies = ( server: Legacy.Server, @@ -22,8 +22,6 @@ const buildLegacyDependencies = ( xpack_main: server.plugins.xpack_main, reporting: reportingPlugin, }, - savedObjects: server.savedObjects, - uiSettingsServiceFactory: server.uiSettingsServiceFactory, }); export const legacyInit = async ( @@ -33,17 +31,20 @@ export const legacyInit = async ( const coreSetup = server.newPlatform.setup.core; const pluginInstance = plugin(server.newPlatform.coreContext as PluginInitializerContext); + const __LEGACY = buildLegacyDependencies(server, reportingPlugin); await pluginInstance.setup(coreSetup, { elasticsearch: coreSetup.elasticsearch, security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, usageCollection: server.newPlatform.setup.plugins.usageCollection, - __LEGACY: buildLegacyDependencies(server, reportingPlugin), + __LEGACY, }); // Schedule to call the "start" hook only after start dependencies are ready coreSetup.getStartServices().then(([core, plugins]) => pluginInstance.start(core, { + elasticsearch: coreSetup.elasticsearch, data: (plugins as ReportingStartDeps).data, + __LEGACY, }) ); }; diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index c4e32b3ebcd99e..d593e4625cdf48 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -5,29 +5,19 @@ */ import { ElasticsearchServiceSetup } from 'kibana/server'; -import { - ServerFacade, - ExportTypesRegistry, - HeadlessChromiumDriverFactory, - QueueConfig, - Logger, -} from '../../types'; +import { ESQueueInstance, ServerFacade, QueueConfig, Logger } from '../../types'; +import { ReportingCore } from '../core'; // @ts-ignore import { Esqueue } from './esqueue'; import { createWorkerFactory } from './create_worker'; import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed -interface CreateQueueFactoryOpts { - exportTypesRegistry: ExportTypesRegistry; - browserDriverFactory: HeadlessChromiumDriverFactory; -} - -export function createQueueFactory( +export async function createQueueFactory( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: Logger, - { exportTypesRegistry, browserDriverFactory }: CreateQueueFactoryOpts -): Esqueue { + logger: Logger +): Promise { const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue'); const index = server.config().get('xpack.reporting.index'); @@ -39,15 +29,12 @@ export function createQueueFactory( logger: createTaggedLogger(logger, ['esqueue', 'queue-worker']), }; - const queue: Esqueue = new Esqueue(index, queueOptions); + const queue: ESQueueInstance = 
new Esqueue(index, queueOptions); if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(server, elasticsearch, logger, { - exportTypesRegistry, - browserDriverFactory, - }); - createWorker(queue); + const createWorker = createWorkerFactory(reporting, server, elasticsearch, logger); + await createWorker(queue); } else { logger.info( 'xpack.reporting.queue.pollEnabled is set to false. This Kibana instance ' + diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index f5c42e5505cd1d..d4d913243e18d8 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -4,9 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -import * as sinon from 'sinon'; import { ElasticsearchServiceSetup } from 'kibana/server'; -import { HeadlessChromiumDriverFactory, ServerFacade } from '../../types'; +import * as sinon from 'sinon'; +import { ReportingCore } from '../../server'; +import { createMockReportingCore } from '../../test_helpers'; +import { ServerFacade } from '../../types'; import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; @@ -33,34 +35,34 @@ const getMockLogger = jest.fn(); const getMockExportTypesRegistry = ( exportTypes: any[] = [{ executeJobFactory: executeJobFactoryStub }] -) => ({ - getAll: () => exportTypes, -}); +) => + ({ + getAll: () => exportTypes, + } as ExportTypesRegistry); describe('Create Worker', () => { let queue: Esqueue; let client: ClientMock; + let mockReporting: ReportingCore; - beforeEach(() => { + beforeEach(async () => { + mockReporting = await createMockReportingCore(); client = new ClientMock(); queue = new Esqueue('reporting-queue', { client }); executeJobFactoryStub.reset(); }); test('Creates a single Esqueue worker for Reporting', async () => { - const exportTypesRegistry = getMockExportTypesRegistry(); + mockReporting.getExportTypesRegistry = () => getMockExportTypesRegistry(); const createWorker = createWorkerFactory( + mockReporting, getMockServer(), {} as ElasticsearchServiceSetup, - getMockLogger(), - { - exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - } + getMockLogger() ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); - createWorker(queue); + await createWorker(queue); sinon.assert.callCount(executeJobFactoryStub, 1); sinon.assert.callCount(registerWorkerSpy, 1); @@ -88,18 +90,16 @@ Object { { executeJobFactory: executeJobFactoryStub }, { executeJobFactory: executeJobFactoryStub }, ]); + mockReporting.getExportTypesRegistry = () => exportTypesRegistry; const createWorker = createWorkerFactory( + mockReporting, getMockServer(), {} as ElasticsearchServiceSetup, - getMockLogger(), - { - exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - } + getMockLogger() ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); - createWorker(queue); + await createWorker(queue); sinon.assert.callCount(executeJobFactoryStub, 5); sinon.assert.callCount(registerWorkerSpy, 1); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index 
2ca638f641291d..35677123676081 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -5,34 +5,29 @@ */ import { ElasticsearchServiceSetup } from 'kibana/server'; -import { PLUGIN_ID } from '../../common/constants'; -import { ExportTypesRegistry, HeadlessChromiumDriverFactory } from '../../types'; import { CancellationToken } from '../../common/cancellation_token'; +import { PLUGIN_ID } from '../../common/constants'; import { ESQueueInstance, - QueueConfig, - ExportTypeDefinition, ESQueueWorkerExecuteFn, - JobDocPayload, + ExportTypeDefinition, ImmediateExecuteFn, + JobDocPayload, JobSource, + Logger, + QueueConfig, RequestFacade, ServerFacade, - Logger, } from '../../types'; +import { ReportingCore } from '../core'; // @ts-ignore untyped dependency import { events as esqueueEvents } from './esqueue'; -interface CreateWorkerFactoryOpts { - exportTypesRegistry: ExportTypesRegistry; - browserDriverFactory: HeadlessChromiumDriverFactory; -} - export function createWorkerFactory( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: Logger, - { exportTypesRegistry, browserDriverFactory }: CreateWorkerFactoryOpts + logger: Logger ) { type JobDocPayloadType = JobDocPayload; const config = server.config(); @@ -41,20 +36,23 @@ export function createWorkerFactory( const kibanaId: string = config.get('server.uuid'); // Once more document types are added, this will need to be passed in - return function createWorker(queue: ESQueueInstance) { + return async function createWorker(queue: ESQueueInstance) { // export type / execute job map const jobExecutors: Map< string, ImmediateExecuteFn | ESQueueWorkerExecuteFn > = new Map(); - for (const exportType of exportTypesRegistry.getAll() as Array< - ExportTypeDefinition + for (const exportType of reporting.getExportTypesRegistry().getAll() as Array< + ExportTypeDefinition >) { // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = exportType.executeJobFactory(server, elasticsearch, logger, { - browserDriverFactory, - }); + const jobExecutor = await exportType.executeJobFactory( + reporting, + server, + elasticsearch, + logger + ); jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index 1da8a3795aacc5..c215bdc3989045 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -16,11 +16,11 @@ import { ServerFacade, RequestFacade, Logger, - ExportTypesRegistry, CaptureConfig, QueueConfig, ConditionalHeaders, } from '../../types'; +import { ReportingCore } from '../core'; interface ConfirmedJob { id: string; @@ -29,16 +29,11 @@ interface ConfirmedJob { _primary_term: number; } -interface EnqueueJobFactoryOpts { - exportTypesRegistry: ExportTypesRegistry; - esqueue: any; -} - export function enqueueJobFactory( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - parentLogger: Logger, - { exportTypesRegistry, esqueue }: EnqueueJobFactoryOpts + parentLogger: Logger ): EnqueueJobFn { const logger = parentLogger.clone(['queue-job']); const config = server.config(); @@ -56,14 +51,20 @@ export function enqueueJobFactory( ): Promise { type CreateJobFn = ESQueueCreateJobFn | ImmediateCreateJobFn; - const exportType = 
exportTypesRegistry.getById(exportTypeId); + const esqueue = await reporting.getEsqueue(); + const exportType = reporting.getExportTypesRegistry().getById(exportTypeId); if (exportType == null) { throw new Error(`Export type ${exportTypeId} does not exist in the registry!`); } // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = exportType.createJobFactory(server, elasticsearch, logger) as CreateJobFn; + const createJob = exportType.createJobFactory( + reporting, + server, + elasticsearch, + logger + ) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts index ab02dfe0743f02..49d5c568c39818 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/get_user.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/get_user.ts @@ -7,7 +7,7 @@ import { Legacy } from 'kibana'; import { KibanaRequest } from '../../../../../../src/core/server'; import { ServerFacade } from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps } from '../types'; export function getUserFactory(server: ServerFacade, security: ReportingSetupDeps['security']) { /* diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts index 028d8fa143487c..0fdbd858b8e3c7 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts @@ -16,8 +16,8 @@ import { validateServerHost } from './validate_server_host'; export async function runValidations( server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: Logger, - browserFactory: HeadlessChromiumDriverFactory + browserFactory: HeadlessChromiumDriverFactory, + logger: Logger ) { try { await Promise.all([ @@ -32,6 +32,7 @@ export async function runValidations( }) ); } catch (err) { + logger.error(err); logger.warning( i18n.translate('xpack.reporting.selfCheck.warning', { defaultMessage: `Reporting plugin self-check generated a warning: {err}`, diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index ef7b01f8e9c15b..4f24cc16b2277c 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -4,97 +4,66 @@ * you may not use this file except in compliance with the Elastic License. 
 */
-import { Legacy } from 'kibana';
-import {
-  CoreSetup,
-  CoreStart,
-  ElasticsearchServiceSetup,
-  Plugin,
-  PluginInitializerContext,
-} from 'src/core/server';
-import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
-import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/server';
-import { SecurityPluginSetup } from '../../../../plugins/security/server';
-// @ts-ignore
-import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status';
-import { XPackMainPlugin } from '../../xpack_main/server/xpack_main';
-import { PLUGIN_ID } from '../common/constants';
+import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from 'src/core/server';
 import { logConfiguration } from '../log_configuration';
-import { ReportingPluginSpecOptions } from '../types.d';
 import { createBrowserDriverFactory } from './browsers';
-import { checkLicenseFactory, getExportTypesRegistry, LevelLogger, runValidations } from './lib';
-import { registerRoutes } from './routes';
+import { ReportingCore } from './core';
+import { createQueueFactory, enqueueJobFactory, LevelLogger, runValidations } from './lib';
 import { setFieldFormats } from './services';
+import { ReportingSetup, ReportingSetupDeps, ReportingStart, ReportingStartDeps } from './types';
 import { registerReportingUsageCollector } from './usage';
+// @ts-ignore no module definition
+import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status';
-export interface ReportingSetupDeps {
-  elasticsearch: ElasticsearchServiceSetup;
-  usageCollection: UsageCollectionSetup;
-  security: SecurityPluginSetup;
-  __LEGACY: LegacySetup;
-}
-
-export interface ReportingStartDeps {
-  data: DataPluginStart;
-}
-
-export interface LegacySetup {
-  config: Legacy.Server['config'];
-  info: Legacy.Server['info'];
-  plugins: {
-    elasticsearch: Legacy.Server['plugins']['elasticsearch'];
-    xpack_main: XPackMainPlugin & {
-      status?: any;
-    };
-    reporting: ReportingPluginSpecOptions;
-  };
-  route: Legacy.Server['route'];
-  savedObjects: Legacy.Server['savedObjects'];
-  uiSettingsServiceFactory: Legacy.Server['uiSettingsServiceFactory'];
-}
+export class ReportingPlugin
+  implements Plugin {
+  private logger: LevelLogger;
+  private reportingCore: ReportingCore;
-export class ReportingPlugin implements Plugin {
-  constructor(private context: PluginInitializerContext) {}
+  constructor(context: PluginInitializerContext) {
+    this.logger = new LevelLogger(context.logger.get('reporting'));
+    this.reportingCore = new ReportingCore(this.logger);
+  }
   public async setup(core: CoreSetup, plugins: ReportingSetupDeps) {
     const { elasticsearch, usageCollection, __LEGACY } = plugins;
-    const exportTypesRegistry = getExportTypesRegistry();
-    let isCollectorReady = false;
+    const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, this.logger); // required for validations :(
+    runValidations(__LEGACY, elasticsearch, browserDriverFactory, this.logger); // this must run early, as it sets up config defaults
+
+    const { xpack_main: xpackMainLegacy, reporting: reportingLegacy } = __LEGACY.plugins;
+    this.reportingCore.legacySetup(xpackMainLegacy, reportingLegacy, __LEGACY, plugins);
     // Register a function with server to manage the collection of usage stats
-    registerReportingUsageCollector(
-      usageCollection,
-      __LEGACY,
-      () => isCollectorReady,
-      exportTypesRegistry
-    );
+    registerReportingUsageCollector(this.reportingCore, __LEGACY, usageCollection);
-    const logger = new
      LevelLogger(this.context.logger.get('reporting'));
    const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, logger);
+    // register setup internals
+    this.reportingCore.pluginSetup({ browserDriverFactory });
-    logConfiguration(__LEGACY, logger);
-    runValidations(__LEGACY, elasticsearch, logger, browserDriverFactory);
+    return {};
+  }
-    const { xpack_main: xpackMainPlugin, reporting } = __LEGACY.plugins;
-    mirrorPluginStatus(xpackMainPlugin, reporting);
+  public async start(core: CoreStart, plugins: ReportingStartDeps) {
+    const { reportingCore, logger } = this;
+    const { elasticsearch, __LEGACY } = plugins;
-    const checkLicense = checkLicenseFactory(exportTypesRegistry);
+    const esqueue = await createQueueFactory(reportingCore, __LEGACY, elasticsearch, logger);
+    const enqueueJob = enqueueJobFactory(reportingCore, __LEGACY, elasticsearch, logger);
-    (xpackMainPlugin as any).status.once('green', () => {
-      // Register a function that is called whenever the xpack info changes,
-      // to re-compute the license check results for this plugin
-      xpackMainPlugin.info.feature(PLUGIN_ID).registerLicenseCheckResultsGenerator(checkLicense);
+    this.reportingCore.pluginStart({
+      savedObjects: core.savedObjects,
+      uiSettings: core.uiSettings,
+      esqueue,
+      enqueueJob,
     });
-    // Post initialization of the above code, the collector is now ready to fetch its data
-    isCollectorReady = true;
+    setFieldFormats(plugins.data.fieldFormats);
+    logConfiguration(__LEGACY, this.logger);
-    // Reporting routes
-    registerRoutes(__LEGACY, plugins, exportTypesRegistry, browserDriverFactory, logger);
+    return {};
   }
-  public start(core: CoreStart, plugins: ReportingStartDeps) {
-    setFieldFormats(plugins.data.fieldFormats);
+  public getReportingCore() {
+    return this.reportingCore;
   }
 }
diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts
index ed761b1e684ae2..49868bb7ad5d53 100644
--- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts
+++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_jobparams.ts
@@ -10,7 +10,7 @@ import { Legacy } from 'kibana';
 import rison from 'rison-node';
 import { API_BASE_URL } from '../../common/constants';
 import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types';
-import { ReportingSetupDeps } from '../plugin';
+import { ReportingSetupDeps } from '../types';
 import { makeRequestFacade } from './lib/make_request_facade';
 import {
   GetRouteConfigFactoryFn,
diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts
index 8696f36a45c620..415b6b7d643669 100644
--- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts
+++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject.ts
@@ -9,7 +9,7 @@ import { get } from 'lodash';
 import { API_BASE_GENERATE_V1, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../common/constants';
 import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request';
 import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types';
-import { ReportingSetupDeps } from '../plugin';
+import { ReportingSetupDeps } from '../types';
 import { makeRequestFacade } from './lib/make_request_facade';
 import { getRouteOptionsCsv } from './lib/route_config_factories';
 import { HandlerErrorFunction, HandlerFunction,
QueuedJobPayload } from './types'; diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index fd1d85fef0f21b..5d17fa2e82b8c6 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -10,14 +10,13 @@ import { createJobFactory, executeJobFactory } from '../../export_types/csv_from import { getJobParamsFromRequest } from '../../export_types/csv_from_savedobject/server/lib/get_job_params_from_request'; import { JobDocPayloadPanelCsv } from '../../export_types/csv_from_savedobject/types'; import { - HeadlessChromiumDriverFactory, JobDocOutput, Logger, ReportingResponseToolkit, ResponseFacade, ServerFacade, } from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { makeRequestFacade } from './lib/make_request_facade'; import { getRouteOptionsCsv } from './lib/route_config_factories'; @@ -31,6 +30,7 @@ import { getRouteOptionsCsv } from './lib/route_config_factories'; * - local (transient) changes the user made to the saved object */ export function registerGenerateCsvFromSavedObjectImmediate( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, parentLogger: Logger @@ -58,10 +58,8 @@ export function registerGenerateCsvFromSavedObjectImmediate( * * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here */ - const createJobFn = createJobFactory(server, elasticsearch, logger); - const executeJobFn = executeJobFactory(server, elasticsearch, logger, { - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - }); + const createJobFn = createJobFactory(reporting, server, elasticsearch, logger); + const executeJobFn = await executeJobFactory(reporting, server, elasticsearch, logger); const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( jobParams, request.headers, diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 02a9541484bc63..096ba84b63d1ac 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -8,15 +8,8 @@ import boom from 'boom'; import { errors as elasticsearchErrors } from 'elasticsearch'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; -import { - ExportTypesRegistry, - HeadlessChromiumDriverFactory, - Logger, - ReportingResponseToolkit, - ServerFacade, -} from '../../types'; -import { createQueueFactory, enqueueJobFactory } from '../lib'; -import { ReportingSetupDeps } from '../plugin'; +import { Logger, ReportingResponseToolkit, ServerFacade } from '../../types'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { registerGenerateFromJobParams } from './generate_from_jobparams'; import { registerGenerateCsvFromSavedObject } from './generate_from_savedobject'; import { registerGenerateCsvFromSavedObjectImmediate } from './generate_from_savedobject_immediate'; @@ -25,23 +18,13 @@ import { makeRequestFacade } from './lib/make_request_facade'; const esErrors = elasticsearchErrors as Record; export function registerJobGenerationRoutes( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, 
- exportTypesRegistry: ExportTypesRegistry, - browserDriverFactory: HeadlessChromiumDriverFactory, logger: Logger ) { const config = server.config(); const DOWNLOAD_BASE_URL = config.get('server.basePath') + `${API_BASE_URL}/jobs/download`; - const { elasticsearch } = plugins; - const esqueue = createQueueFactory(server, elasticsearch, logger, { - exportTypesRegistry, - browserDriverFactory, - }); - const enqueueJob = enqueueJobFactory(server, elasticsearch, logger, { - exportTypesRegistry, - esqueue, - }); /* * Generates enqueued job details to use in responses @@ -56,6 +39,7 @@ export function registerJobGenerationRoutes( const user = request.pre.user; const headers = request.headers; + const enqueueJob = await reporting.getEnqueueJob(); const job = await enqueueJob(exportTypeId, jobParams, user, headers, request); // return the queue's job information @@ -87,6 +71,6 @@ export function registerJobGenerationRoutes( // Register beta panel-action download-related API's if (config.get('xpack.reporting.csv.enablePanelActionDownload')) { registerGenerateCsvFromSavedObject(server, plugins, handler, handleError, logger); - registerGenerateCsvFromSavedObjectImmediate(server, plugins, logger); + registerGenerateCsvFromSavedObjectImmediate(reporting, server, plugins, logger); } } diff --git a/x-pack/legacy/plugins/reporting/server/routes/index.ts b/x-pack/legacy/plugins/reporting/server/routes/index.ts index 4cfa9dd465eabf..610ab4907d3698 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/index.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/index.ts @@ -4,23 +4,17 @@ * you may not use this file except in compliance with the Elastic License. */ -import { - ExportTypesRegistry, - HeadlessChromiumDriverFactory, - Logger, - ServerFacade, -} from '../../types'; -import { ReportingSetupDeps } from '../plugin'; +import { Logger, ServerFacade } from '../../types'; +import { ReportingCore, ReportingSetupDeps } from '../types'; import { registerJobGenerationRoutes } from './generation'; import { registerJobInfoRoutes } from './jobs'; export function registerRoutes( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, - exportTypesRegistry: ExportTypesRegistry, - browserDriverFactory: HeadlessChromiumDriverFactory, logger: Logger ) { - registerJobGenerationRoutes(server, plugins, exportTypesRegistry, browserDriverFactory, logger); - registerJobInfoRoutes(server, plugins, exportTypesRegistry, logger); + registerJobGenerationRoutes(reporting, server, plugins, logger); + registerJobInfoRoutes(reporting, server, plugins, logger); } diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js index 811c81c502b812..071b401d2321bc 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js @@ -5,30 +5,30 @@ */ import Hapi from 'hapi'; -import { difference, memoize } from 'lodash'; -import { registerJobInfoRoutes } from './jobs'; +import { memoize } from 'lodash'; +import { createMockReportingCore } from '../../test_helpers'; import { ExportTypesRegistry } from '../lib/export_types_registry'; -jest.mock('./lib/authorized_user_pre_routing', () => { - return { - authorizedUserPreRoutingFactory: () => () => ({}), - }; -}); -jest.mock('./lib/reporting_feature_pre_routing', () => { - return { - reportingFeaturePreRoutingFactory: () => () => () => ({ - jobTypes: ['unencodedJobType', 'base64EncodedJobType'], - }), - }; -}); + 
+jest.mock('./lib/authorized_user_pre_routing', () => ({ + authorizedUserPreRoutingFactory: () => () => ({}), +})); +jest.mock('./lib/reporting_feature_pre_routing', () => ({ + reportingFeaturePreRoutingFactory: () => () => () => ({ + jobTypes: ['unencodedJobType', 'base64EncodedJobType'], + }), +})); + +import { registerJobInfoRoutes } from './jobs'; let mockServer; let exportTypesRegistry; +let mockReportingPlugin; const mockLogger = { error: jest.fn(), debug: jest.fn(), }; -beforeEach(() => { +beforeEach(async () => { mockServer = new Hapi.Server({ debug: false, port: 8080, routes: { log: { collect: true } } }); mockServer.config = memoize(() => ({ get: jest.fn() })); exportTypesRegistry = new ExportTypesRegistry(); @@ -43,6 +43,8 @@ beforeEach(() => { jobContentEncoding: 'base64', jobContentExtension: 'pdf', }); + mockReportingPlugin = await createMockReportingCore(); + mockReportingPlugin.getExportTypesRegistry = () => exportTypesRegistry; }); const mockPlugins = { @@ -60,12 +62,15 @@ const getHits = (...sources) => { }; }; +const getErrorsFromRequest = request => + request.logs.filter(log => log.tags.includes('error')).map(log => log.error); + test(`returns 404 if job not found`, async () => { mockPlugins.elasticsearch.adminClient = { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -84,7 +89,7 @@ test(`returns 401 if not valid job type`, async () => { .mockReturnValue(Promise.resolve(getHits({ jobtype: 'invalidJobType' }))), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -105,7 +110,7 @@ describe(`when job is incomplete`, () => { ), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -147,7 +152,7 @@ describe(`when job is failed`, () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -192,7 +197,7 @@ describe(`when job is completed`, () => { callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), }; - registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); + registerJobInfoRoutes(mockReportingPlugin, mockServer, mockPlugins, mockLogger); const request = { method: 'GET', @@ -203,72 +208,115 @@ describe(`when job is completed`, () => { }; test(`sets statusCode to 200`, async () => { - const { statusCode } = await getCompletedResponse(); + const { statusCode, request } = await getCompletedResponse(); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(statusCode).toBe(200); }); test(`doesn't encode output content for not-specified jobTypes`, async () => { - const { payload } = await getCompletedResponse({ + const { payload, request } = await getCompletedResponse({ jobType: 'unencodedJobType', outputContent: 'test', }); + + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); + expect(payload).toBe('test'); }); test(`base64 
encodes output content for configured jobTypes`, async () => { - const { payload } = await getCompletedResponse({ + const { payload, request } = await getCompletedResponse({ jobType: 'base64EncodedJobType', outputContent: 'test', }); + + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); + expect(payload).toBe(Buffer.from('test', 'base64').toString()); }); test(`specifies text/csv; charset=utf-8 contentType header from the job output`, async () => { - const { headers } = await getCompletedResponse({ outputContentType: 'text/csv' }); + const { headers, request } = await getCompletedResponse({ outputContentType: 'text/csv' }); + + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); + expect(headers['content-type']).toBe('text/csv; charset=utf-8'); }); test(`specifies default filename in content-disposition header if no title`, async () => { - const { headers } = await getCompletedResponse({}); + const { headers, request } = await getCompletedResponse({}); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-disposition']).toBe('inline; filename="report.csv"'); }); test(`specifies payload title in content-disposition header`, async () => { - const { headers } = await getCompletedResponse({ title: 'something' }); + const { headers, request } = await getCompletedResponse({ title: 'something' }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-disposition']).toBe('inline; filename="something.csv"'); }); test(`specifies jobContentExtension in content-disposition header`, async () => { - const { headers } = await getCompletedResponse({ jobType: 'base64EncodedJobType' }); + const { headers, request } = await getCompletedResponse({ jobType: 'base64EncodedJobType' }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-disposition']).toBe('inline; filename="report.pdf"'); }); test(`specifies application/pdf contentType header from the job output`, async () => { - const { headers } = await getCompletedResponse({ outputContentType: 'application/pdf' }); + const { headers, request } = await getCompletedResponse({ + outputContentType: 'application/pdf', + }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toEqual([]); expect(headers['content-type']).toBe('application/pdf'); }); describe(`when non-whitelisted contentType specified in job output`, () => { test(`sets statusCode to 500`, async () => { - const { statusCode } = await getCompletedResponse({ outputContentType: 'application/html' }); + const { statusCode, request } = await getCompletedResponse({ + outputContentType: 'application/html', + }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toMatchInlineSnapshot(` + Array [ + [Error: Unsupported content-type of application/html specified by job output], + [Error: Unsupported content-type of application/html specified by job output], + ] + `); expect(statusCode).toBe(500); }); test(`doesn't include job output content in payload`, async () => { - const { payload } = await getCompletedResponse({ outputContentType: 'application/html' }); - expect(payload).not.toMatch(/job output content/); + const { payload, request } = await getCompletedResponse({ + outputContentType: 'application/html', + }); + expect(payload).toMatchInlineSnapshot( + `"{\\"statusCode\\":500,\\"error\\":\\"Internal Server Error\\",\\"message\\":\\"An internal 
server error occurred\\"}"` + ); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toMatchInlineSnapshot(` + Array [ + [Error: Unsupported content-type of application/html specified by job output], + [Error: Unsupported content-type of application/html specified by job output], + ] + `); }); test(`logs error message about invalid content type`, async () => { - const { - request: { logs }, - } = await getCompletedResponse({ outputContentType: 'application/html' }); - const errorLogs = logs.filter( - log => difference(['internal', 'implementation', 'error'], log.tags).length === 0 - ); - expect(errorLogs).toHaveLength(1); - expect(errorLogs[0].error).toBeInstanceOf(Error); - expect(errorLogs[0].error.message).toMatch(/Unsupported content-type of application\/html/); + const { request } = await getCompletedResponse({ outputContentType: 'application/html' }); + const errorLogs = getErrorsFromRequest(request); + expect(errorLogs).toMatchInlineSnapshot(` + Array [ + [Error: Unsupported content-type of application/html specified by job output], + [Error: Unsupported content-type of application/html specified by job output], + ] + `); }); }); }); diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index daabc2cf22f4e2..2de420e6577c3c 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -9,7 +9,6 @@ import { ResponseObject } from 'hapi'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; import { - ExportTypesRegistry, JobDocOutput, JobSource, ListQuery, @@ -18,7 +17,7 @@ import { ServerFacade, } from '../../types'; import { jobsQueryFactory } from '../lib/jobs_query'; -import { ReportingSetupDeps } from '../plugin'; +import { ReportingSetupDeps, ReportingCore } from '../types'; import { jobResponseHandlerFactory } from './lib/job_response_handler'; import { makeRequestFacade } from './lib/make_request_facade'; import { @@ -33,9 +32,9 @@ function isResponse(response: Boom | ResponseObject): response is Response } export function registerJobInfoRoutes( + reporting: ReportingCore, server: ServerFacade, plugins: ReportingSetupDeps, - exportTypesRegistry: ExportTypesRegistry, logger: Logger ) { const { elasticsearch } = plugins; @@ -138,6 +137,7 @@ export function registerJobInfoRoutes( }); // trigger a download of the output from a job + const exportTypesRegistry = reporting.getExportTypesRegistry(); const jobResponseHandler = jobResponseHandlerFactory(server, elasticsearch, exportTypesRegistry); server.route({ path: `${MAIN_ENTRY}/download/{docId}`, diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts index 57c3fcee222da9..c5f8c78016f618 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/authorized_user_pre_routing.ts @@ -9,7 +9,7 @@ import { Legacy } from 'kibana'; import { AuthenticatedUser } from '../../../../../../plugins/security/server'; import { Logger, ServerFacade } from '../../../types'; import { getUserFactory } from '../../lib/get_user'; -import { ReportingSetupDeps } from '../../plugin'; +import { ReportingSetupDeps } from '../../types'; const superuserRole = 'superuser'; diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts 
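
The rewritten download-route tests above call a shared getErrorsFromRequest(request) helper that is defined elsewhere in the test file and is not included in this diff. A minimal, hypothetical sketch — the mock request shape and the tag filter are assumptions carried over from the deleted lodash-based filtering — could look like:

import { difference } from 'lodash';

// Hypothetical helper, not the one in the actual test file.
interface MockLogEntry {
  tags: string[];
  error?: Error;
  data?: unknown;
}

interface MockRequestWithLogs {
  logs: MockLogEntry[];
}

export function getErrorsFromRequest(request: MockRequestWithLogs): unknown[] {
  return request.logs
    .filter(log => difference(['internal', 'implementation', 'error'], log.tags).length === 0)
    .map(log => log.error || log.data);
}
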
b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts index 7367fceb508572..9e618ff1fe40a2 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/reporting_feature_pre_routing.ts @@ -7,7 +7,7 @@ import Boom from 'boom'; import { Legacy } from 'kibana'; import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../plugin'; +import { ReportingSetupDeps } from '../../types'; export type GetReportingFeatureIdFn = (request: Legacy.Request) => string; diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts index 931f642397bf8e..82ba9ba22c7061 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/route_config_factories.ts @@ -7,7 +7,7 @@ import Joi from 'joi'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants'; import { Logger, ServerFacade } from '../../../types'; -import { ReportingSetupDeps } from '../../plugin'; +import { ReportingSetupDeps } from '../../types'; import { authorizedUserPreRoutingFactory } from './authorized_user_pre_routing'; import { GetReportingFeatureIdFn, diff --git a/x-pack/legacy/plugins/reporting/server/types.d.ts b/x-pack/legacy/plugins/reporting/server/types.d.ts new file mode 100644 index 00000000000000..20673423aa4484 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/server/types.d.ts @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { Legacy } from 'kibana'; +import { + ElasticsearchServiceSetup, + SavedObjectsServiceStart, + UiSettingsServiceStart, +} from 'src/core/server'; +import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/server'; +import { SecurityPluginSetup } from '../../../../plugins/security/server'; +import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; +import { EnqueueJobFn, ESQueueInstance, ReportingPluginSpecOptions } from '../types'; +import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory'; + +export interface ReportingSetupDeps { + elasticsearch: ElasticsearchServiceSetup; + security: SecurityPluginSetup; + usageCollection: UsageCollectionSetup; + __LEGACY: LegacySetup; +} + +export interface ReportingStartDeps { + elasticsearch: ElasticsearchServiceSetup; + data: DataPluginStart; + __LEGACY: LegacySetup; +} + +export type ReportingSetup = object; + +export type ReportingStart = object; + +export interface LegacySetup { + config: Legacy.Server['config']; + info: Legacy.Server['info']; + plugins: { + elasticsearch: Legacy.Server['plugins']['elasticsearch']; + xpack_main: XPackMainPlugin & { + status?: any; + }; + reporting: ReportingPluginSpecOptions; + }; + route: Legacy.Server['route']; +} + +export { ReportingCore } from './core'; diff --git a/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts b/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts index 0118dea38d9857..359bcc45230c34 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/decorate_range_stats.ts @@ -6,7 +6,7 @@ import { uniq } from 'lodash'; import { CSV_JOB_TYPE, PDF_JOB_TYPE, PNG_JOB_TYPE } from '../../common/constants'; -import { AvailableTotal, FeatureAvailabilityMap, RangeStats, ExportType } from './types.d'; +import { AvailableTotal, FeatureAvailabilityMap, RangeStats, ExportType } from './types'; function getForFeature( range: Partial, diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js index f761f0d2d270b2..a6d753f9b107a2 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.test.js @@ -3,9 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ + import sinon from 'sinon'; +import { createMockReportingCore } from '../../test_helpers'; import { getExportTypesRegistry } from '../lib/export_types_registry'; -import { getReportingUsageCollector } from './reporting_usage_collector'; +import { + registerReportingUsageCollector, + getReportingUsageCollector, +} from './reporting_usage_collector'; const exportTypesRegistry = getExportTypesRegistry(); @@ -70,9 +75,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithBasicLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -101,9 +105,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithNoLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -132,9 +135,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve(getResponseMock())); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithPlatinumLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -163,9 +165,8 @@ describe('license checks', () => { const callClusterMock = jest.fn(() => Promise.resolve({})); const usageCollection = getMockUsageCollection(); const { fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithBasicLicenseMock, - () => {}, + usageCollection, exportTypesRegistry ); usageStats = await getReportingUsage(callClusterMock, exportTypesRegistry); @@ -190,9 +191,8 @@ describe('data modeling', () => { .stub() .returns('platinum'); ({ fetch: getReportingUsage } = getReportingUsageCollector( - usageCollection, serverWithPlatinumLicenseMock, - () => {}, + usageCollection, exportTypesRegistry )); }); @@ -322,94 +322,124 @@ describe('data modeling', () => { const usageStats = await getReportingUsage(callClusterMock); expect(usageStats).toMatchInlineSnapshot(` -Object { - "PNG": Object { - "available": true, - "total": 4, - }, - "_all": 54, - "available": true, - "browser_type": undefined, - "csv": Object { - "available": true, - "total": 27, - }, - "enabled": true, - "last7Days": Object { - "PNG": Object { - "available": true, - "total": 4, - }, - "_all": 27, - "csv": Object { - "available": true, - "total": 10, - }, - "printable_pdf": Object { - "app": Object { - "dashboard": 13, - "visualization": 0, - }, - "available": true, - "layout": Object { - "preserve_layout": 3, - "print": 10, - }, - "total": 13, - }, - "status": Object { - "completed": 0, - "failed": 0, - "pending": 27, - }, - }, - "lastDay": Object { - "PNG": Object { - "available": true, - "total": 4, - }, - "_all": 11, - "csv": Object { - "available": true, - "total": 5, - }, - "printable_pdf": Object { - "app": Object { - "dashboard": 2, - "visualization": 0, - }, - "available": true, - "layout": Object { - "preserve_layout": 0, - "print": 2, - }, - "total": 2, - }, - "status": Object { - "completed": 0, - "failed": 0, - "pending": 
11, - }, - }, - "printable_pdf": Object { - "app": Object { - "dashboard": 23, - "visualization": 0, - }, - "available": true, - "layout": Object { - "preserve_layout": 13, - "print": 10, - }, - "total": 23, - }, - "status": Object { - "completed": 20, - "failed": 0, - "pending": 33, - "processing": 1, - }, -} -`); + Object { + "PNG": Object { + "available": true, + "total": 4, + }, + "_all": 54, + "available": true, + "browser_type": undefined, + "csv": Object { + "available": true, + "total": 27, + }, + "enabled": true, + "last7Days": Object { + "PNG": Object { + "available": true, + "total": 4, + }, + "_all": 27, + "csv": Object { + "available": true, + "total": 10, + }, + "printable_pdf": Object { + "app": Object { + "dashboard": 13, + "visualization": 0, + }, + "available": true, + "layout": Object { + "preserve_layout": 3, + "print": 10, + }, + "total": 13, + }, + "status": Object { + "completed": 0, + "failed": 0, + "pending": 27, + }, + }, + "lastDay": Object { + "PNG": Object { + "available": true, + "total": 4, + }, + "_all": 11, + "csv": Object { + "available": true, + "total": 5, + }, + "printable_pdf": Object { + "app": Object { + "dashboard": 2, + "visualization": 0, + }, + "available": true, + "layout": Object { + "preserve_layout": 0, + "print": 2, + }, + "total": 2, + }, + "status": Object { + "completed": 0, + "failed": 0, + "pending": 11, + }, + }, + "printable_pdf": Object { + "app": Object { + "dashboard": 23, + "visualization": 0, + }, + "available": true, + "layout": Object { + "preserve_layout": 13, + "print": 10, + }, + "total": 23, + }, + "status": Object { + "completed": 20, + "failed": 0, + "pending": 33, + "processing": 1, + }, + } + `); + }); +}); + +describe('Ready for collection observable', () => { + let mockReporting; + + beforeEach(async () => { + mockReporting = await createMockReportingCore(); + }); + + test('converts observable to promise', async () => { + const serverWithBasicLicenseMock = getServerMock(); + const makeCollectorSpy = sinon.spy(); + const usageCollection = { + makeUsageCollector: makeCollectorSpy, + registerCollector: sinon.stub(), + }; + registerReportingUsageCollector(mockReporting, serverWithBasicLicenseMock, usageCollection); + + const [args] = makeCollectorSpy.firstCall.args; + expect(args).toMatchInlineSnapshot(` + Object { + "fetch": [Function], + "formatForBulkUpload": [Function], + "isReady": [Function], + "type": "reporting", + } + `); + + await expect(args.isReady()).resolves.toBe(true); }); }); diff --git a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts index 567838391d2e7a..14202530fb6c7b 100644 --- a/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts +++ b/x-pack/legacy/plugins/reporting/server/usage/reporting_usage_collector.ts @@ -5,8 +5,9 @@ */ import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; -import { ServerFacade, ExportTypesRegistry, ESCallCluster } from '../../types'; import { KIBANA_REPORTING_TYPE } from '../../common/constants'; +import { ReportingCore } from '../../server'; +import { ESCallCluster, ExportTypesRegistry, ServerFacade } from '../../types'; import { getReportingUsage } from './get_reporting_usage'; import { RangeStats } from './types'; @@ -18,16 +19,16 @@ const METATYPE = 'kibana_stats'; * @return {Object} kibana usage stats type collection object */ export function getReportingUsageCollector( - usageCollection: UsageCollectionSetup, server: 
ServerFacade, - isReady: () => boolean, - exportTypesRegistry: ExportTypesRegistry + usageCollection: UsageCollectionSetup, + exportTypesRegistry: ExportTypesRegistry, + isReady: () => Promise ) { return usageCollection.makeUsageCollector({ type: KIBANA_REPORTING_TYPE, - isReady, fetch: (callCluster: ESCallCluster) => getReportingUsage(server, callCluster, exportTypesRegistry), + isReady, /* * Format the response data into a model for internal upload @@ -50,16 +51,18 @@ export function getReportingUsageCollector( } export function registerReportingUsageCollector( - usageCollection: UsageCollectionSetup, + reporting: ReportingCore, server: ServerFacade, - isReady: () => boolean, - exportTypesRegistry: ExportTypesRegistry + usageCollection: UsageCollectionSetup ) { + const exportTypesRegistry = reporting.getExportTypesRegistry(); + const collectionIsReady = reporting.pluginHasStarted.bind(reporting); + const collector = getReportingUsageCollector( - usageCollection, server, - isReady, - exportTypesRegistry + usageCollection, + exportTypesRegistry, + collectionIsReady ); usageCollection.registerCollector(collector); } diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts new file mode 100644 index 00000000000000..2cd129d47b3f96 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_reportingplugin.ts @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +jest.mock('../server/routes'); +jest.mock('../server/usage'); +jest.mock('../server/browsers'); +jest.mock('../server/browsers'); +jest.mock('../server/lib/create_queue'); +jest.mock('../server/lib/enqueue_job'); +jest.mock('../server/lib/validate'); +jest.mock('../log_configuration'); + +import { EventEmitter } from 'events'; +// eslint-disable-next-line @kbn/eslint/no-restricted-paths +import { coreMock } from 'src/core/server/mocks'; +import { ReportingPlugin, ReportingCore } from '../server'; +import { ReportingSetupDeps, ReportingStartDeps } from '../server/types'; + +export const createMockSetupDeps = (setupMock?: any): ReportingSetupDeps => ({ + elasticsearch: setupMock.elasticsearch, + security: setupMock.security, + usageCollection: {} as any, + __LEGACY: { plugins: { xpack_main: { status: new EventEmitter() } } } as any, +}); + +export const createMockStartDeps = (startMock?: any): ReportingStartDeps => ({ + data: startMock.data, + elasticsearch: startMock.elasticsearch, + __LEGACY: {} as any, +}); + +const createMockReportingPlugin = async (config = {}): Promise => { + const plugin = new ReportingPlugin(coreMock.createPluginInitializerContext(config)); + const setupMock = coreMock.createSetup(); + const coreStartMock = coreMock.createStart(); + const startMock = { + ...coreStartMock, + data: { fieldFormats: {} }, + }; + + await plugin.setup(setupMock, createMockSetupDeps(setupMock)); + await plugin.start(startMock, createMockStartDeps(startMock)); + + return plugin; +}; + +export const createMockReportingCore = async (config = {}): Promise => { + const plugin = await createMockReportingPlugin(config); + return plugin.getReportingCore(); +}; diff --git a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts 
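
The collector registration above now passes isReady as a promise-returning function bound to reporting.pluginHasStarted, and the new test asserts that the observable-based readiness signal is awaited as a promise. The implementation of pluginHasStarted is not part of this diff; as a sketch under that assumption, a readiness flag backed by an RxJS subject can be surfaced this way:

import * as Rx from 'rxjs';
import { first, mapTo } from 'rxjs/operators';

// Sketch only: an assumed shape for the readiness signal, not the real ReportingCore.
class ReadinessSignal {
  private readonly started$ = new Rx.ReplaySubject<boolean>(1);

  public markStarted() {
    this.started$.next(true);
  }

  // Resolves to true once markStarted() has been called at least once.
  public pluginHasStarted(): Promise<boolean> {
    return this.started$.pipe(first(), mapTo(true)).toPromise();
  }
}

Binding pluginHasStarted (as the hunk above does with .bind(reporting)) keeps `this` pointing at the core object when the usage collection service later invokes isReady() without a receiver.
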
index 226355f5edc615..bb7851ba036a90 100644 --- a/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts +++ b/x-pack/legacy/plugins/reporting/test_helpers/create_mock_server.ts @@ -8,9 +8,6 @@ import { ServerFacade } from '../types'; export const createMockServer = ({ settings = {} }: any): ServerFacade => { const mockServer = { - expose: () => { - ' '; - }, config: memoize(() => ({ get: jest.fn() })), info: { protocol: 'http', @@ -24,10 +21,6 @@ export const createMockServer = ({ settings = {} }: any): ServerFacade => { }), }, }, - savedObjects: { - getScopedSavedObjectsClient: jest.fn(), - }, - uiSettingsServiceFactory: jest.fn().mockReturnValue({ get: jest.fn() }), }; const defaultSettings: any = { diff --git a/x-pack/legacy/plugins/reporting/test_helpers/index.ts b/x-pack/legacy/plugins/reporting/test_helpers/index.ts new file mode 100644 index 00000000000000..7fbc5661d52117 --- /dev/null +++ b/x-pack/legacy/plugins/reporting/test_helpers/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { createMockServer } from './create_mock_server'; +export { createMockReportingCore } from './create_mock_reportingplugin'; diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index a4ff39b23747dd..1549c173b3d6e5 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -6,16 +6,15 @@ import { EventEmitter } from 'events'; import { ResponseObject } from 'hapi'; -import { ElasticsearchServiceSetup } from 'kibana/server'; import { Legacy } from 'kibana'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CallCluster } from '../../../../src/legacy/core_plugins/elasticsearch'; import { CancellationToken } from './common/cancellation_token'; import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; import { BrowserType } from './server/browsers/types'; import { LevelLogger } from './server/lib/level_logger'; -import { LegacySetup, ReportingSetupDeps } from './server/plugin'; - -export type ReportingPlugin = object; // For Plugin contract +import { ReportingCore } from './server/core'; +import { LegacySetup, ReportingStartDeps, ReportingSetup, ReportingStart } from './server/types'; export type Job = EventEmitter & { id: string; @@ -65,6 +64,7 @@ interface GenerateExportTypePayload { /* * Legacy System + * TODO: move to server/types */ export type ServerFacade = LegacySetup; @@ -179,6 +179,15 @@ export interface CryptoFactory { decrypt: (headers?: string) => any; } +export interface IndexPatternSavedObject { + attributes: { + fieldFormatMap: string; + }; + id: string; + type: string; + version: string; +} + export interface TimeRangeParams { timezone: string; min: Date | string | number; @@ -214,10 +223,6 @@ export interface JobDocOutput { size: number; } -export interface ESQueue { - addJob: (type: string, payload: object, options: object) => Job; -} - export interface ESQueueWorker { on: (event: string, handler: any) => void; } @@ -267,8 +272,9 @@ type GenericWorkerFn = ( ...workerRestArgs: any[] ) => void | Promise; -export interface ESQueueInstance { - registerWorker: ( +export interface ESQueueInstance { + addJob: (type: string, payload: unknown, options: object) => Job; + registerWorker: ( pluginId: string, 
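
The new test helpers above are consumed the same way as in the usage-collector test earlier in this diff. A small, illustrative Jest usage — the test body and relative import path are assumptions; only createMockReportingCore and getExportTypesRegistry appear in this patch:

import { createMockReportingCore } from '../test_helpers';

// Illustrative only; the import path depends on where the test file lives.
describe('createMockReportingCore', () => {
  it('returns a core object with an export types registry', async () => {
    const mockReporting = await createMockReportingCore();
    expect(mockReporting.getExportTypesRegistry()).toBeDefined();
  });
});
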
workerFn: GenericWorkerFn, workerOptions: ESQueueWorkerOptions @@ -276,18 +282,17 @@ export interface ESQueueInstance { } export type CreateJobFactory = ( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger ) => CreateJobFnType; export type ExecuteJobFactory = ( + reporting: ReportingCore, server: ServerFacade, elasticsearch: ElasticsearchServiceSetup, - logger: LevelLogger, - opts: { - browserDriverFactory: HeadlessChromiumDriverFactory; - } -) => ExecuteJobFnType; + logger: LevelLogger +) => Promise; export interface ExportTypeDefinition< JobParamsType, @@ -309,7 +314,6 @@ export { CancellationToken } from './common/cancellation_token'; export { HeadlessChromiumDriver } from './server/browsers/chromium/driver'; export { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; export { ExportTypesRegistry } from './server/lib/export_types_registry'; - // Prefer to import this type using: `import { LevelLogger } from 'relative/path/server/lib';` export { LevelLogger as Logger }; diff --git a/x-pack/legacy/plugins/siem/public/components/notes/note_card/__snapshots__/note_card_body.test.tsx.snap b/x-pack/legacy/plugins/siem/public/components/notes/note_card/__snapshots__/note_card_body.test.tsx.snap index ba35940ff0e5fa..bbacc86dcca2c9 100644 --- a/x-pack/legacy/plugins/siem/public/components/notes/note_card/__snapshots__/note_card_body.test.tsx.snap +++ b/x-pack/legacy/plugins/siem/public/components/notes/note_card/__snapshots__/note_card_body.test.tsx.snap @@ -333,6 +333,7 @@ exports[`NoteCardBody renders correctly against snapshot 1`] = ` "graphic": "#e7664c", }, }, + "euiPaletteColorBlindKeys": "'euiColorVis0', 'euiColorVis1', 'euiColorVis2', 'euiColorVis3', 'euiColorVis4', 'euiColorVis5', 'euiColorVis6', 'euiColorVis7', 'euiColorVis8', 'euiColorVis9'", "euiPanelPaddingModifiers": Object { "paddingLarge": "24px", "paddingMedium": "16px", @@ -437,6 +438,54 @@ exports[`NoteCardBody renders correctly against snapshot 1`] = ` "euiTextScale": "2.25 1.75 1.25 1.125 1 0.875 0.75", "euiTitleColor": "#dfe5ef", "euiToastWidth": "320px", + "euiTokenGrayColor": "#535966", + "euiTokenTypeKeys": "'euiColorVis0', 'euiColorVis1', 'euiColorVis2', 'euiColorVis3', 'euiColorVis4', 'euiColorVis5', 'euiColorVis6', 'euiColorVis7', 'euiColorVis8', 'euiColorVis9', 'gray'", + "euiTokenTypes": Object { + "euiColorVis0": Object { + "behindText": "#6dccb1", + "graphic": "#54b399", + }, + "euiColorVis1": Object { + "behindText": "#79aad9", + "graphic": "#6092c0", + }, + "euiColorVis2": Object { + "behindText": "#ee789d", + "graphic": "#d36086", + }, + "euiColorVis3": Object { + "behindText": "#a987d1", + "graphic": "#9170b8", + }, + "euiColorVis4": Object { + "behindText": "#e4a6c7", + "graphic": "#ca8eae", + }, + "euiColorVis5": Object { + "behindText": "#f1d86f", + "graphic": "#d6bf57", + }, + "euiColorVis6": Object { + "behindText": "#d2c0a0", + "graphic": "#b9a888", + }, + "euiColorVis7": Object { + "behindText": "#f5a35c", + "graphic": "#da8b45", + }, + "euiColorVis8": Object { + "behindText": "#c47c6c", + "graphic": "#aa6556", + }, + "euiColorVis9": Object { + "behindText": "#ff7e62", + "graphic": "#e7664c", + }, + "gray": Object { + "behindText": "#535966", + "graphic": "#535966", + }, + }, "euiTooltipAnimations": Object { "bottom": "euiToolTipLeft", "left": "euiToolTipBottom", @@ -548,20 +597,6 @@ exports[`NoteCardBody renders correctly against snapshot 1`] = ` "success": "#7de2d1", "warning": "#ffce7a", }, - "tokenTypes": 
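
The type changes above move ReportingCore into the first parameter of CreateJobFactory/ExecuteJobFactory and make ExecuteJobFactory async, so per-job resources no longer arrive through the old opts.browserDriverFactory argument. A standalone sketch of the new shape, using simplified stand-in types rather than the real Kibana ones:

// Stand-in types; the real ones come from x-pack/legacy/plugins/reporting/types.d.ts.
interface MinimalLogger {
  debug(message: string): void;
}
interface MinimalReportingCore {
  getExportTypesRegistry(): unknown;
}
type ExecuteFn = (jobId: string, job: unknown) => Promise<{ content_type: string; content: string }>;

// The factory is now async: setup work (e.g. awaiting a browser driver) happens inside it.
export const executeJobFactory = async (
  reporting: MinimalReportingCore,
  server: unknown,
  elasticsearch: unknown,
  logger: MinimalLogger
): Promise<ExecuteFn> => {
  return async (jobId, job) => {
    logger.debug(`executing job ${jobId}`);
    return { content_type: 'text/csv', content: '' };
  };
};
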
Object { - "tokenTint01": "#1ba9f5", - "tokenTint02": "#f990c0", - "tokenTint03": "#9170b8", - "tokenTint04": "#da8b45", - "tokenTint05": "#6092c0", - "tokenTint06": "#e6c220", - "tokenTint07": "#54b399", - "tokenTint08": "#920000", - "tokenTint09": "#ff00ff", - "tokenTint10": "#26ab00", - "tokenTint11": "#4c1604", - "tokenTint12": "#666666", - }, }, } } diff --git a/x-pack/legacy/plugins/siem/public/components/paginated_table/__snapshots__/index.test.tsx.snap b/x-pack/legacy/plugins/siem/public/components/paginated_table/__snapshots__/index.test.tsx.snap index 59d2d91897254b..86a3c67227119c 100644 --- a/x-pack/legacy/plugins/siem/public/components/paginated_table/__snapshots__/index.test.tsx.snap +++ b/x-pack/legacy/plugins/siem/public/components/paginated_table/__snapshots__/index.test.tsx.snap @@ -333,6 +333,7 @@ exports[`Paginated Table Component rendering it renders the default load more ta "graphic": "#e7664c", }, }, + "euiPaletteColorBlindKeys": "'euiColorVis0', 'euiColorVis1', 'euiColorVis2', 'euiColorVis3', 'euiColorVis4', 'euiColorVis5', 'euiColorVis6', 'euiColorVis7', 'euiColorVis8', 'euiColorVis9'", "euiPanelPaddingModifiers": Object { "paddingLarge": "24px", "paddingMedium": "16px", @@ -437,6 +438,54 @@ exports[`Paginated Table Component rendering it renders the default load more ta "euiTextScale": "2.25 1.75 1.25 1.125 1 0.875 0.75", "euiTitleColor": "#dfe5ef", "euiToastWidth": "320px", + "euiTokenGrayColor": "#535966", + "euiTokenTypeKeys": "'euiColorVis0', 'euiColorVis1', 'euiColorVis2', 'euiColorVis3', 'euiColorVis4', 'euiColorVis5', 'euiColorVis6', 'euiColorVis7', 'euiColorVis8', 'euiColorVis9', 'gray'", + "euiTokenTypes": Object { + "euiColorVis0": Object { + "behindText": "#6dccb1", + "graphic": "#54b399", + }, + "euiColorVis1": Object { + "behindText": "#79aad9", + "graphic": "#6092c0", + }, + "euiColorVis2": Object { + "behindText": "#ee789d", + "graphic": "#d36086", + }, + "euiColorVis3": Object { + "behindText": "#a987d1", + "graphic": "#9170b8", + }, + "euiColorVis4": Object { + "behindText": "#e4a6c7", + "graphic": "#ca8eae", + }, + "euiColorVis5": Object { + "behindText": "#f1d86f", + "graphic": "#d6bf57", + }, + "euiColorVis6": Object { + "behindText": "#d2c0a0", + "graphic": "#b9a888", + }, + "euiColorVis7": Object { + "behindText": "#f5a35c", + "graphic": "#da8b45", + }, + "euiColorVis8": Object { + "behindText": "#c47c6c", + "graphic": "#aa6556", + }, + "euiColorVis9": Object { + "behindText": "#ff7e62", + "graphic": "#e7664c", + }, + "gray": Object { + "behindText": "#535966", + "graphic": "#535966", + }, + }, "euiTooltipAnimations": Object { "bottom": "euiToolTipLeft", "left": "euiToolTipBottom", @@ -548,20 +597,6 @@ exports[`Paginated Table Component rendering it renders the default load more ta "success": "#7de2d1", "warning": "#ffce7a", }, - "tokenTypes": Object { - "tokenTint01": "#1ba9f5", - "tokenTint02": "#f990c0", - "tokenTint03": "#9170b8", - "tokenTint04": "#da8b45", - "tokenTint05": "#6092c0", - "tokenTint06": "#e6c220", - "tokenTint07": "#54b399", - "tokenTint08": "#920000", - "tokenTint09": "#ff00ff", - "tokenTint10": "#26ab00", - "tokenTint11": "#4c1604", - "tokenTint12": "#666666", - }, }, } } diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_logistics.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_logistics.tsx index 111b46d596e565..ef92edcfaeb35f 100644 --- 
a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_logistics.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_logistics.tsx @@ -94,7 +94,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="A unique identifier for this policy." /> } - idAria="nameDescription" fullWidth > = ({ defaultMessage="Name" /> } - describedByIds={['nameDescription']} isInvalid={touched.name && Boolean(errors.name)} error={errors.name} fullWidth @@ -158,7 +156,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="The repository where you want to store the snapshots." /> } - idAria="policyRepositoryDescription" fullWidth > = ({ defaultMessage="Repository" /> } - describedByIds={['policyRepositoryDescription']} isInvalid={touched.repository && Boolean(errors.repository)} error={errors.repository} fullWidth @@ -307,7 +303,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="The name for the snapshots. A unique identifier is automatically added to each name." /> } - idAria="policySnapshotNameDescription" fullWidth > = ({ defaultMessage="Snapshot name" /> } - describedByIds={['policySnapshotNameDescription']} isInvalid={touched.snapshotName && Boolean(errors.snapshotName)} error={errors.snapshotName} helpText={ @@ -389,7 +383,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="The frequency at which to take the snapshots." /> } - idAria="policyScheduleDescription" fullWidth > {isAdvancedCronVisible ? ( @@ -401,7 +394,6 @@ export const PolicyStepLogistics: React.FunctionComponent = ({ defaultMessage="Schedule" /> } - describedByIds={['policyScheduleDescription']} isInvalid={touched.schedule && Boolean(errors.schedule)} error={errors.schedule} helpText={ diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx index df7e2c8807d9f1..ec01885e76ff1f 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_retention.tsx @@ -73,7 +73,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="The time to wait before deleting snapshots." /> } - idAria="expirationDescription" fullWidth > = ({ defaultMessage="Delete after" /> } - describedByIds={['expirationDescription']} isInvalid={touched.expireAfterValue && Boolean(errors.expireAfterValue)} error={errors.expireAfterValue} fullWidth @@ -140,7 +138,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="The minimum and maximum number of snapshots to store in your cluster." 
/> } - idAria="countDescription" fullWidth > @@ -152,7 +149,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="Mininum count" /> } - describedByIds={['countDescription']} isInvalid={touched.minCount && Boolean(errors.minCount)} error={errors.minCount} fullWidth @@ -180,7 +176,6 @@ export const PolicyStepRetention: React.FunctionComponent = ({ defaultMessage="Maximum count" /> } - describedByIds={['countDescription']} isInvalid={touched.maxCount && Boolean(errors.maxCount)} error={errors.maxCount} fullWidth diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx index 0e3b6e030d1c61..552dbff8e74418 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/policy_form/steps/step_settings.tsx @@ -126,10 +126,9 @@ export const PolicyStepSettings: React.FunctionComponent = ({ defaultMessage="Indices to back up." /> } - idAria="indicesDescription" fullWidth > - + {isManagedPolicy ? ( = ({ defaultMessage="Ignores indices that are unavailable when taking the snapshot. Otherwise, the entire snapshot will fail." /> } - idAria="policyIgnoreUnavailableDescription" fullWidth > - + = ({ defaultMessage="Allows snapshots of indices with primary shards that are unavailable. Otherwise, the entire snapshot will fail." /> } - idAria="policyPartialDescription" fullWidth > - + = ({ defaultMessage="Stores the global state of the cluster as part of the snapshot." /> } - idAria="policyIncludeGlobalStateDescription" fullWidth > - + = ({ defaultMessage="A unique name for the repository." /> } - idAria="repositoryNameDescription" fullWidth > = ({ defaultMessage="Name" /> } - describedByIds={['repositoryNameDescription']} isInvalid={Boolean(hasValidationErrors && validation.errors.name)} error={validation.errors.name} fullWidth @@ -303,10 +301,9 @@ export const RepositoryFormStepOne: React.FunctionComponent = ({ /> } - idAria="sourceOnlyDescription" fullWidth > - + = ({ defaultMessage="The name of the Azure client." /> } - idAria="azureRepositoryClientDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryClientDescription']} isInvalid={Boolean(hasErrors && settingErrors.client)} error={settingErrors.client} > @@ -123,7 +121,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The name of the Azure container to use for snapshots." /> } - idAria="azureRepositoryContainerDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryContainerDescription']} isInvalid={Boolean(hasErrors && settingErrors.container)} error={settingErrors.container} > @@ -169,7 +165,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The container path to the repository data." /> } - idAria="azureRepositoryBasePathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryBasePathDescription']} isInvalid={Boolean(hasErrors && settingErrors.basePath)} error={settingErrors.basePath} > @@ -215,13 +209,11 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." 
/> } - idAria="azureRepositoryCompressDescription" fullWidth > @@ -261,7 +253,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." /> } - idAria="azureRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -308,7 +298,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="azureRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -355,7 +343,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="azureRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -402,7 +388,6 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="The primary or secondary location. If secondary, read-only is true." /> } - idAria="azureRepositoryLocationModeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['azureRepositoryLocationModeDescription']} isInvalid={Boolean(hasErrors && settingErrors.locationMode)} error={settingErrors.locationMode} > @@ -450,13 +434,11 @@ export const AzureSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="azureRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx index 2e2238ac93e3ce..711db1ee300cbc 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/fs_settings.tsx @@ -73,7 +73,6 @@ export const FSSettings: React.FunctionComponent = ({ /> } - idAria="fsRepositoryLocationDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryLocationDescription']} isInvalid={Boolean(hasErrors && settingErrors.location)} error={settingErrors.location} > @@ -119,13 +117,11 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="fsRepositoryCompressDescription" fullWidth > @@ -165,7 +161,6 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." 
/> } - idAria="fsRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -212,7 +206,6 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="fsRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -259,7 +251,6 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="fsRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['fsRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -306,13 +296,11 @@ export const FSSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="fsRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx index d15e0043b8c81f..5a34d3aac6f6b7 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/gcs_settings.tsx @@ -64,7 +64,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The name of the Google Cloud Storage client." /> } - idAria="gcsRepositoryClientDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryClientDescription']} isInvalid={Boolean(hasErrors && settingErrors.client)} error={settingErrors.client} > @@ -110,7 +108,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The name of the Google Cloud Storage bucket to use for snapshots." /> } - idAria="gcsRepositoryBucketDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryBucketDescription']} isInvalid={Boolean(hasErrors && settingErrors.bucket)} error={settingErrors.bucket} > @@ -156,7 +152,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The bucket path to the repository data." /> } - idAria="gcsRepositoryBasePathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryBasePathDescription']} isInvalid={Boolean(hasErrors && settingErrors.basePath)} error={settingErrors.basePath} > @@ -202,13 +196,11 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="gcsRepositoryCompressDescription" fullWidth > @@ -248,7 +240,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." 
/> } - idAria="gcsRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -295,7 +285,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="gcsRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -342,7 +330,6 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="gcsRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['gcsRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -389,13 +375,11 @@ export const GCSSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="gcsRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx index ae42b810bf0591..4ef662d645bea3 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/hdfs_settings.tsx @@ -79,7 +79,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The URI address for HDFS." /> } - idAria="hdfsRepositoryUriDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryUriDescription']} isInvalid={Boolean(hasErrors && settingErrors.uri)} error={settingErrors.uri} > @@ -108,7 +106,7 @@ export const HDFSSettings: React.FunctionComponent = ({ uri: e.target.value ? `hdfs://${e.target.value}` : '', }); }} - aria-describedby="hdfsRepositoryUriDescription hdfsRepositoryUriProtocolDescription" + aria-describedby="hdfsRepositoryUriProtocolDescription" data-test-subj="uriInput" /> @@ -132,7 +130,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The file path where data is stored." /> } - idAria="hdfsRepositoryPathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryPathDescription']} isInvalid={Boolean(hasErrors && settingErrors.path)} error={settingErrors.path} > @@ -178,13 +174,11 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Loads the default Hadoop configuration." /> } - idAria="hdfsRepositoryLoadDefaultsDescription" fullWidth > @@ -224,13 +218,11 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="hdfsRepositoryCompressDescription" fullWidth > @@ -270,7 +262,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." 
/> } - idAria="hdfsRepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -317,7 +307,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The Kerberos principal to use when connecting to a secured HDFS cluster." /> } - idAria="hdfsRepositorySecurityPrincipalDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositorySecurityPrincipalDescription']} isInvalid={Boolean(hasErrors && settingErrors.securityPrincipal)} error={settingErrors.securityPrincipal} > @@ -365,7 +353,6 @@ export const HDFSSettings: React.FunctionComponent = ({ /> } - idAria="hdfsRepositoryConfigurationDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryConfigurationDescription']} isInvalid={isConfInvalid} error={ = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="hdfsRepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -510,7 +494,6 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="hdfsRepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['hdfsRepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -557,13 +539,11 @@ export const HDFSSettings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="hdfsRepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx index 5241a554553951..a0cc0764659908 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/readonly_settings.tsx @@ -116,7 +116,6 @@ export const ReadonlySettings: React.FunctionComponent = ({ /> } - idAria="readonlyRepositoryUrlDescription" fullWidth >
@@ -130,7 +129,6 @@ export const ReadonlySettings: React.FunctionComponent = ({ /> } fullWidth - describedByIds={['readonlyRepositoryUrlDescription']} > = ({ /> } fullWidth - describedByIds={['readonlyRepositoryUrlDescription readonlyRepositoryUrlHelp']} + describedByIds={['readonlyRepositoryUrlHelp']} isInvalid={Boolean(hasErrors && settingErrors.url)} error={settingErrors.url} > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx index a897368ae7ca38..1a9902b42a931f 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/repository_form/type_settings/s3_settings.tsx @@ -93,7 +93,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The name of the AWS S3 client." /> } - idAria="s3RepositoryClientDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryClientDescription']} isInvalid={Boolean(hasErrors && settingErrors.client)} error={settingErrors.client} > @@ -139,7 +137,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The name of the AWS S3 bucket to use for snapshots." /> } - idAria="s3RepositoryBucketDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryBucketDescription']} isInvalid={Boolean(hasErrors && settingErrors.bucket)} error={settingErrors.bucket} > @@ -185,7 +181,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The bucket path to the repository data." /> } - idAria="s3RepositoryBasePathDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryBasePathDescription']} isInvalid={Boolean(hasErrors && settingErrors.basePath)} error={settingErrors.basePath} > @@ -231,13 +225,11 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Compresses the index mapping and setting files for snapshots. Data files are not compressed." /> } - idAria="s3RepositoryCompressDescription" fullWidth > @@ -277,7 +269,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Breaks files into smaller units when taking snapshots." /> } - idAria="s3RepositoryChunkSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryChunkSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.chunkSize)} error={settingErrors.chunkSize} helpText={textService.getSizeNotationHelpText()} @@ -324,13 +314,11 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Encrypts files on the server using AES256 algorithm." /> } - idAria="s3RepositoryServerSideEncryptionDescription" fullWidth > @@ -371,7 +359,6 @@ export const S3Settings: React.FunctionComponent = ({ to split the chunk into several parts and upload each in its own request." /> } - idAria="s3RepositoryBufferSizeDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryBufferSizeDescription']} isInvalid={Boolean(hasErrors && settingErrors.bufferSize)} error={settingErrors.bufferSize} helpText={textService.getSizeNotationHelpText()} @@ -418,7 +404,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The canned ACL to add to new S3 buckets and objects." 
/> } - idAria="s3RepositoryCannedAclDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryCannedAclDescription']} isInvalid={Boolean(hasErrors && settingErrors.cannedAcl)} error={settingErrors.cannedAcl} > @@ -465,7 +449,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The storage class for new objects in the S3 repository." /> } - idAria="s3RepositoryStorageClassDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryStorageClassDescription']} isInvalid={Boolean(hasErrors && settingErrors.storageClass)} error={settingErrors.storageClass} > @@ -512,7 +494,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The rate for creating snapshots for each node." /> } - idAria="s3RepositoryMaxSnapshotBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryMaxSnapshotBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxSnapshotBytesPerSec)} error={settingErrors.maxSnapshotBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -559,7 +539,6 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="The snapshot restore rate for each node." /> } - idAria="s3RepositoryMaxRestoreBytesDescription" fullWidth > = ({ /> } fullWidth - describedByIds={['s3RepositoryMaxRestoreBytesDescription']} isInvalid={Boolean(hasErrors && settingErrors.maxRestoreBytesPerSec)} error={settingErrors.maxRestoreBytesPerSec} helpText={textService.getSizeNotationHelpText()} @@ -606,13 +584,11 @@ export const S3Settings: React.FunctionComponent = ({ defaultMessage="Only one cluster should have write access to this repository. All other clusters should be read-only." /> } - idAria="s3RepositoryReadonlyDescription" fullWidth > diff --git a/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx b/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx index f5a3180adbd6e5..bd8a0650c087f9 100644 --- a/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx +++ b/x-pack/legacy/plugins/snapshot_restore/public/app/components/restore_snapshot_form/steps/step_logistics.tsx @@ -141,14 +141,9 @@ export const RestoreSnapshotStepLogistics: React.FunctionComponent = if they are closed and have the same number of shards as the snapshot index." /> } - idAria="stepLogisticsIndicesDescription" fullWidth > - + = defaultMessage="Renames indices on restore." /> } - idAria="stepLogisticsRenameIndicesDescription" fullWidth > - + = defaultMessage="Allows restore of indices that don’t have snapshots of all shards." /> } - idAria="stepLogisticsPartialDescription" fullWidth > - + = templates with the same name. Also restores persistent settings." 
/> } - idAria="stepLogisticsIncludeGlobalStateDescription" fullWidth > = ( }} /> } - idAria="stepSettingsIndexSettingsDescription" fullWidth > - + = ( /> } fullWidth - describedByIds={['stepSettingsIndexSettingsDescription']} isInvalid={Boolean(errors.indexSettings)} error={errors.indexSettings} helpText={ @@ -235,14 +229,9 @@ export const RestoreSnapshotStepSettings: React.FunctionComponent = ( }} /> } - idAria="stepSettingsIgnoreIndexSettingsDescription" fullWidth > - + diff --git a/x-pack/package.json b/x-pack/package.json index 43df763c22bdc6..e3bc8aa36373d1 100644 --- a/x-pack/package.json +++ b/x-pack/package.json @@ -176,7 +176,7 @@ "@elastic/apm-rum-react": "^0.3.2", "@elastic/datemath": "5.0.2", "@elastic/ems-client": "7.6.0", - "@elastic/eui": "18.3.0", + "@elastic/eui": "19.0.0", "@elastic/filesaver": "1.1.2", "@elastic/maki": "6.1.0", "@elastic/node-crypto": "^1.0.0", diff --git a/x-pack/plugins/endpoint/common/types.ts b/x-pack/plugins/endpoint/common/types.ts index 0dc3fc29ca8050..5ef9d22e4dd7b8 100644 --- a/x-pack/plugins/endpoint/common/types.ts +++ b/x-pack/plugins/endpoint/common/types.ts @@ -118,4 +118,4 @@ export interface EndpointMetadata { /** * The PageId type is used for the payload when firing userNavigatedToPage actions */ -export type PageId = 'alertsPage' | 'managementPage'; +export type PageId = 'alertsPage' | 'managementPage' | 'policyListPage'; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts b/x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts new file mode 100644 index 00000000000000..83f4bc1e793178 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/components/truncate_text.ts @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import styled from 'styled-components'; + +export const TruncateText = styled.div` + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; +`; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx index a86c647e771d41..7bb3b13525914f 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx +++ b/x-pack/plugins/endpoint/public/applications/endpoint/index.tsx @@ -14,6 +14,7 @@ import { Store } from 'redux'; import { appStoreFactory } from './store'; import { AlertIndex } from './view/alerts'; import { ManagementList } from './view/managing'; +import { PolicyList } from './view/policy'; /** * This module will be loaded asynchronously to reduce the bundle size of your plugin's main bundle. 
@@ -51,6 +52,7 @@ const AppRoot: React.FunctionComponent = React.memo(({ basename, st /> + ( diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts index 04c6cf7fc46340..d099c81317090d 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/action.ts @@ -7,5 +7,6 @@ import { ManagementAction } from './managing'; import { AlertAction } from './alerts'; import { RoutingAction } from './routing'; +import { PolicyListAction } from './policy_list'; -export type AppAction = ManagementAction | AlertAction | RoutingAction; +export type AppAction = ManagementAction | AlertAction | RoutingAction | PolicyListAction; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts index 3bbcc3f25a6d88..8fe61ae01d319a 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/index.ts @@ -17,6 +17,7 @@ import { CoreStart } from 'kibana/public'; import { appReducer } from './reducer'; import { alertMiddlewareFactory } from './alerts/middleware'; import { managementMiddlewareFactory } from './managing'; +import { policyListMiddlewareFactory } from './policy_list'; import { GlobalState } from '../types'; import { AppAction } from './action'; @@ -56,6 +57,10 @@ export const appStoreFactory = (coreStart: CoreStart): Store => { substateMiddlewareFactory( globalState => globalState.managementList, managementMiddlewareFactory(coreStart) + ), + substateMiddlewareFactory( + globalState => globalState.policyList, + policyListMiddlewareFactory(coreStart) ) ) ) diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts new file mode 100644 index 00000000000000..5ac2a4328b00a2 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/action.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { PolicyData } from '../../types'; + +interface ServerReturnedPolicyListData { + type: 'serverReturnedPolicyListData'; + payload: { + policyItems: PolicyData[]; + total: number; + pageSize: number; + pageIndex: number; + }; +} + +interface UserPaginatedPolicyListTable { + type: 'userPaginatedPolicyListTable'; + payload: { + pageSize: number; + pageIndex: number; + }; +} + +export type PolicyListAction = ServerReturnedPolicyListData | UserPaginatedPolicyListTable; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts new file mode 100644 index 00000000000000..62bdd28f30be16 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/fake_data.ts @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +// !!!! 
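// Illustration (assumption): `substateMiddlewareFactory` already exists in the endpoint store
// code and is not part of this diff; only its call shape is visible in the store/index.ts hunk
// above. A factory with that shape could be written roughly as follows -- it narrows `getState`
// to a single slice so slice middleware such as the policy list middleware never reads
// unrelated state. This is an editorial sketch, not the repository's implementation.
import { Dispatch, Middleware, MiddlewareAPI } from 'redux';

export const substateMiddlewareFactorySketch = <S, Substate>(
  selector: (state: S) => Substate,
  middleware: Middleware<{}, Substate>
): Middleware<{}, S> => {
  return api => {
    const substateApi: MiddlewareAPI<Dispatch, Substate> = {
      dispatch: api.dispatch,
      // Only the selected slice is ever visible to the wrapped middleware.
      getState: () => selector(api.getState()),
    };
    return middleware(substateApi);
  };
};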
Should be deleted when https://github.com/elastic/endpoint-app-team/issues/150 +// is implemented + +const dateOffsets = [ + 0, + 1000, + 300000, // 5 minutes + 3.6e6, // 1 hour + 86340000, // 23h, 59m + 9e7, // 25h + 9e7 * 5, // 5d +]; + +const randomNumbers = [5, 50, 500, 5000, 50000]; + +const getRandomDateIsoString = () => { + const randomIndex = Math.floor(Math.random() * Math.floor(dateOffsets.length)); + return new Date(Date.now() - dateOffsets[randomIndex]).toISOString(); +}; + +const getRandomNumber = () => { + const randomIndex = Math.floor(Math.random() * Math.floor(randomNumbers.length)); + return randomNumbers[randomIndex]; +}; + +export const getFakeDatasourceApiResponse = async (page: number, pageSize: number) => { + await new Promise(resolve => setTimeout(resolve, 500)); + + // Emulates the API response - see PR: + // https://github.com/elastic/kibana/pull/56567/files#diff-431549a8739efe0c56763f164c32caeeR25 + return { + items: Array.from({ length: pageSize }, (x, i) => ({ + name: `policy with some protections ${i + 1}`, + total: getRandomNumber(), + pending: getRandomNumber(), + failed: getRandomNumber(), + created_by: `admin ABC`, + created: getRandomDateIsoString(), + updated_by: 'admin 123', + updated: getRandomDateIsoString(), + })), + success: true, + total: pageSize * 10, + page, + perPage: pageSize, + }; +}; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts new file mode 100644 index 00000000000000..ae4a0868a68fec --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.test.ts @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
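// Sketch: the response produced by getFakeDatasourceApiResponse above, written out as an
// interface purely for readability. The real datasource API types are tracked in
// https://github.com/elastic/endpoint-app-team/issues/150 and may differ; this shape is
// inferred from the fake data only.
import { PolicyData } from '../../types';

interface FakeDatasourceApiResponse {
  items: PolicyData[];
  success: boolean;
  total: number;
  page: number;
  perPage: number;
}

// Example (illustrative) of paging through the fake API the same way the middleware does.
export async function fetchFakePolicyPage(
  pageIndex: number,
  pageSize: number
): Promise<FakeDatasourceApiResponse> {
  const { getFakeDatasourceApiResponse } = await import('./fake_data');
  return getFakeDatasourceApiResponse(pageIndex, pageSize);
}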
+ */ + +import { PolicyListState } from '../../types'; +import { applyMiddleware, createStore, Dispatch, Store } from 'redux'; +import { AppAction } from '../action'; +import { policyListReducer } from './reducer'; +import { policyListMiddlewareFactory } from './middleware'; +import { coreMock } from '../../../../../../../../src/core/public/mocks'; +import { CoreStart } from 'kibana/public'; +import { selectIsLoading } from './selectors'; + +describe('policy list store concerns', () => { + const sleep = () => new Promise(resolve => setTimeout(resolve, 1000)); + let fakeCoreStart: jest.Mocked; + let store: Store; + let getState: typeof store['getState']; + let dispatch: Dispatch; + + beforeEach(() => { + fakeCoreStart = coreMock.createStart({ basePath: '/mock' }); + store = createStore( + policyListReducer, + applyMiddleware(policyListMiddlewareFactory(fakeCoreStart)) + ); + getState = store.getState; + dispatch = store.dispatch; + }); + + test('it sets `isLoading` when `userNavigatedToPage`', async () => { + expect(selectIsLoading(getState())).toBe(false); + dispatch({ type: 'userNavigatedToPage', payload: 'policyListPage' }); + expect(selectIsLoading(getState())).toBe(true); + await sleep(); + expect(selectIsLoading(getState())).toBe(false); + }); + + test('it sets `isLoading` when `userPaginatedPolicyListTable`', async () => { + expect(selectIsLoading(getState())).toBe(false); + dispatch({ + type: 'userPaginatedPolicyListTable', + payload: { + pageSize: 10, + pageIndex: 1, + }, + }); + expect(selectIsLoading(getState())).toBe(true); + await sleep(); + expect(selectIsLoading(getState())).toBe(false); + }); + + test('it resets state on `userNavigatedFromPage` action', async () => { + dispatch({ + type: 'serverReturnedPolicyListData', + payload: { + policyItems: [], + pageIndex: 20, + pageSize: 50, + total: 200, + }, + }); + dispatch({ type: 'userNavigatedFromPage', payload: 'policyListPage' }); + expect(getState()).toEqual({ + policyItems: [], + isLoading: false, + pageIndex: 0, + pageSize: 10, + total: 0, + }); + }); +}); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts new file mode 100644 index 00000000000000..8086acc41d2bd7 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/index.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export { policyListReducer } from './reducer'; +export { PolicyListAction } from './action'; +export { policyListMiddlewareFactory } from './middleware'; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts new file mode 100644 index 00000000000000..f8e2b7d07c389c --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/middleware.ts @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { MiddlewareFactory, PolicyListState } from '../../types'; + +export const policyListMiddlewareFactory: MiddlewareFactory = coreStart => { + return ({ getState, dispatch }) => next => async action => { + next(action); + + if ( + (action.type === 'userNavigatedToPage' && action.payload === 'policyListPage') || + action.type === 'userPaginatedPolicyListTable' + ) { + const state = getState(); + let pageSize: number; + let pageIndex: number; + + if (action.type === 'userPaginatedPolicyListTable') { + pageSize = action.payload.pageSize; + pageIndex = action.payload.pageIndex; + } else { + pageSize = state.pageSize; + pageIndex = state.pageIndex; + } + + // Need load data from API and remove fake data below + // Refactor tracked via: https://github.com/elastic/endpoint-app-team/issues/150 + const { getFakeDatasourceApiResponse } = await import('./fake_data'); + const { items: policyItems, total } = await getFakeDatasourceApiResponse(pageIndex, pageSize); + + dispatch({ + type: 'serverReturnedPolicyListData', + payload: { + policyItems, + pageIndex, + pageSize, + total, + }, + }); + } + }; +}; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts new file mode 100644 index 00000000000000..77f536d413ae38 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/reducer.ts @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Reducer } from 'redux'; +import { PolicyListState } from '../../types'; +import { AppAction } from '../action'; + +const initialPolicyListState = (): PolicyListState => { + return { + policyItems: [], + isLoading: false, + pageIndex: 0, + pageSize: 10, + total: 0, + }; +}; + +export const policyListReducer: Reducer = ( + state = initialPolicyListState(), + action +) => { + if (action.type === 'serverReturnedPolicyListData') { + return { + ...state, + ...action.payload, + isLoading: false, + }; + } + + if ( + action.type === 'userPaginatedPolicyListTable' || + (action.type === 'userNavigatedToPage' && action.payload === 'policyListPage') + ) { + return { + ...state, + isLoading: true, + }; + } + + if (action.type === 'userNavigatedFromPage' && action.payload === 'policyListPage') { + return initialPolicyListState(); + } + + return state; +}; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts new file mode 100644 index 00000000000000..b9c2edbf5d55b1 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/policy_list/selectors.ts @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
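// Forward-looking sketch (assumption): once the refactor tracked in
// https://github.com/elastic/endpoint-app-team/issues/150 lands, the dynamic import of
// fake_data in the middleware above would presumably give way to a request through the
// injected Kibana `coreStart.http` client. The endpoint path and response shape below are
// placeholders, not the real Ingest Manager API, and `MiddlewareFactory` is assumed to be
// generic over the slice state, as its use above suggests.
import { MiddlewareFactory, PolicyData, PolicyListState } from '../../types';

export const policyListHttpMiddlewareSketch: MiddlewareFactory<PolicyListState> = coreStart => {
  return ({ dispatch, getState }) => next => async action => {
    next(action);
    if (action.type === 'userNavigatedToPage' && action.payload === 'policyListPage') {
      const { pageIndex, pageSize } = getState();
      // '/api/hypothetical/policies' is a placeholder path for illustration only.
      const response = (await coreStart.http.get('/api/hypothetical/policies', {
        query: { page: pageIndex + 1, perPage: pageSize },
      })) as { items: PolicyData[]; total: number };
      dispatch({
        type: 'serverReturnedPolicyListData',
        payload: { policyItems: response.items, pageIndex, pageSize, total: response.total },
      });
    }
  };
};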
+ */ + +import { PolicyListState } from '../../types'; + +export const selectPolicyItems = (state: PolicyListState) => state.policyItems; + +export const selectPageIndex = (state: PolicyListState) => state.pageIndex; + +export const selectPageSize = (state: PolicyListState) => state.pageSize; + +export const selectTotal = (state: PolicyListState) => state.total; + +export const selectIsLoading = (state: PolicyListState) => state.isLoading; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts index 7d738c266fae0c..3d9d21c0da9c3e 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/reducer.ts @@ -8,8 +8,10 @@ import { managementListReducer } from './managing'; import { AppAction } from './action'; import { alertListReducer } from './alerts'; import { GlobalState } from '../types'; +import { policyListReducer } from './policy_list'; export const appReducer: Reducer = combineReducers({ managementList: managementListReducer, alertList: alertListReducer, + policyList: policyListReducer, }); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts b/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts index 263a3f72d57d5a..9080af8c918175 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/store/routing/action.ts @@ -11,4 +11,9 @@ interface UserNavigatedToPage { readonly payload: PageId; } -export type RoutingAction = UserNavigatedToPage; +interface UserNavigatedFromPage { + readonly type: 'userNavigatedFromPage'; + readonly payload: PageId; +} + +export type RoutingAction = UserNavigatedToPage | UserNavigatedFromPage; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/types.ts b/x-pack/plugins/endpoint/public/applications/endpoint/types.ts index 02a7793fc38b06..6b20012592fd9b 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/types.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/types.ts @@ -29,9 +29,38 @@ export interface ManagementListPagination { pageSize: number; } +// REFACTOR to use Types from Ingest Manager - see: https://github.com/elastic/endpoint-app-team/issues/150 +export interface PolicyData { + name: string; + total: number; + pending: number; + failed: number; + created_by: string; + created: string; + updated_by: string; + updated: string; +} + +/** + * Policy list store state + */ +export interface PolicyListState { + /** Array of policy items */ + policyItems: PolicyData[]; + /** total number of policies */ + total: number; + /** Number of policies per page */ + pageSize: number; + /** page number (zero based) */ + pageIndex: number; + /** data is being retrieved from server */ + isLoading: boolean; +} + export interface GlobalState { readonly managementList: ManagementListState; readonly alertList: AlertListState; + readonly policyList: PolicyListState; } export type AlertListData = AlertResultList; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts new file mode 100644 index 00000000000000..d561da7574de07 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './policy_list'; diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts new file mode 100644 index 00000000000000..14558fb6504bb0 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_hooks.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { useSelector } from 'react-redux'; +import { GlobalState, PolicyListState } from '../../types'; + +export function usePolicyListSelector(selector: (state: PolicyListState) => TSelected) { + return useSelector((state: GlobalState) => selector(state.policyList)); +} diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx new file mode 100644 index 00000000000000..75ffa5e8806e99 --- /dev/null +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/policy/policy_list.tsx @@ -0,0 +1,232 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import React, { useCallback, useMemo } from 'react'; +import { + EuiPage, + EuiPageBody, + EuiPageContent, + EuiPageContentBody, + EuiPageContentHeader, + EuiPageContentHeaderSection, + EuiTitle, + EuiBasicTable, + EuiText, + EuiTableFieldDataColumnType, + EuiToolTip, +} from '@elastic/eui'; +import { i18n } from '@kbn/i18n'; +import { + FormattedMessage, + FormattedDate, + FormattedTime, + FormattedNumber, + FormattedRelative, +} from '@kbn/i18n/react'; +import { useDispatch } from 'react-redux'; +import styled from 'styled-components'; +import { usePageId } from '../use_page_id'; +import { + selectIsLoading, + selectPageIndex, + selectPageSize, + selectPolicyItems, + selectTotal, +} from '../../store/policy_list/selectors'; +import { usePolicyListSelector } from './policy_hooks'; +import { PolicyListAction } from '../../store/policy_list'; +import { PolicyData } from '../../types'; +import { TruncateText } from '../../components/truncate_text'; + +interface TableChangeCallbackArguments { + page: { index: number; size: number }; +} + +const TruncateTooltipText = styled(TruncateText)` + .euiToolTipAnchor { + display: block; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + } +`; + +const FormattedDateAndTime: React.FC<{ date: Date }> = ({ date }) => { + // If date is greater than or equal to 24h (ago), then show it as a date + // else, show it as relative to "now" + return Date.now() - date.getTime() >= 8.64e7 ? 
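// Editorial note on the threshold used just above: 8.64e7 ms is exactly 24 hours
// (24 * 60 * 60 * 1000). Dates at least a day old fall through to the absolute
// FormattedDate / FormattedTime branch; anything newer presumably renders via the
// imported FormattedRelative.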
( + <> + + {' @'} + + + ) : ( + <> + + + ); +}; + +const renderDate = (date: string, _item: PolicyData) => ( + + + + + +); + +const renderFormattedNumber = (value: number, _item: PolicyData) => ( + + + +); + +export const PolicyList = React.memo(() => { + usePageId('policyListPage'); + + const dispatch = useDispatch<(action: PolicyListAction) => void>(); + const policyItems = usePolicyListSelector(selectPolicyItems); + const pageIndex = usePolicyListSelector(selectPageIndex); + const pageSize = usePolicyListSelector(selectPageSize); + const totalItemCount = usePolicyListSelector(selectTotal); + const loading = usePolicyListSelector(selectIsLoading); + + const paginationSetup = useMemo(() => { + return { + pageIndex, + pageSize, + totalItemCount, + pageSizeOptions: [10, 20, 50], + hidePerPageOptions: false, + }; + }, [pageIndex, pageSize, totalItemCount]); + + const handleTableChange = useCallback( + ({ page: { index, size } }: TableChangeCallbackArguments) => { + dispatch({ + type: 'userPaginatedPolicyListTable', + payload: { + pageIndex: index, + pageSize: size, + }, + }); + }, + [dispatch] + ); + + const columns: Array> = useMemo( + () => [ + { + field: 'name', + name: i18n.translate('xpack.endpoint.policyList.nameField', { + defaultMessage: 'Policy Name', + }), + truncateText: true, + }, + { + field: 'total', + name: i18n.translate('xpack.endpoint.policyList.totalField', { + defaultMessage: 'Total', + }), + render: renderFormattedNumber, + dataType: 'number', + truncateText: true, + width: '15ch', + }, + { + field: 'pending', + name: i18n.translate('xpack.endpoint.policyList.pendingField', { + defaultMessage: 'Pending', + }), + render: renderFormattedNumber, + dataType: 'number', + truncateText: true, + width: '15ch', + }, + { + field: 'failed', + name: i18n.translate('xpack.endpoint.policyList.failedField', { + defaultMessage: 'Failed', + }), + render: renderFormattedNumber, + dataType: 'number', + truncateText: true, + width: '15ch', + }, + { + field: 'created_by', + name: i18n.translate('xpack.endpoint.policyList.createdByField', { + defaultMessage: 'Created By', + }), + truncateText: true, + }, + { + field: 'created', + name: i18n.translate('xpack.endpoint.policyList.createdField', { + defaultMessage: 'Created', + }), + render: renderDate, + truncateText: true, + }, + { + field: 'updated_by', + name: i18n.translate('xpack.endpoint.policyList.updatedByField', { + defaultMessage: 'Last Updated By', + }), + truncateText: true, + }, + { + field: 'updated', + name: i18n.translate('xpack.endpoint.policyList.updatedField', { + defaultMessage: 'Last Updated', + }), + render: renderDate, + truncateText: true, + }, + ], + [] + ); + + return ( + + + + + + +
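// Reconstruction sketch (assumption): the JSX returned by PolicyList did not survive in this
// copy of the diff. Based on the EUI imports above and the functional test later in this PR
// (test subjects 'policyListPage', 'policyViewTitle', 'policyTotalCount' and 'policyTable',
// with a "Policies" title), the layout is presumably close to the following; exact nesting,
// i18n ids and copy are guesses, not source code.
import React from 'react';
import {
  EuiPage,
  EuiPageBody,
  EuiPageContent,
  EuiPageContentBody,
  EuiPageContentHeader,
  EuiPageContentHeaderSection,
  EuiText,
  EuiTitle,
} from '@elastic/eui';

export const PolicyListLayoutSketch: React.FC<{
  totalItemCount: number;
  // The configured EuiBasicTable (items, columns, pagination, onChange, loading,
  // data-test-subj="policyTable") built in the component above.
  table: React.ReactNode;
}> = ({ totalItemCount, table }) => (
  <EuiPage data-test-subj="policyListPage">
    <EuiPageBody>
      <EuiPageContent>
        <EuiPageContentHeader>
          <EuiPageContentHeaderSection>
            <EuiTitle size="l">
              <h1 data-test-subj="policyViewTitle">Policies</h1>
            </EuiTitle>
          </EuiPageContentHeaderSection>
          <EuiPageContentHeaderSection>
            <EuiText color="subdued" data-test-subj="policyTotalCount">
              {`${totalItemCount} Policies`}
            </EuiText>
          </EuiPageContentHeaderSection>
        </EuiPageContentHeader>
        <EuiPageContentBody>{table}</EuiPageContentBody>
      </EuiPageContent>
    </EuiPageBody>
  </EuiPage>
);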

+ ); +}); diff --git a/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts b/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts index 9e241af4c0445c..49c39064c8d9a2 100644 --- a/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts +++ b/x-pack/plugins/endpoint/public/applications/endpoint/view/use_page_id.ts @@ -10,11 +10,19 @@ import { PageId } from '../../../../common/types'; import { RoutingAction } from '../store/routing'; /** - * Dispatches a 'userNavigatedToPage' action with the given 'pageId' as the action payload + * Dispatches a 'userNavigatedToPage' action with the given 'pageId' as the action payload. + * When the component is un-mounted, a `userNavigatedFromPage` action will be dispatched + * with the given `pageId`. + * + * @param pageId A page id */ export function usePageId(pageId: PageId) { const dispatch: (action: RoutingAction) => unknown = useDispatch(); useEffect(() => { dispatch({ type: 'userNavigatedToPage', payload: pageId }); + + return () => { + dispatch({ type: 'userNavigatedFromPage', payload: pageId }); + }; }, [dispatch, pageId]); } diff --git a/x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts b/x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts new file mode 100644 index 00000000000000..87e8fb0f521a9e --- /dev/null +++ b/x-pack/plugins/event_log/server/es/cluster_client_adapter.mock.ts @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { IClusterClientAdapter } from './cluster_client_adapter'; + +const createClusterClientMock = () => { + const mock: jest.Mocked = { + indexDocument: jest.fn(), + doesIlmPolicyExist: jest.fn(), + createIlmPolicy: jest.fn(), + doesIndexTemplateExist: jest.fn(), + createIndexTemplate: jest.fn(), + doesAliasExist: jest.fn(), + createIndex: jest.fn(), + }; + return mock; +}; + +export const clusterClientAdapterMock = { + create: createClusterClientMock, +}; diff --git a/x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts b/x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts new file mode 100644 index 00000000000000..ecefd4bfa271ef --- /dev/null +++ b/x-pack/plugins/event_log/server/es/cluster_client_adapter.test.ts @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
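// Usage sketch: a routed endpoint view opts into page-level routing actions by calling
// usePageId (changed earlier in this diff). Mount dispatches 'userNavigatedToPage' and the
// effect cleanup on unmount dispatches 'userNavigatedFromPage', which is what lets slice
// reducers such as policyListReducer reset their state when the user leaves the page.
// The component and import path below are illustrative only.
import React from 'react';
import { usePageId } from './use_page_id';

export const ExampleRoutedPage = React.memo(() => {
  usePageId('policyListPage'); // cleanup fires 'userNavigatedFromPage' with the same pageId
  return null; // real views render their page content here
});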
+ */ + +import { ClusterClient, Logger } from '../../../../../src/core/server'; +import { elasticsearchServiceMock, loggingServiceMock } from '../../../../../src/core/server/mocks'; +import { ClusterClientAdapter, IClusterClientAdapter } from './cluster_client_adapter'; + +type EsClusterClient = Pick, 'callAsInternalUser' | 'asScoped'>; + +let logger: Logger; +let clusterClient: EsClusterClient; +let clusterClientAdapter: IClusterClientAdapter; + +beforeEach(() => { + logger = loggingServiceMock.createLogger(); + clusterClient = elasticsearchServiceMock.createClusterClient(); + clusterClientAdapter = new ClusterClientAdapter({ + logger, + clusterClient, + }); +}); + +describe('indexDocument', () => { + test('should call cluster client with given doc', async () => { + await clusterClientAdapter.indexDocument({ args: true }); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('index', { + args: true, + }); + }); + + test('should throw error when cluster client throws an error', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.indexDocument({ args: true }) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"Fail"`); + }); +}); + +describe('doesIlmPolicyExist', () => { + const notFoundError = new Error('Not found') as any; + notFoundError.statusCode = 404; + + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.doesIlmPolicyExist('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('transport.request', { + method: 'GET', + path: '_ilm/policy/foo', + }); + }); + + test('should return false when 404 error is returned by Elasticsearch', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(notFoundError); + await expect(clusterClientAdapter.doesIlmPolicyExist('foo')).resolves.toEqual(false); + }); + + test('should throw error when error is not 404', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.doesIlmPolicyExist('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error checking existance of ilm policy: Fail"`); + }); + + test('should return true when no error is thrown', async () => { + await expect(clusterClientAdapter.doesIlmPolicyExist('foo')).resolves.toEqual(true); + }); +}); + +describe('createIlmPolicy', () => { + test('should call cluster client with given policy', async () => { + clusterClient.callAsInternalUser.mockResolvedValue({ success: true }); + await clusterClientAdapter.createIlmPolicy('foo', { args: true }); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('transport.request', { + method: 'PUT', + path: '_ilm/policy/foo', + body: { args: true }, + }); + }); + + test('should throw error when call cluster client throws', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.createIlmPolicy('foo', { args: true }) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error creating ilm policy: Fail"`); + }); +}); + +describe('doesIndexTemplateExist', () => { + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.doesIndexTemplateExist('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.existsTemplate', { + name: 'foo', + }); + }); + + test('should return true when call cluster returns true', async () => { + clusterClient.callAsInternalUser.mockResolvedValue(true); + await 
expect(clusterClientAdapter.doesIndexTemplateExist('foo')).resolves.toEqual(true); + }); + + test('should return false when call cluster returns false', async () => { + clusterClient.callAsInternalUser.mockResolvedValue(false); + await expect(clusterClientAdapter.doesIndexTemplateExist('foo')).resolves.toEqual(false); + }); + + test('should throw error when call cluster throws an error', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.doesIndexTemplateExist('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot( + `"error checking existance of index template: Fail"` + ); + }); +}); + +describe('createIndexTemplate', () => { + test('should call cluster with given template', async () => { + await clusterClientAdapter.createIndexTemplate('foo', { args: true }); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.putTemplate', { + name: 'foo', + create: true, + body: { args: true }, + }); + }); + + test(`should throw error if index template still doesn't exist after error is thrown`, async () => { + clusterClient.callAsInternalUser.mockRejectedValueOnce(new Error('Fail')); + clusterClient.callAsInternalUser.mockResolvedValueOnce(false); + await expect( + clusterClientAdapter.createIndexTemplate('foo', { args: true }) + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error creating index template: Fail"`); + }); + + test('should not throw error if index template exists after error is thrown', async () => { + clusterClient.callAsInternalUser.mockRejectedValueOnce(new Error('Fail')); + clusterClient.callAsInternalUser.mockResolvedValueOnce(true); + await clusterClientAdapter.createIndexTemplate('foo', { args: true }); + }); +}); + +describe('doesAliasExist', () => { + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.doesAliasExist('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.existsAlias', { + name: 'foo', + }); + }); + + test('should return true when call cluster returns true', async () => { + clusterClient.callAsInternalUser.mockResolvedValueOnce(true); + await expect(clusterClientAdapter.doesAliasExist('foo')).resolves.toEqual(true); + }); + + test('should return false when call cluster returns false', async () => { + clusterClient.callAsInternalUser.mockResolvedValueOnce(false); + await expect(clusterClientAdapter.doesAliasExist('foo')).resolves.toEqual(false); + }); + + test('should throw error when call cluster throws an error', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.doesAliasExist('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot( + `"error checking existance of initial index: Fail"` + ); + }); +}); + +describe('createIndex', () => { + test('should call cluster with proper arguments', async () => { + await clusterClientAdapter.createIndex('foo'); + expect(clusterClient.callAsInternalUser).toHaveBeenCalledWith('indices.create', { + index: 'foo', + }); + }); + + test('should throw error when not getting an error of type resource_already_exists_exception', async () => { + clusterClient.callAsInternalUser.mockRejectedValue(new Error('Fail')); + await expect( + clusterClientAdapter.createIndex('foo') + ).rejects.toThrowErrorMatchingInlineSnapshot(`"error creating initial index: Fail"`); + }); + + test(`shouldn't throw when an error of type resource_already_exists_exception is thrown`, async () => { + const err = new Error('Already 
exists') as any; + err.body = { + error: { + type: 'resource_already_exists_exception', + }, + }; + clusterClient.callAsInternalUser.mockRejectedValue(err); + await clusterClientAdapter.createIndex('foo'); + }); +}); diff --git a/x-pack/plugins/event_log/server/es/cluster_client_adapter.ts b/x-pack/plugins/event_log/server/es/cluster_client_adapter.ts new file mode 100644 index 00000000000000..c74eeacc9bb19c --- /dev/null +++ b/x-pack/plugins/event_log/server/es/cluster_client_adapter.ts @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { Logger, ClusterClient } from '../../../../../src/core/server'; + +export type EsClusterClient = Pick; +export type IClusterClientAdapter = PublicMethodsOf; + +export interface ConstructorOpts { + logger: Logger; + clusterClient: EsClusterClient; +} + +export class ClusterClientAdapter { + private readonly logger: Logger; + private readonly clusterClient: EsClusterClient; + + constructor(opts: ConstructorOpts) { + this.logger = opts.logger; + this.clusterClient = opts.clusterClient; + } + + public async indexDocument(doc: any): Promise { + await this.callEs('index', doc); + } + + public async doesIlmPolicyExist(policyName: string): Promise { + const request = { + method: 'GET', + path: `_ilm/policy/${policyName}`, + }; + try { + await this.callEs('transport.request', request); + } catch (err) { + if (err.statusCode === 404) return false; + throw new Error(`error checking existance of ilm policy: ${err.message}`); + } + return true; + } + + public async createIlmPolicy(policyName: string, policy: any): Promise { + const request = { + method: 'PUT', + path: `_ilm/policy/${policyName}`, + body: policy, + }; + try { + await this.callEs('transport.request', request); + } catch (err) { + throw new Error(`error creating ilm policy: ${err.message}`); + } + } + + public async doesIndexTemplateExist(name: string): Promise { + let result; + try { + result = await this.callEs('indices.existsTemplate', { name }); + } catch (err) { + throw new Error(`error checking existance of index template: ${err.message}`); + } + return result as boolean; + } + + public async createIndexTemplate(name: string, template: any): Promise { + const addTemplateParams = { + name, + create: true, + body: template, + }; + try { + await this.callEs('indices.putTemplate', addTemplateParams); + } catch (err) { + // The error message doesn't have a type attribute we can look to guarantee it's due + // to the template already existing (only long message) so we'll check ourselves to see + // if the template now exists. This scenario would happen if you startup multiple Kibana + // instances at the same time. 
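// Editorial note: the `create: true` flag above makes `indices.putTemplate` fail rather than
// silently overwrite an existing template, so losing the startup race to another Kibana
// instance surfaces as an error here; the existence re-check below is what distinguishes
// that benign case from a genuine failure.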
+ const existsNow = await this.doesIndexTemplateExist(name); + if (!existsNow) { + throw new Error(`error creating index template: ${err.message}`); + } + } + } + + public async doesAliasExist(name: string): Promise { + let result; + try { + result = await this.callEs('indices.existsAlias', { name }); + } catch (err) { + throw new Error(`error checking existance of initial index: ${err.message}`); + } + return result as boolean; + } + + public async createIndex(name: string): Promise { + try { + await this.callEs('indices.create', { index: name }); + } catch (err) { + if (err.body?.error?.type !== 'resource_already_exists_exception') { + throw new Error(`error creating initial index: ${err.message}`); + } + } + } + + private async callEs(operation: string, body?: any): Promise { + try { + this.debug(`callEs(${operation}) calls:`, body); + const result = await this.clusterClient.callAsInternalUser(operation, body); + this.debug(`callEs(${operation}) result:`, result); + return result; + } catch (err) { + this.debug(`callEs(${operation}) error:`, { + message: err.message, + statusCode: err.statusCode, + }); + throw err; + } + } + + private debug(message: string, object?: any) { + const objectString = object == null ? '' : JSON.stringify(object); + this.logger.debug(`esContext: ${message} ${objectString}`); + } +} diff --git a/x-pack/plugins/event_log/server/es/context.mock.ts b/x-pack/plugins/event_log/server/es/context.mock.ts index fb894ce6e77875..6581cd689e43d9 100644 --- a/x-pack/plugins/event_log/server/es/context.mock.ts +++ b/x-pack/plugins/event_log/server/es/context.mock.ts @@ -4,43 +4,25 @@ * you may not use this file except in compliance with the Elastic License. */ -import { Logger, ClusterClient } from '../../../../../src/core/server'; import { EsContext } from './context'; - -import { EsNames } from './names'; - -export type EsClusterClient = Pick; - -export interface EsError { - readonly statusCode: number; - readonly message: string; -} - -interface CreateMockEsContextParams { - logger: Logger; - esNames: EsNames; -} - -export function createMockEsContext(params: CreateMockEsContextParams): EsContext { - return new EsContextMock(params); -} - -class EsContextMock implements EsContext { - public logger: Logger; - public esNames: EsNames; - - constructor(params: CreateMockEsContextParams) { - this.logger = params.logger; - this.esNames = params.esNames; - } - - initialize() {} - - async waitTillReady(): Promise { - return true; - } - - async callEs(operation: string, body?: any): Promise { - return {}; - } -} +import { namesMock } from './names.mock'; +import { IClusterClientAdapter } from './cluster_client_adapter'; +import { loggingServiceMock } from '../../../../../src/core/server/mocks'; +import { clusterClientAdapterMock } from './cluster_client_adapter.mock'; + +const createContextMock = () => { + const mock: jest.Mocked & { + esAdapter: jest.Mocked; + } = { + logger: loggingServiceMock.createLogger(), + esNames: namesMock.create(), + initialize: jest.fn(), + waitTillReady: jest.fn(), + esAdapter: clusterClientAdapterMock.create(), + }; + return mock; +}; + +export const contextMock = { + create: createContextMock, +}; diff --git a/x-pack/plugins/event_log/server/es/context.ts b/x-pack/plugins/event_log/server/es/context.ts index b93c1892d02064..144f44ac8e5ea9 100644 --- a/x-pack/plugins/event_log/server/es/context.ts +++ b/x-pack/plugins/event_log/server/es/context.ts @@ -8,6 +8,7 @@ import { Logger, ClusterClient } from 'src/core/server'; import { EsNames, getEsNames } 
from './names'; import { initializeEs } from './init'; +import { ClusterClientAdapter, IClusterClientAdapter } from './cluster_client_adapter'; import { createReadySignal, ReadySignal } from '../lib/ready_signal'; export type EsClusterClient = Pick; @@ -15,9 +16,9 @@ export type EsClusterClient = Pick; - callEs(operation: string, body?: any): Promise; } export interface EsError { @@ -38,16 +39,19 @@ export interface EsContextCtorParams { class EsContextImpl implements EsContext { public readonly logger: Logger; public readonly esNames: EsNames; - private readonly clusterClient: EsClusterClient; + public esAdapter: IClusterClientAdapter; private readonly readySignal: ReadySignal; private initialized: boolean; constructor(params: EsContextCtorParams) { this.logger = params.logger; this.esNames = getEsNames(params.indexNameRoot); - this.clusterClient = params.clusterClient; this.readySignal = createReadySignal(); this.initialized = false; + this.esAdapter = new ClusterClientAdapter({ + logger: params.logger, + clusterClient: params.clusterClient, + }); } initialize() { @@ -73,27 +77,7 @@ class EsContextImpl implements EsContext { return await this.readySignal.wait(); } - async callEs(operation: string, body?: any): Promise { - try { - this.debug(`callEs(${operation}) calls:`, body); - const result = await this.clusterClient.callAsInternalUser(operation, body); - this.debug(`callEs(${operation}) result:`, result); - return result; - } catch (err) { - this.debug(`callEs(${operation}) error:`, { - message: err.message, - statusCode: err.statusCode, - }); - throw err; - } - } - private async _initialize() { await initializeEs(this); } - - private debug(message: string, object?: any) { - const objectString = object == null ? '' : JSON.stringify(object); - this.logger.debug(`esContext: ${message} ${objectString}`); - } } diff --git a/x-pack/plugins/event_log/server/es/documents.test.ts b/x-pack/plugins/event_log/server/es/documents.test.ts index 2dec23c61de2f8..7edca4b3943a65 100644 --- a/x-pack/plugins/event_log/server/es/documents.test.ts +++ b/x-pack/plugins/event_log/server/es/documents.test.ts @@ -21,23 +21,13 @@ describe('getIndexTemplate()', () => { const esNames = getEsNames('XYZ'); test('returns the correct details of the index template', () => { - const indexTemplate = getIndexTemplate(esNames, true); + const indexTemplate = getIndexTemplate(esNames); expect(indexTemplate.index_patterns).toEqual([esNames.indexPattern]); expect(indexTemplate.aliases[esNames.alias]).toEqual({}); expect(indexTemplate.settings.number_of_shards).toBeGreaterThanOrEqual(0); expect(indexTemplate.settings.number_of_replicas).toBeGreaterThanOrEqual(0); - expect(indexTemplate.mappings).toMatchObject({}); - }); - - test('returns correct index template bits for ilm when ilm is supported', () => { - const indexTemplate = getIndexTemplate(esNames, true); expect(indexTemplate.settings['index.lifecycle.name']).toBe(esNames.ilmPolicy); expect(indexTemplate.settings['index.lifecycle.rollover_alias']).toBe(esNames.alias); - }); - - test('returns correct index template bits for ilm when ilm is not supported', () => { - const indexTemplate = getIndexTemplate(esNames, false); - expect(indexTemplate.settings['index.lifecycle.name']).toBeUndefined(); - expect(indexTemplate.settings['index.lifecycle.rollover_alias']).toBeUndefined(); + expect(indexTemplate.mappings).toMatchObject({}); }); }); diff --git a/x-pack/plugins/event_log/server/es/documents.ts b/x-pack/plugins/event_log/server/es/documents.ts index 
dfc544f8a41cbf..09dd7383c4c5e3 100644 --- a/x-pack/plugins/event_log/server/es/documents.ts +++ b/x-pack/plugins/event_log/server/es/documents.ts @@ -8,7 +8,7 @@ import { EsNames } from './names'; import mappings from '../../generated/mappings.json'; // returns the body of an index template used in an ES indices.putTemplate call -export function getIndexTemplate(esNames: EsNames, ilmExists: boolean) { +export function getIndexTemplate(esNames: EsNames) { const indexTemplateBody: any = { index_patterns: [esNames.indexPattern], aliases: { @@ -23,11 +23,6 @@ export function getIndexTemplate(esNames: EsNames, ilmExists: boolean) { mappings, }; - if (!ilmExists) { - delete indexTemplateBody.settings['index.lifecycle.name']; - delete indexTemplateBody.settings['index.lifecycle.rollover_alias']; - } - return indexTemplateBody; } diff --git a/x-pack/plugins/event_log/server/es/init.test.ts b/x-pack/plugins/event_log/server/es/init.test.ts new file mode 100644 index 00000000000000..ad237e522c0a53 --- /dev/null +++ b/x-pack/plugins/event_log/server/es/init.test.ts @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { contextMock } from './context.mock'; +import { initializeEs } from './init'; + +describe('initializeEs', () => { + let esContext = contextMock.create(); + + beforeEach(() => { + esContext = contextMock.create(); + }); + + test(`should create ILM policy if it doesn't exist`, async () => { + esContext.esAdapter.doesIlmPolicyExist.mockResolvedValue(false); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIlmPolicyExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIlmPolicy).toHaveBeenCalled(); + }); + + test(`shouldn't create ILM policy if it exists`, async () => { + esContext.esAdapter.doesIlmPolicyExist.mockResolvedValue(true); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIlmPolicyExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIlmPolicy).not.toHaveBeenCalled(); + }); + + test(`should create index template if it doesn't exist`, async () => { + esContext.esAdapter.doesIndexTemplateExist.mockResolvedValue(false); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIndexTemplateExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndexTemplate).toHaveBeenCalled(); + }); + + test(`shouldn't create index template if it already exists`, async () => { + esContext.esAdapter.doesIndexTemplateExist.mockResolvedValue(true); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesIndexTemplateExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndexTemplate).not.toHaveBeenCalled(); + }); + + test(`should create initial index if it doesn't exist`, async () => { + esContext.esAdapter.doesAliasExist.mockResolvedValue(false); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesAliasExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndex).toHaveBeenCalled(); + }); + + test(`shouldn't create initial index if it already exists`, async () => { + esContext.esAdapter.doesAliasExist.mockResolvedValue(true); + + await initializeEs(esContext); + expect(esContext.esAdapter.doesAliasExist).toHaveBeenCalled(); + expect(esContext.esAdapter.createIndex).not.toHaveBeenCalled(); + }); +}); diff --git a/x-pack/plugins/event_log/server/es/init.ts 
b/x-pack/plugins/event_log/server/es/init.ts index d87f5bce034757..7094277f7aa9fd 100644 --- a/x-pack/plugins/event_log/server/es/init.ts +++ b/x-pack/plugins/event_log/server/es/init.ts @@ -23,25 +23,10 @@ export async function initializeEs(esContext: EsContext): Promise { async function initializeEsResources(esContext: EsContext) { const steps = new EsInitializationSteps(esContext); - let ilmExists: boolean; - // create the ilm policy, if required - ilmExists = await steps.doesIlmPolicyExist(); - if (!ilmExists) { - ilmExists = await steps.createIlmPolicy(); - } - - if (!(await steps.doesIndexTemplateExist())) { - await steps.createIndexTemplate({ ilmExists }); - } - - if (!(await steps.doesInitialIndexExist())) { - await steps.createInitialIndex(); - } -} - -interface AddTemplateOpts { - ilmExists: boolean; + await steps.createIlmPolicyIfNotExists(); + await steps.createIndexTemplateIfNotExists(); + await steps.createInitialIndexIfNotExists(); } class EsInitializationSteps { @@ -49,89 +34,35 @@ class EsInitializationSteps { this.esContext = esContext; } - async doesIlmPolicyExist(): Promise { - const request = { - method: 'GET', - path: `_ilm/policy/${this.esContext.esNames.ilmPolicy}`, - }; - try { - await this.esContext.callEs('transport.request', request); - } catch (err) { - if (err.statusCode === 404) return false; - // TODO: remove following once kibana user can access ilm - if (err.statusCode === 403) return false; - - throw new Error(`error checking existance of ilm policy: ${err.message}`); - } - return true; - } - - async createIlmPolicy(): Promise { - const request = { - method: 'PUT', - path: `_ilm/policy/${this.esContext.esNames.ilmPolicy}`, - body: getIlmPolicy(), - }; - try { - await this.esContext.callEs('transport.request', request); - } catch (err) { - // TODO: remove following once kibana user can access ilm - if (err.statusCode === 403) return false; - throw new Error(`error creating ilm policy: ${err.message}`); + async createIlmPolicyIfNotExists(): Promise { + const exists = await this.esContext.esAdapter.doesIlmPolicyExist( + this.esContext.esNames.ilmPolicy + ); + if (!exists) { + await this.esContext.esAdapter.createIlmPolicy( + this.esContext.esNames.ilmPolicy, + getIlmPolicy() + ); } - return true; } - async doesIndexTemplateExist(): Promise { - const name = this.esContext.esNames.indexTemplate; - let result; - try { - result = await this.esContext.callEs('indices.existsTemplate', { name }); - } catch (err) { - throw new Error(`error checking existance of index template: ${err.message}`); + async createIndexTemplateIfNotExists(): Promise { + const exists = await this.esContext.esAdapter.doesIndexTemplateExist( + this.esContext.esNames.indexTemplate + ); + if (!exists) { + const templateBody = getIndexTemplate(this.esContext.esNames); + await this.esContext.esAdapter.createIndexTemplate( + this.esContext.esNames.indexTemplate, + templateBody + ); } - return result as boolean; } - async createIndexTemplate(opts: AddTemplateOpts): Promise { - const templateBody = getIndexTemplate(this.esContext.esNames, opts.ilmExists); - const addTemplateParams = { - create: true, - name: this.esContext.esNames.indexTemplate, - body: templateBody, - }; - try { - await this.esContext.callEs('indices.putTemplate', addTemplateParams); - } catch (err) { - throw new Error(`error creating index template: ${err.message}`); + async createInitialIndexIfNotExists(): Promise { + const exists = await this.esContext.esAdapter.doesAliasExist(this.esContext.esNames.alias); + if (!exists) { + 
await this.esContext.esAdapter.createIndex(this.esContext.esNames.initialIndex); } } - - async doesInitialIndexExist(): Promise { - const name = this.esContext.esNames.alias; - let result; - try { - result = await this.esContext.callEs('indices.existsAlias', { name }); - } catch (err) { - throw new Error(`error checking existance of initial index: ${err.message}`); - } - return result as boolean; - } - - async createInitialIndex(): Promise { - const index = this.esContext.esNames.initialIndex; - try { - await this.esContext.callEs('indices.create', { index }); - } catch (err) { - throw new Error(`error creating initial index: ${err.message}`); - } - } - - debug(message: string) { - this.esContext.logger.debug(message); - } - - warn(message: string) { - this.esContext.logger.warn(message); - } } diff --git a/x-pack/plugins/event_log/server/es/names.mock.ts b/x-pack/plugins/event_log/server/es/names.mock.ts new file mode 100644 index 00000000000000..7b013a0d263da8 --- /dev/null +++ b/x-pack/plugins/event_log/server/es/names.mock.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import { EsNames } from './names'; + +const createNamesMock = () => { + const mock: jest.Mocked = { + base: '.kibana', + alias: '.kibana-event-log', + ilmPolicy: '.kibana-event-log-policy', + indexPattern: '.kibana-event-log-*', + initialIndex: '.kibana-event-log-000001', + indexTemplate: '.kibana-event-log-template', + }; + return mock; +}; + +export const namesMock = { + create: createNamesMock, +}; diff --git a/x-pack/plugins/event_log/server/event_log_service.test.ts b/x-pack/plugins/event_log/server/event_log_service.test.ts index c7e752d1a652bc..3b250b74620097 100644 --- a/x-pack/plugins/event_log/server/event_log_service.test.ts +++ b/x-pack/plugins/event_log/server/event_log_service.test.ts @@ -6,18 +6,14 @@ import { IEventLogConfig } from './types'; import { EventLogService } from './event_log_service'; -import { getEsNames } from './es/names'; -import { createMockEsContext } from './es/context.mock'; +import { contextMock } from './es/context.mock'; import { loggingServiceMock } from '../../../../src/core/server/logging/logging_service.mock'; const loggingService = loggingServiceMock.create(); const systemLogger = loggingService.get(); describe('EventLogService', () => { - const esContext = createMockEsContext({ - esNames: getEsNames('ABC'), - logger: systemLogger, - }); + const esContext = contextMock.create(); function getService(config: IEventLogConfig) { const { enabled, logEntries, indexEntries } = config; diff --git a/x-pack/plugins/event_log/server/event_logger.test.ts b/x-pack/plugins/event_log/server/event_logger.test.ts index c2de8d4dfd12bd..673bac4f396e1b 100644 --- a/x-pack/plugins/event_log/server/event_logger.test.ts +++ b/x-pack/plugins/event_log/server/event_logger.test.ts @@ -7,9 +7,8 @@ import { IEvent, IEventLogger, IEventLogService } from './index'; import { ECS_VERSION } from './types'; import { EventLogService } from './event_log_service'; -import { getEsNames } from './es/names'; import { EsContext } from './es/context'; -import { createMockEsContext } from './es/context.mock'; +import { contextMock } from './es/context.mock'; import { loggerMock, MockedLogger } from '../../../../src/core/server/logging/logger.mock'; import { delay } from './lib/delay'; import { 
EVENT_LOGGED_PREFIX } from './event_logger'; @@ -24,7 +23,7 @@ describe('EventLogger', () => { beforeEach(() => { systemLogger = loggerMock.create(); - esContext = createMockEsContext({ esNames: getEsNames('ABC'), logger: systemLogger }); + esContext = contextMock.create(); service = new EventLogService({ esContext, systemLogger, @@ -57,8 +56,6 @@ describe('EventLogger', () => { kibana: { server_uuid: '424-24-2424', }, - error: {}, - user: {}, }); const $timeStamp = event!['@timestamp']!; diff --git a/x-pack/plugins/event_log/server/event_logger.ts b/x-pack/plugins/event_log/server/event_logger.ts index 891abda947fc8a..f5149da069953c 100644 --- a/x-pack/plugins/event_log/server/event_logger.ts +++ b/x-pack/plugins/event_log/server/event_logger.ts @@ -171,7 +171,7 @@ function indexEventDoc(esContext: EsContext, doc: Doc): void { async function indexLogEventDoc(esContext: EsContext, doc: any) { esContext.logger.debug(`writing to event log: ${JSON.stringify(doc)}`); await esContext.waitTillReady(); - await esContext.callEs('index', doc); + await esContext.esAdapter.indexDocument(doc); esContext.logger.debug(`writing to event log complete`); } diff --git a/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap b/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap index 323629de7578d7..2a00c7ca5c347c 100644 --- a/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap +++ b/x-pack/plugins/security/public/management/roles/edit_role/privileges/es/__snapshots__/elasticsearch_privileges.test.tsx.snap @@ -26,8 +26,6 @@ exports[`it renders without crashing 1`] = `

} - fullWidth={false} - gutterSize="l" title={

} - titleSize="xs" >

} - fullWidth={false} - gutterSize="l" title={

} - titleSize="xs" > renders without crashing 1`] = ` />

} - fullWidth={false} - gutterSize="l" title={

renders without crashing 1`] = ` />

} - titleSize="xs" > {i18n.translate( @@ -323,7 +322,6 @@ export const JsonWatchEditSimulate = ({ } > {i18n.translate( @@ -361,7 +358,6 @@ export const JsonWatchEditSimulate = ({ > { + await pageObjects.common.navigateToUrlWithBrowserHistory('endpoint', '/policy'); + }); + + it('loads the Policy List Page', async () => { + await testSubjects.existOrFail('policyListPage'); + }); + it('displays page title', async () => { + const policyTitle = await testSubjects.getVisibleText('policyViewTitle'); + expect(policyTitle).to.equal('Policies'); + }); + it('shows policy count total', async () => { + const policyTotal = await testSubjects.getVisibleText('policyTotalCount'); + expect(policyTotal).to.equal('0 Policies'); + }); + it('includes policy list table', async () => { + await testSubjects.existOrFail('policyTable'); + }); + it('has correct table headers', async () => { + const allHeaderCells = await pageObjects.endpoint.tableHeaderVisibleText('policyTable'); + expect(allHeaderCells).to.eql([ + 'Policy Name', + 'Total', + 'Pending', + 'Failed', + 'Created By', + 'Created', + 'Last Updated By', + 'Last Updated', + ]); + }); + }); +} diff --git a/x-pack/test/functional/page_objects/endpoint_page.ts b/x-pack/test/functional/page_objects/endpoint_page.ts index a306a855a83eb3..54f537dd0e8c37 100644 --- a/x-pack/test/functional/page_objects/endpoint_page.ts +++ b/x-pack/test/functional/page_objects/endpoint_page.ts @@ -11,6 +11,25 @@ export function EndpointPageProvider({ getService }: FtrProviderContext) { const table = getService('table'); return { + /** + * Finds the Table with the given `selector` (test subject) and returns + * back an array containing the table's header column text + * + * @param selector + * @returns Promise + */ + async tableHeaderVisibleText(selector: string) { + const $ = await (await testSubjects.find('policyTable')).parseDomContent(); + return $('thead tr th') + .toArray() + .map(th => + $(th) + .text() + .replace(/ /g, '') + .trim() + ); + }, + async welcomeEndpointTitle() { return await testSubjects.getVisibleText('welcomeTitle'); }, diff --git a/x-pack/typings/@elastic/eui/index.d.ts b/x-pack/typings/@elastic/eui/index.d.ts index de9697f859fd79..688d1a2fa127d5 100644 --- a/x-pack/typings/@elastic/eui/index.d.ts +++ b/x-pack/typings/@elastic/eui/index.d.ts @@ -7,7 +7,6 @@ // TODO: Remove once typescript definitions are in EUI declare module '@elastic/eui' { - export const EuiDescribedFormGroup: React.FC; export const EuiCodeEditor: React.FC; export const Query: any; } diff --git a/yarn.lock b/yarn.lock index 5e439f074da279..33cb366e72f002 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1955,15 +1955,17 @@ tabbable "^1.1.0" uuid "^3.1.0" -"@elastic/eui@18.3.0": - version "18.3.0" - resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-18.3.0.tgz#e21c6246624f694e2ae1c7c1f1a11b612faf260a" - integrity sha512-Rkj1rTtDa6iZMUF7pxYRojku1sLXzTU0FK1D9i0XE3H//exy3VyTV6qUlbdkiKXjO7emrgQqfzKDeXT+ZYztgg== +"@elastic/eui@19.0.0": + version "19.0.0" + resolved "https://registry.yarnpkg.com/@elastic/eui/-/eui-19.0.0.tgz#cf7d644945c95997d442585cf614e853f173746e" + integrity sha512-8/USz56MYhu6bV4oecJct7tsdi0ktErOIFLobNmQIKdxDOni/KpttX6IHqxM7OuIWi1AEMXoIozw68+oyL/uKQ== dependencies: "@types/chroma-js" "^1.4.3" + "@types/enzyme" "^3.1.13" "@types/lodash" "^4.14.116" "@types/numeral" "^0.0.25" "@types/react-beautiful-dnd" "^10.1.0" + "@types/react-virtualized" "^9.18.7" chroma-js "^2.0.4" classnames "^2.2.5" highlight.js "^9.12.0" @@ -4389,10 +4391,10 @@ resolved 
"https://registry.yarnpkg.com/@types/elasticsearch/-/elasticsearch-5.0.33.tgz#b0fd37dc674f498223b6d68c313bdfd71f4d812b" integrity sha512-n/g9pqJEpE4fyUE8VvHNGtl7E2Wv8TCroNwfgAeJKRV4ghDENahtrAo1KMsFNIejBD2gDAlEUa4CM4oEEd8p9Q== -"@types/enzyme@^3.9.0": - version "3.9.3" - resolved "https://registry.yarnpkg.com/@types/enzyme/-/enzyme-3.9.3.tgz#d1029c0edd353d7b00f3924803eb88216460beed" - integrity sha512-jDKoZiiMA3lGO3skSO7dfqEHNvmiTLLV+PHD9EBQVlJANJvpY6qq1zzjRI24ZOtG7F+CS7BVWDXKewRmN8PjHQ== +"@types/enzyme@^3.1.13", "@types/enzyme@^3.9.0": + version "3.10.5" + resolved "https://registry.yarnpkg.com/@types/enzyme/-/enzyme-3.10.5.tgz#fe7eeba3550369eed20e7fb565bfb74eec44f1f0" + integrity sha512-R+phe509UuUYy9Tk0YlSbipRpfVtIzb/9BHn5pTEtjJTF5LXvUjrIQcZvNyANNEyFrd2YGs196PniNT1fgvOQA== dependencies: "@types/cheerio" "*" "@types/react" "*"