From 79193727bd2171c30165ce9648ce33085b5ac0f8 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 4 Jul 2018 15:54:22 +0300 Subject: [PATCH 01/19] SQL: Update CLI logo --- .../sql/sql-cli/src/main/resources/logo.txt | 26 +------------------ 1 file changed, 1 insertion(+), 25 deletions(-) diff --git a/x-pack/plugin/sql/sql-cli/src/main/resources/logo.txt b/x-pack/plugin/sql/sql-cli/src/main/resources/logo.txt index 0229fa9b335b7..2880a1d41534d 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/resources/logo.txt +++ b/x-pack/plugin/sql/sql-cli/src/main/resources/logo.txt @@ -1,25 +1 @@ - .sssssss.` .sssssss. - .:sXXXXXXXXXXo` `ohXXXXXXXXXho. - .yXXXXXXXXXXXXXXo` `oXXXXXXXXXXXXXXX- -.XXXXXXXXXXXXXXXXXXo` `oXXXXXXXXXXXXXXXXXX. -.XXXXXXXXXXXXXXXXXXXXo. .oXXXXXXXXXXXXXXXXXXXXh -.XXXXXXXXXXXXXXXXXXXXXXo``oXXXXXXXXXXXXXXXXXXXXXXy -`yXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX. - `oXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXXXXXo` - .XXXXXXXXXXXXXXXXXXXXXXXXXo` - .oXXXXXXXXXXXXXXXXXXXXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXo` `odo` - `oXXXXXXXXXXXXXXXXXXXXXXXXo` `oXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXo` `oXXXXXXXXXo` - `oXXXXXXXXXXXXXXXXXXXXXXXXo` `oXXXXXXXXXXXXXo` -`yXXXXXXXXXXXXXXXXXXXXXXXo` oXXXXXXXXXXXXXXXXX. -.XXXXXXXXXXXXXXXXXXXXXXo` `oXXXXXXXXXXXXXXXXXXXy -.XXXXXXXXXXXXXXXXXXXXo` /XXXXXXXXXXXXXXXXXXXXX -.XXXXXXXXXXXXXXXXXXo` `oXXXXXXXXXXXXXXXXXX- - -XXXXXXXXXXXXXXXo` `oXXXXXXXXXXXXXXXo` - .oXXXXXXXXXXXo` `oXXXXXXXXXXXo. - `.sshXXyso` SQL `.sshXhss.` \ No newline at end of file + Elasticsearch SQL \ No newline at end of file From 4328470dd80468cb411cce1bd84e8e8f27ab8295 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 4 Jul 2018 15:46:59 +0200 Subject: [PATCH 02/19] Watcher: Ensure correct method is used to read secure settings (#31753) As SecureSetting is extended from Setting, you can easily accidentally use `SecureSetting.simpleString()` to read a secure setting instead of `SecureSetting.secureString()`. This commit changes this behaviour in some watcher notification services. 
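For illustration only (not part of the original change), a minimal sketch of the two patterns, assuming the Elasticsearch `Setting`/`SecureSetting` API and a hypothetical `xpack.notification.example.account.` prefix: the broken variant calls `simpleString()`, which SecureSetting merely inherits from Setting, so the value is resolved against the regular node settings; the fixed variant calls `SecureSetting.secureString()`, which is backed by the keystore.

```
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;

public class SecureSettingExample {

    // Broken: SecureSetting.simpleString(...) resolves to the inherited Setting.simpleString(...),
    // i.e. a plain string setting read from the regular settings, not from the secure keystore.
    static final Setting.AffixSetting<String> BROKEN_SECURE_TOKEN =
            Setting.affixKeySetting("xpack.notification.example.account.", "secure_token",
                    (key) -> Setting.simpleString(key, Setting.Property.Dynamic, Setting.Property.NodeScope,
                            Setting.Property.Filtered));

    // Fixed: SecureSetting.secureString(...) declares a keystore-backed setting that yields a SecureString.
    static final Setting.AffixSetting<SecureString> SECURE_TOKEN =
            Setting.affixKeySetting("xpack.notification.example.account.", "secure_token",
                    (key) -> SecureSetting.secureString(key, null));
}
```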
--- .../notification/hipchat/HipChatService.java | 6 +++--- .../watcher/notification/jira/JiraService.java | 14 ++++++++------ .../notification/pagerduty/PagerDutyService.java | 5 +++-- .../watcher/notification/slack/SlackService.java | 5 +++-- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java index ebbcdd5662c20..ca970d5597ba1 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -35,10 +36,9 @@ public class HipChatService extends NotificationService { (key) -> Setting.simpleString(key, Setting.Property.Dynamic, Setting.Property.NodeScope, Setting.Property.Filtered, Setting.Property.Deprecated)); - private static final Setting.AffixSetting SETTING_AUTH_TOKEN_SECURE = + private static final Setting.AffixSetting SETTING_AUTH_TOKEN_SECURE = Setting.affixKeySetting("xpack.notification.hipchat.account.", "secure_auth_token", - (key) -> SecureSetting.simpleString(key, Setting.Property.Dynamic, Setting.Property.NodeScope, - Setting.Property.Filtered)); + (key) -> SecureSetting.secureString(key, null)); private static final Setting.AffixSetting SETTING_PROFILE = Setting.affixKeySetting("xpack.notification.hipchat.account.", "profile", diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java index ad9652ae2083d..3ccff775051a4 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.watcher.notification.jira; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -41,17 +43,17 @@ public class JiraService extends NotificationService { Setting.affixKeySetting("xpack.notification.jira.account.", "password", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered, Property.Deprecated)); - private static final Setting.AffixSetting SETTING_SECURE_USER = + private static final Setting.AffixSetting SETTING_SECURE_USER = Setting.affixKeySetting("xpack.notification.jira.account.", "secure_user", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + (key) -> SecureSetting.secureString(key, null)); - private static final Setting.AffixSetting SETTING_SECURE_URL = + private static final Setting.AffixSetting SETTING_SECURE_URL = 
Setting.affixKeySetting("xpack.notification.jira.account.", "secure_url", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + (key) -> SecureSetting.secureString(key, null)); - private static final Setting.AffixSetting SETTING_SECURE_PASSWORD = + private static final Setting.AffixSetting SETTING_SECURE_PASSWORD = Setting.affixKeySetting("xpack.notification.jira.account.", "secure_password", - (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + (key) -> SecureSetting.secureString(key, null)); private static final Setting.AffixSetting SETTING_DEFAULTS = Setting.affixKeySetting("xpack.notification.jira.account.", "issue_defaults", diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java index 51be23d5b63ea..21c2f1fefb1a9 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -28,9 +29,9 @@ public class PagerDutyService extends NotificationService { Setting.affixKeySetting("xpack.notification.pagerduty.account.", "service_api_key", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered, Property.Deprecated)); - private static final Setting.AffixSetting SETTING_SECURE_SERVICE_API_KEY = + private static final Setting.AffixSetting SETTING_SECURE_SERVICE_API_KEY = Setting.affixKeySetting("xpack.notification.pagerduty.account.", "secure_service_api_key", - (key) -> SecureSetting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + (key) -> SecureSetting.secureString(key, null)); private static final Setting.AffixSetting SETTING_DEFAULTS = Setting.affixKeySetting("xpack.notification.pagerduty.account.", "event_defaults", diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java index c784be1d4f021..d648501a5f8d6 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -28,9 +29,9 @@ public class SlackService extends NotificationService { Setting.affixKeySetting("xpack.notification.slack.account.", "url", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered, Property.Deprecated)); - private static final Setting.AffixSetting 
SETTING_URL_SECURE = + private static final Setting.AffixSetting SETTING_URL_SECURE = Setting.affixKeySetting("xpack.notification.slack.account.", "secure_url", - (key) -> SecureSetting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + (key) -> SecureSetting.secureString(key, null)); private static final Setting.AffixSetting SETTING_DEFAULTS = Setting.affixKeySetting("xpack.notification.slack.account.", "message_defaults", From 0a2ef59c41330bed98af1625c80f8ff1e03b3541 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 4 Jul 2018 17:55:53 +0200 Subject: [PATCH 03/19] Watcher: Fix check for currently executed watches (#31137) The ack watch action has a check for currently executed watches, to make sure that currently running watches cannot be acknowledged. This check only checked on the coordinating node for watches being executed, but should have checked the whole cluster using a WatcherStatsRequest, which is being switched to in this commit. --- .../actions/ack/TransportAckWatchAction.java | 111 +++++++++--------- .../ack/TransportAckWatchActionTests.java | 37 ++++-- 2 files changed, 86 insertions(+), 62 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 8c056d0dcb8be..98c98ca1b537b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -25,13 +25,13 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; -import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionSnapshot; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchAction; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchResponse; +import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsAction; +import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsRequest; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.core.watcher.watch.WatchField; -import org.elasticsearch.xpack.watcher.execution.ExecutionService; import org.elasticsearch.xpack.watcher.transport.actions.WatcherTransportAction; import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.joda.time.DateTime; @@ -49,83 +49,86 @@ public class TransportAckWatchAction extends WatcherTransportAction listener) { - // if the watch to be acked is running currently, reject this request - List snapshots = executionService.currentExecutions(); - boolean isWatchRunning = snapshots.stream().anyMatch(s -> s.watchId().equals(request.getWatchId())); - if (isWatchRunning) { - listener.onFailure(new ElasticsearchStatusException("watch[{}] is running currently, cannot ack until finished", + WatcherStatsRequest watcherStatsRequest = new WatcherStatsRequest(); + watcherStatsRequest.includeCurrentWatches(true); + + executeAsyncWithOrigin(client, WATCHER_ORIGIN, WatcherStatsAction.INSTANCE, watcherStatsRequest, ActionListener.wrap(response -> { + boolean isWatchRunning = response.getNodes().stream() + 
.anyMatch(node -> node.getSnapshots().stream().anyMatch(snapshot -> snapshot.watchId().equals(request.getWatchId()))); + if (isWatchRunning) { + listener.onFailure(new ElasticsearchStatusException("watch[{}] is running currently, cannot ack until finished", RestStatus.CONFLICT, request.getWatchId())); - return; - } - - GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()) - .preference(Preference.LOCAL.type()).realtime(true); - - executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, getRequest, - ActionListener.wrap((response) -> { - if (response.isExists() == false) { - listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId())); - } else { - DateTime now = new DateTime(clock.millis(), UTC); - Watch watch = parser.parseWithSecrets(request.getWatchId(), true, response.getSourceAsBytesRef(), + } else { + GetRequest getRequest = new GetRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()) + .preference(Preference.LOCAL.type()).realtime(true); + + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, getRequest, + ActionListener.wrap(getResponse -> { + if (getResponse.isExists() == false) { + listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId())); + } else { + DateTime now = new DateTime(clock.millis(), UTC); + Watch watch = parser.parseWithSecrets(request.getWatchId(), true, getResponse.getSourceAsBytesRef(), now, XContentType.JSON); - watch.version(response.getVersion()); - watch.status().version(response.getVersion()); - String[] actionIds = request.getActionIds(); - if (actionIds == null || actionIds.length == 0) { - actionIds = new String[]{WatchField.ALL_ACTIONS_ID}; - } + watch.version(getResponse.getVersion()); + watch.status().version(getResponse.getVersion()); + String[] actionIds = request.getActionIds(); + if (actionIds == null || actionIds.length == 0) { + actionIds = new String[]{WatchField.ALL_ACTIONS_ID}; + } - // exit early in case nothing changes - boolean isChanged = watch.ack(now, actionIds); - if (isChanged == false) { - listener.onResponse(new AckWatchResponse(watch.status())); - return; - } + // exit early in case nothing changes + boolean isChanged = watch.ack(now, actionIds); + if (isChanged == false) { + listener.onResponse(new AckWatchResponse(watch.status())); + return; + } - UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()); - // this may reject this action, but prevents concurrent updates from a watch execution - updateRequest.version(response.getVersion()); - updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - XContentBuilder builder = jsonBuilder(); - builder.startObject() + UpdateRequest updateRequest = new UpdateRequest(Watch.INDEX, Watch.DOC_TYPE, request.getWatchId()); + // this may reject this action, but prevents concurrent updates from a watch execution + updateRequest.version(getResponse.getVersion()); + updateRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + XContentBuilder builder = jsonBuilder(); + builder.startObject() .startObject(WatchField.STATUS.getPreferredName()) .startObject("actions"); - List actionIdsAsList = Arrays.asList(actionIds); - boolean updateAll = actionIdsAsList.contains("_all"); - for (ActionWrapper actionWrapper : watch.actions()) { - if (updateAll || actionIdsAsList.contains(actionWrapper.id())) { - builder.startObject(actionWrapper.id()) + List 
actionIdsAsList = Arrays.asList(actionIds); + boolean updateAll = actionIdsAsList.contains("_all"); + for (ActionWrapper actionWrapper : watch.actions()) { + if (updateAll || actionIdsAsList.contains(actionWrapper.id())) { + builder.startObject(actionWrapper.id()) .field("ack", watch.status().actionStatus(actionWrapper.id()).ackStatus(), ToXContent.EMPTY_PARAMS) .endObject(); + } } - } - builder.endObject().endObject().endObject(); - updateRequest.doc(builder); + builder.endObject().endObject().endObject(); + updateRequest.doc(builder); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, updateRequest, + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, updateRequest, ActionListener.wrap( - (updateResponse) -> listener.onResponse(new AckWatchResponse(watch.status())), - listener::onFailure), client::update); - } - }, listener::onFailure), client::get); + (updateResponse) -> listener.onResponse(new AckWatchResponse(watch.status())), + listener::onFailure), client::update); + } + }, listener::onFailure), client::get); + + } + + }, listener::onFailure)); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java index ea4d70b95c2e3..0121d07616065 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java @@ -6,11 +6,15 @@ package org.elasticsearch.xpack.watcher.transport.actions.ack; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -20,11 +24,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.watcher.WatcherMetaData; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionSnapshot; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.ack.AckWatchResponse; +import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsAction; +import org.elasticsearch.xpack.core.watcher.transport.actions.stats.WatcherStatsResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; -import org.elasticsearch.xpack.watcher.execution.ExecutionService; import org.elasticsearch.xpack.watcher.watch.WatchParser; import org.junit.Before; @@ -34,6 +40,7 @@ import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static 
org.mockito.Mockito.when; @@ -41,7 +48,6 @@ public class TransportAckWatchActionTests extends ESTestCase { private TransportAckWatchAction action; - private ExecutionService executionService; private Client client; @Before @@ -51,11 +57,10 @@ public void setupAction() { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); when(threadPool.getThreadContext()).thenReturn(threadContext); WatchParser watchParser = mock(WatchParser.class); - executionService = mock(ExecutionService.class); client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); action = new TransportAckWatchAction(Settings.EMPTY, transportService, new ActionFilters(Collections.emptySet()), - Clock.systemUTC(), new XPackLicenseState(Settings.EMPTY), watchParser, executionService, client); + Clock.systemUTC(), new XPackLicenseState(Settings.EMPTY), watchParser, client); } public void testWatchNotFound() { @@ -67,6 +72,13 @@ public void testWatchNotFound() { return null; }).when(client).get(anyObject(), anyObject()); + doAnswer(invocation -> { + ContextPreservingActionListener listener = (ContextPreservingActionListener) invocation.getArguments()[2]; + listener.onResponse(new WatcherStatsResponse(new ClusterName("clusterName"), new WatcherMetaData(false), + Collections.emptyList(), Collections.emptyList())); + return null; + }).when(client).execute(eq(WatcherStatsAction.INSTANCE), anyObject(), anyObject()); + AckWatchRequest ackWatchRequest = new AckWatchRequest(watchId); PlainActionFuture listener = PlainActionFuture.newFuture(); action.doExecute(ackWatchRequest, listener); @@ -78,9 +90,18 @@ public void testWatchNotFound() { public void testThatWatchCannotBeAckedWhileRunning() { String watchId = "my_watch_id"; - WatchExecutionSnapshot snapshot = mock(WatchExecutionSnapshot.class); - when(snapshot.watchId()).thenReturn(watchId); - when(executionService.currentExecutions()).thenReturn(Collections.singletonList(snapshot)); + + doAnswer(invocation -> { + ContextPreservingActionListener listener = (ContextPreservingActionListener) invocation.getArguments()[2]; + DiscoveryNode discoveryNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + WatcherStatsResponse.Node node = new WatcherStatsResponse.Node(discoveryNode); + WatchExecutionSnapshot snapshot = mock(WatchExecutionSnapshot.class); + when(snapshot.watchId()).thenReturn(watchId); + node.setSnapshots(Collections.singletonList(snapshot)); + listener.onResponse(new WatcherStatsResponse(new ClusterName("clusterName"), + new WatcherMetaData(false), Collections.singletonList(node), Collections.emptyList())); + return null; + }).when(client).execute(eq(WatcherStatsAction.INSTANCE), anyObject(), anyObject()); AckWatchRequest ackWatchRequest = new AckWatchRequest(watchId); PlainActionFuture listener = PlainActionFuture.newFuture(); @@ -91,4 +112,4 @@ public void testThatWatchCannotBeAckedWhileRunning() { assertThat(e.getMessage(), is("watch[my_watch_id] is running currently, cannot ack until finished")); assertThat(e.status(), is(RestStatus.CONFLICT)); } -} \ No newline at end of file +} From cf2295b4084e0197df73bd0b0c4bcb012e92f31c Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 5 Jul 2018 03:24:01 +0000 Subject: [PATCH 04/19] Add JDK11 support and enable in CI (#31644) * Upgrade bouncycastle Required to fix `bcprov-jdk15on-1.55.jar; invalid manifest format ` on jdk 11 * Downgrade bouncycastle to avoid invalid manifest * Add checksum for new jars * Update tika permissions for jdk 11 * Mute test failing on jdk 11 * 
Add JDK11 to CI * Thread#stop(Throwable) was removed http://mail.openjdk.java.net/pipermail/core-libs-dev/2018-June/053536.html * Disable failing tests #31456 * Temporarily disable doc tests To see if there are other failures on JDK11 * Only blacklist specific doc tests * Disable only failing tests in ingest attachment plugin * Mute failing HDFS tests #31498 * Mute failing lang-painless tests #31500 * Fix backwards compatibility builds Fix JAVA version to 10 for ES 6.3 * Add 6.x to bwc -> java10 * Prefix out and err from buildBwcVersion for readability ``` > Task :distribution:bwc:next-bugfix-snapshot:buildBwcVersion [bwc] :buildSrc:compileJava [bwc] WARNING: An illegal reflective access operation has occurred [bwc] WARNING: Illegal reflective access by org.codehaus.groovy.reflection.CachedClass (file:/home/alpar/.gradle/wrapper/dists/gradle-4.5-all/cg9lyzfg3iwv6fa00os9gcgj4/gradle-4.5/lib/groovy-all-2.4.12.jar) to method java.lang.Object.finalize() [bwc] WARNING: Please consider reporting this to the maintainers of org.codehaus.groovy.reflection.CachedClass [bwc] WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations [bwc] WARNING: All illegal access operations will be denied in a future release [bwc] :buildSrc:compileGroovy [bwc] :buildSrc:writeVersionProperties [bwc] :buildSrc:processResources [bwc] :buildSrc:classes [bwc] :buildSrc:jar ``` * Also set RUNTIME_JAVA_HOME for bwcBuild So that we can make sure it's not too new for the build to understand. * Align bouncycastle dependency * fix painless array tests closes #31500 * Update jar checksums * Keep 8/10 runtime/compile until consensus builds on 11 * Only skip failing tests if running on Java 11 * Failures are dependent on compile java version not runtime * Condition doc test exceptions on compiler java version as well * Disable hdfs tests based on runtime java * Set runtime java to minimum supported for bwc * PR review * Add comment with ticket for forbidden apis --- .ci/matrix-build-javas.yml | 1 + .ci/matrix-runtime-javas.yml | 1 + .../resources/forbidden/jdk-signatures.txt | 3 +- distribution/bwc/build.gradle | 36 +++++++++++++++++-- docs/build.gradle | 9 +++++ .../elasticsearch/painless/ArrayTests.java | 7 +++- plugins/ingest-attachment/build.gradle | 12 ++++++- .../licenses/bcmail-jdk15on-1.55.jar.sha1 | 1 - .../licenses/bcmail-jdk15on-1.59.jar.sha1 | 1 + .../licenses/bcpkix-jdk15on-1.55.jar.sha1 | 1 - .../licenses/bcpkix-jdk15on-1.59.jar.sha1 | 1 + .../licenses/bcprov-jdk15on-1.55.jar.sha1 | 1 - .../licenses/bcprov-jdk15on-1.59.jar.sha1 | 1 + .../ingest/attachment/TikaImpl.java | 11 +++--- .../attachment/AttachmentProcessorTests.java | 2 ++ plugins/repository-hdfs/build.gradle | 21 ++++++++++- .../repositories/hdfs/HdfsTests.java | 12 ++++--- x-pack/plugin/core/build.gradle | 4 +-- .../licenses/bcpkix-jdk15on-1.58.jar.sha1 | 1 - .../licenses/bcpkix-jdk15on-1.59.jar.sha1 | 1 + .../licenses/bcprov-jdk15on-1.58.jar.sha1 | 1 - .../licenses/bcprov-jdk15on-1.59.jar.sha1 | 1 + x-pack/plugin/security/build.gradle | 4 +-- 23 files changed, 107 insertions(+), 26 deletions(-) delete mode 100644 plugins/ingest-attachment/licenses/bcmail-jdk15on-1.55.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/bcmail-jdk15on-1.59.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.55.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.59.jar.sha1 delete mode 100644 plugins/ingest-attachment/licenses/bcprov-jdk15on-1.55.jar.sha1 create
mode 100644 plugins/ingest-attachment/licenses/bcprov-jdk15on-1.59.jar.sha1 delete mode 100644 x-pack/plugin/core/licenses/bcpkix-jdk15on-1.58.jar.sha1 create mode 100644 x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 delete mode 100644 x-pack/plugin/core/licenses/bcprov-jdk15on-1.58.jar.sha1 create mode 100644 x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 diff --git a/.ci/matrix-build-javas.yml b/.ci/matrix-build-javas.yml index 17aa4b0bf222a..bbb61b8eb6df0 100644 --- a/.ci/matrix-build-javas.yml +++ b/.ci/matrix-build-javas.yml @@ -7,3 +7,4 @@ ES_BUILD_JAVA: - java10 + - java11 diff --git a/.ci/matrix-runtime-javas.yml b/.ci/matrix-runtime-javas.yml index 72282ca805afd..737e15ada208a 100644 --- a/.ci/matrix-runtime-javas.yml +++ b/.ci/matrix-runtime-javas.yml @@ -8,3 +8,4 @@ ES_RUNTIME_JAVA: - java8 - java10 + - java11 diff --git a/buildSrc/src/main/resources/forbidden/jdk-signatures.txt b/buildSrc/src/main/resources/forbidden/jdk-signatures.txt index b17495db6bfb8..3ae2e4b609427 100644 --- a/buildSrc/src/main/resources/forbidden/jdk-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/jdk-signatures.txt @@ -88,7 +88,8 @@ java.lang.Thread#getAllStackTraces() @defaultMessage Stopping threads explicitly leads to inconsistent states. Use interrupt() instead. java.lang.Thread#stop() -java.lang.Thread#stop(java.lang.Throwable) +# uncomment when https://github.com/elastic/elasticsearch/issues/31715 is fixed +# java.lang.Thread#stop(java.lang.Throwable) @defaultMessage Please do not terminate the application java.lang.System#exit(int) diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 42412c6230fa4..b84bf1df2fe8e 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -18,12 +18,14 @@ */ + import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version -import static org.elasticsearch.gradle.BuildPlugin.getJavaHome +import java.nio.charset.StandardCharsets +import static org.elasticsearch.gradle.BuildPlugin.getJavaHome /** * This is a dummy project which does a local checkout of the previous * wire compat version's branch, and builds a snapshot. 
This allows backcompat @@ -147,12 +149,16 @@ subprojects { task buildBwcVersion(type: Exec) { dependsOn checkoutBwcBranch, writeBuildMetadata + // send RUNTIME_JAVA_HOME so the build doesn't fails on newer version the branch doesn't know about + environment('RUNTIME_JAVA_HOME', getJavaHome(it, rootProject.ext.minimumRuntimeVersion.getMajorVersion() as int)) workingDir = checkoutDir + // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { - // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds environment('JAVA_HOME', getJavaHome(it, 8)) } else if ("6.2".equals(bwcBranch)) { environment('JAVA_HOME', getJavaHome(it, 9)) + } else if (["6.3", "6.x"].contains(bwcBranch)) { + environment('JAVA_HOME', getJavaHome(it, 10)) } else { environment('JAVA_HOME', project.compilerJavaHome) } @@ -177,6 +183,8 @@ subprojects { } else if (showStacktraceName.equals("ALWAYS_FULL")) { args "--full-stacktrace" } + standardOutput = new IndentingOutputStream(System.out) + errorOutput = new IndentingOutputStream(System.err) doLast { List missing = artifactFiles.grep { file -> false == file.exists() @@ -196,3 +204,27 @@ subprojects { } } } + +class IndentingOutputStream extends OutputStream { + + public static final byte[] INDENT = " [bwc] ".getBytes(StandardCharsets.UTF_8) + private final OutputStream delegate + + public IndentingOutputStream(OutputStream delegate) { + this.delegate = delegate + } + + @Override + public void write(int b) { + write([b] as int[], 0, 1) + } + + public void write(int[] bytes, int offset, int length) { + for (int i = 0; i < bytes.length; i++) { + delegate.write(bytes[i]) + if (bytes[i] == '\n') { + delegate.write(INDENT) + } + } + } +} \ No newline at end of file diff --git a/docs/build.gradle b/docs/build.gradle index 5f0caf4f19fd7..829db4381b046 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -39,6 +39,15 @@ integTestCluster { setting 'reindex.remote.whitelist', '127.0.0.1:*' } +// remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed +if (rootProject.ext.compilerJavaVersion.isJava11()) { + integTestRunner { + systemProperty 'tests.rest.blacklist', [ + 'plugins/ingest-attachment/line_164', + 'plugins/ingest-attachment/line_117' + ].join(',') + } +} // Build the cluster with all plugins project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java index 5c1141a49e557..86b17fb353142 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import org.apache.lucene.util.Constants; +import org.elasticsearch.bootstrap.JavaVersion; import org.hamcrest.Matcher; import java.lang.invoke.MethodHandle; @@ -41,7 +42,11 @@ protected String valueCtorCall(String valueType, int size) { @Override protected Matcher outOfBoundsExceptionMessageMatcher(int index, int size) { - return equalTo(Integer.toString(index)); + if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { + return equalTo(Integer.toString(index)); + } else{ + return equalTo("Index " + Integer.toString(index) + " out of bounds for length " + Integer.toString(size)); 
+ } } public void testArrayLengthHelper() throws Throwable { diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index f000fdfeef5e0..1a6aa809de040 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -25,11 +25,21 @@ esplugin { versions << [ 'tika': '1.18', 'pdfbox': '2.0.9', - 'bouncycastle': '1.55', + 'bouncycastle': '1.59', 'poi': '3.17', 'mime4j': '0.8.1' ] +if (rootProject.ext.compilerJavaVersion.isJava11()) { + // disabled until https://github.com/elastic/elasticsearch/issues/31456 is fixed. + integTestRunner { + systemProperty 'tests.rest.blacklist', [ + 'ingest_attachment/20_attachment_processor/Test indexed chars are configurable', + 'ingest_attachment/20_attachment_processor/Test indexed chars are configurable per document' + ].join(',') + } +} + dependencies { // mandatory for tika compile "org.apache.tika:tika-core:${versions.tika}" diff --git a/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.55.jar.sha1 b/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.55.jar.sha1 deleted file mode 100644 index 8fdfb8f580780..0000000000000 --- a/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.55.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5cea2dada69b98698ea975a5c1dd3c91ac8ffbb6 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.59.jar.sha1 b/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.59.jar.sha1 new file mode 100644 index 0000000000000..dde0a237a186b --- /dev/null +++ b/plugins/ingest-attachment/licenses/bcmail-jdk15on-1.59.jar.sha1 @@ -0,0 +1 @@ +db389ade95f48592908a84e7050a691c8834723c \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.55.jar.sha1 b/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.55.jar.sha1 deleted file mode 100644 index a4d546be04fc2..0000000000000 --- a/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.55.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6392d8cba22b722c6570d660ca0b3921ff1bae4f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.59.jar.sha1 b/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.59.jar.sha1 new file mode 100644 index 0000000000000..be5e561ee9a76 --- /dev/null +++ b/plugins/ingest-attachment/licenses/bcpkix-jdk15on-1.59.jar.sha1 @@ -0,0 +1 @@ +9cef0aab8a4bb849a8476c058ce3ff302aba3fff \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.55.jar.sha1 b/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.55.jar.sha1 deleted file mode 100644 index 1c507e17b88e0..0000000000000 --- a/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.55.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -935f2e57a00ec2c489cbd2ad830d4a399708f979 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.59.jar.sha1 b/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.59.jar.sha1 new file mode 100644 index 0000000000000..aa42dbb8f6906 --- /dev/null +++ b/plugins/ingest-attachment/licenses/bcprov-jdk15on-1.59.jar.sha1 @@ -0,0 +1 @@ +2507204241ab450456bdb8e8c0a8f986e418bd99 \ No newline at end of file diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 6606d1bc72727..75a01231ead03 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ 
b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -164,12 +164,11 @@ static PermissionCollection getRestrictedPermissions() { perms.add(new RuntimePermission("getClassLoader")); // ZipFile needs accessDeclaredMembers on JDK 10; cf. https://bugs.openjdk.java.net/browse/JDK-8187485 if (JavaVersion.current().compareTo(JavaVersion.parse("10")) >= 0) { - /* - * See if this permission can be removed in JDK 11, bump the version here to 12 if not. If this permission can be removed, also - * remove the grant in the plugin-security.policy. - */ - assert JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0; - perms.add(new RuntimePermission("accessDeclaredMembers")); + if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { + // TODO remove this and from plugin-security.policy when JDK 11 is the only one we support + // this is needed pre 11, but it's fixed in 11 : https://bugs.openjdk.java.net/browse/JDK-8187485 + perms.add(new RuntimePermission("accessDeclaredMembers")); + } } perms.setReadOnly(); return perms; diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 654bc361f53ad..80ad1fbca875d 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -21,6 +21,7 @@ import org.apache.commons.io.IOUtils; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; @@ -296,6 +297,7 @@ private Map parseDocument(String file, AttachmentProcessor proce } public void testIndexedChars() throws Exception { + assumeFalse("https://github.com/elastic/elasticsearch/issues/31305", JavaVersion.current().equals(JavaVersion.parse("11"))); processor = new AttachmentProcessor(randomAlphaOfLength(10), "source_field", "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 19, false, null); diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 8856ae1526a21..808b31ae1cd35 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -1,4 +1,4 @@ -/* +1/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright @@ -214,6 +214,25 @@ RestIntegTestTask integTestSecureHa = project.tasks.create('integTestSecureHa', description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode and secured by MIT Kerberos." 
} +if (rootProject.ext.compilerJavaVersion.isJava11()) { + // TODO remove when: https://github.com/elastic/elasticsearch/issues/31498 + integTestRunner { + systemProperty 'tests.rest.blacklist', [ + 'hdfs_repository/30_snapshot/take snapshot', + 'hdfs_repository/40_restore/Create a snapshot and then restore it', + 'hdfs_repository/20_repository_verify/HDFS Repository Verify', + 'hdfs_repository/30_snapshot_get/Get a snapshot', + 'hdfs_repository/20_repository_create/HDFS Repository Creation', + 'hdfs_repository/20_repository_delete/HDFS Delete Repository', + 'hdfs_repository/30_snapshot_readonly/Get a snapshot - readonly', + ].join(',') + } +} +if (rootProject.ext.runtimeJavaVersion.isJava11()) { + // TODO remove when: https://github.com/elastic/elasticsearch/issues/31498 + integTestHa.enabled = false +} + // Determine HDFS Fixture compatibility for the current build environment. boolean fixtureSupported = false if (Os.isFamily(Os.FAMILY_WINDOWS)) { diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index 9d0520205ed5b..c328563b4b658 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -18,15 +18,11 @@ */ package org.elasticsearch.repositories.hdfs; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; - -import java.util.Collection; - import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.bootstrap.JavaVersion; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; @@ -35,6 +31,11 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.util.Collection; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + @ThreadLeakFilters(filters = {HdfsClientThreadLeakFilter.class}) public class HdfsTests extends ESSingleNodeTestCase { @@ -44,6 +45,7 @@ protected Collection> getPlugins() { } public void testSimpleWorkflow() { + assumeFalse("https://github.com/elastic/elasticsearch/issues/31498", JavaVersion.current().equals(JavaVersion.parse("11"))); Client client = client(); PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo") diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 4bbe339a09b79..4bb0e0ffc031b 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -35,8 +35,8 @@ dependencies { // security deps compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile 'org.bouncycastle:bcprov-jdk15on:1.58' - compile 'org.bouncycastle:bcpkix-jdk15on:1.58' + compile 'org.bouncycastle:bcprov-jdk15on:1.59' + compile 'org.bouncycastle:bcpkix-jdk15on:1.59' compile project(path: ':modules:transport-netty4', configuration: 'runtime') testCompile 'org.elasticsearch:securemock:1.2' diff --git a/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.58.jar.sha1 
b/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.58.jar.sha1 deleted file mode 100644 index 1fbdc7fcc1fa8..0000000000000 --- a/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.58.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -15a760a039b040e767a75c77ffcc4ff62558f903 \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 b/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 new file mode 100644 index 0000000000000..be5e561ee9a76 --- /dev/null +++ b/x-pack/plugin/core/licenses/bcpkix-jdk15on-1.59.jar.sha1 @@ -0,0 +1 @@ +9cef0aab8a4bb849a8476c058ce3ff302aba3fff \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/bcprov-jdk15on-1.58.jar.sha1 b/x-pack/plugin/core/licenses/bcprov-jdk15on-1.58.jar.sha1 deleted file mode 100644 index 95bc28eb146ef..0000000000000 --- a/x-pack/plugin/core/licenses/bcprov-jdk15on-1.58.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c9aa1c4e3372b447ba5daabade4adf2a2264b12 \ No newline at end of file diff --git a/x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 b/x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 new file mode 100644 index 0000000000000..aa42dbb8f6906 --- /dev/null +++ b/x-pack/plugin/core/licenses/bcprov-jdk15on-1.59.jar.sha1 @@ -0,0 +1 @@ +2507204241ab450456bdb8e8c0a8f986e418bd99 \ No newline at end of file diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index 4a9df6c9ccc5f..5bdfdf65014f9 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -22,8 +22,8 @@ dependencies { testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') compile 'com.unboundid:unboundid-ldapsdk:3.2.0' - compile 'org.bouncycastle:bcprov-jdk15on:1.58' - compile 'org.bouncycastle:bcpkix-jdk15on:1.58' + compile 'org.bouncycastle:bcprov-jdk15on:1.59' + compile 'org.bouncycastle:bcpkix-jdk15on:1.59' // the following are all SAML dependencies - might as well download the whole internet compile "org.opensaml:opensaml-core:3.3.0" From d7785a7c43beb4ccc70be60ecf19b65ea5f2e9ca Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 5 Jul 2018 08:18:51 +0300 Subject: [PATCH 05/19] Fix doclint jdk 11 --- .../java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index e21f2bf71496b..26bc7c962c450 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -350,7 +350,7 @@ private static List objectsPaths(final String path) { } /** - * Retrieves the object name from all derives paths named {pathX} where 0 <= X < 10. + * Retrieves the object name from all derives paths named {pathX} where 0 <= X < 10. 
* * This is the counterpart of {@link #objectsPaths(String)} */ From 1e8e3f6dae5a98b4c83a7eaa9d66ba0b47d15844 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Thu, 5 Jul 2018 10:30:20 +0300 Subject: [PATCH 06/19] Correct exclusion of test on JDK 11 --- plugins/repository-hdfs/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 808b31ae1cd35..112731af6401b 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -228,7 +228,7 @@ if (rootProject.ext.compilerJavaVersion.isJava11()) { ].join(',') } } -if (rootProject.ext.runtimeJavaVersion.isJava11()) { +if (rootProject.ext.compilerJavaVersion.isJava11()) { // TODO remove when: https://github.com/elastic/elasticsearch/issues/31498 integTestHa.enabled = false } From dabf2b41e08fc8443ac29b9468486170db8efe2b Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Thu, 5 Jul 2018 09:39:54 +0200 Subject: [PATCH 07/19] mark SearchAsyncActionTests.testFanOutAndCollect as AwaitsFix --- .../org/elasticsearch/action/search/SearchAsyncActionTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index c4042fffe1254..e4cbc1fcd80b1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -256,6 +256,7 @@ public void run() { assertEquals(10, numRequests.get()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/29242") public void testFanOutAndCollect() throws InterruptedException { SearchRequest request = new SearchRequest(); request.allowPartialSearchResults(true); From 6acb5910128dfebe85cdced8b1eb661acedd307a Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Thu, 5 Jul 2018 10:03:10 +0200 Subject: [PATCH 08/19] mark RollupIT.testTwoJobsStartStopDeleteOne as AwaitsFix --- .../src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java index 3f930cb42981d..157cd6a5b9d1a 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java @@ -221,6 +221,7 @@ public void testIndexPattern() throws Exception { Assert.assertThat(getIndexResponse.indices().length, Matchers.greaterThan(0)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30290") public void testTwoJobsStartStopDeleteOne() throws Exception { MetricConfig metricConfig = new MetricConfig.Builder() .setField("foo") From f40581caa03191c6687b8512f5dba4d3377fed26 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 5 Jul 2018 12:28:40 +0300 Subject: [PATCH 09/19] Fix license header generation on Windows (#31790) Updates the build.gradle to take into account the OS differences for Windows (in particular line separator and project naming) --- build.gradle | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index a6b7b4ec6fb63..9d9f85db47bbc 100644 --- a/build.gradle +++ b/build.gradle @@ -445,12 +445,19 @@ allprojects { } File licenseHeaderFile; - 
if (eclipse.project.name.startsWith(':x-pack')) { + String prefix = ':x-pack'; + + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + prefix = prefix.replace(':', '_') + } + if (eclipse.project.name.startsWith(prefix)) { licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/elastic-license-header.txt') } else { licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/oss-license-header.txt') } - String licenseHeader = licenseHeaderFile.getText('UTF-8').replace('\n', '\\\\n') + + String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n' + String licenseHeader = licenseHeaderFile.getText('UTF-8').replace(System.lineSeparator(), lineSeparator) task copyEclipseSettings(type: Copy) { // TODO: "package this up" for external builds from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings') From 9c11bf1e127a488995277eaabf240c0ea3c6d195 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 5 Jul 2018 13:14:12 +0100 Subject: [PATCH 10/19] [ML] Fix calendar and filter updates from non-master nodes (#31804) Job updates or changes to calendars or filters may result in updating the job process if it has been running. To preserve the order of updates, process updates are queued through the UpdateJobProcessNotifier which is only running on the master node. All actions performing such updates must run on the master node. However, the CRUD actions for calendars and filters are not master node actions. They have been submitting the updates to the UpdateJobProcessNotifier even though it might not have been running (given the action was run on a non-master node). When that happens, the update never reaches the process. This commit fixes this problem by ensuring the notifier runs on all nodes and by ensuring the process update action gets the resources again before updating the process (instead of having those resources passed in the request). This ensures that even if the order of the updates gets messed up, the latest update will read the latest state of those resources and the process will get back in sync. This leaves us with 2 types of updates: 1. updates to the job config should happen on the master node. This is because we cannot refetch the entire job and update it. We need to know the parts that have been changed. 2. updates to resources the job uses. Those can be handled on non-master nodes but they should be re-fetched by the update process action. 
Closes #31803 --- .../ml/job/UpdateJobProcessNotifier.java | 55 ++++++++----- .../autodetect/AutodetectCommunicator.java | 39 ++++----- .../autodetect/AutodetectProcessManager.java | 73 +++++++++++++---- .../job/process/autodetect/UpdateParams.java | 9 ++ .../autodetect/UpdateProcessMessage.java | 82 +++++++++++++++++++ .../AutodetectCommunicatorTests.java | 5 +- .../AutodetectProcessManagerTests.java | 10 ++- 7 files changed, 212 insertions(+), 61 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateProcessMessage.java diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java index b9c795df9b78c..6b871c074619e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/UpdateJobProcessNotifier.java @@ -5,14 +5,15 @@ */ package org.elasticsearch.xpack.ml.job; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.LocalNodeMasterListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.LifecycleListener; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.threadpool.ThreadPool; @@ -31,9 +32,26 @@ import static org.elasticsearch.xpack.core.ml.action.UpdateProcessAction.Request; import static org.elasticsearch.xpack.core.ml.action.UpdateProcessAction.Response; -public class UpdateJobProcessNotifier extends AbstractComponent implements LocalNodeMasterListener { +/** + * This class serves as a queue for updates to the job process. + * Queueing is important for 2 reasons: first, it throttles the updates + * to the process, and second and most important, it preserves the order of the updates + * for actions that run on the master node. For preserving the order of the updates + * to the job config, it's necessary to handle the whole update chain on the master + * node. However, for updates to resources the job uses (e.g. calendars, filters), + * they can be handled on non-master nodes as long as the update process action + * is fetching the latest version of those resources from the index instead of + * using the version that existed while the handling action was at work. This makes + * sure that even if the order of updates gets reversed, the final process update + * will fetch the valid state of those external resources ensuring the process is + * in sync. 
+ */ +public class UpdateJobProcessNotifier extends AbstractComponent { + + private static final Logger LOGGER = Loggers.getLogger(UpdateJobProcessNotifier.class); private final Client client; + private final ClusterService clusterService; private final ThreadPool threadPool; private final LinkedBlockingQueue orderedJobUpdates = new LinkedBlockingQueue<>(1000); @@ -42,9 +60,15 @@ public class UpdateJobProcessNotifier extends AbstractComponent implements Local public UpdateJobProcessNotifier(Settings settings, Client client, ClusterService clusterService, ThreadPool threadPool) { super(settings); this.client = client; + this.clusterService = clusterService; this.threadPool = threadPool; - clusterService.addLocalNodeMasterListener(this); clusterService.addLifecycleListener(new LifecycleListener() { + + @Override + public void beforeStart() { + start(); + } + @Override public void beforeStop() { stop(); @@ -56,16 +80,6 @@ boolean submitJobUpdate(UpdateParams update, ActionListener listener) { return orderedJobUpdates.offer(new UpdateHolder(update, listener)); } - @Override - public void onMaster() { - start(); - } - - @Override - public void offMaster() { - stop(); - } - private void start() { cancellable = threadPool.scheduleWithFixedDelay(this::processNextUpdate, TimeValue.timeValueSeconds(1), ThreadPool.Names.GENERIC); } @@ -79,12 +93,6 @@ private void stop() { } } - @Override - public String executorName() { - // SAME is ok here, because both start() and stop() are inexpensive: - return ThreadPool.Names.SAME; - } - private void processNextUpdate() { List updates = new ArrayList<>(orderedJobUpdates.size()); try { @@ -101,6 +109,15 @@ void executeProcessUpdates(Iterator updatesIterator) { } UpdateHolder updateHolder = updatesIterator.next(); UpdateParams update = updateHolder.update; + + if (update.isJobUpdate() && clusterService.localNode().isMasterNode() == false) { + assert clusterService.localNode().isMasterNode(); + LOGGER.error("Job update was submitted to non-master node [" + clusterService.nodeName() + "]; update for job [" + + update.getJobId() + "] will be ignored"); + executeProcessUpdates(updatesIterator); + return; + } + Request request = new Request(update.getJobId(), update.getModelPlotConfig(), update.getDetectorUpdates(), update.getFilter(), update.isUpdateScheduledEvents()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java index bdac41cd9b96d..0885a8f9d6479 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java @@ -17,28 +17,27 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; +import 
org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; import org.elasticsearch.xpack.ml.job.process.CountingInputStream; import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutoDetectResultProcessor; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.ml.job.process.autodetect.writer.DataToProcessWriter; import org.elasticsearch.xpack.ml.job.process.autodetect.writer.DataToProcessWriterFactory; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.Closeable; import java.io.IOException; @@ -46,7 +45,6 @@ import java.time.Duration; import java.time.ZonedDateTime; import java.util.Collections; -import java.util.List; import java.util.Locale; import java.util.Optional; import java.util.concurrent.CountDownLatch; @@ -206,30 +204,29 @@ public void killProcess(boolean awaitCompletion, boolean finish) throws IOExcept } } - public void writeUpdateProcessMessage(UpdateParams updateParams, List scheduledEvents, - BiConsumer handler) { + public void writeUpdateProcessMessage(UpdateProcessMessage update, BiConsumer handler) { submitOperation(() -> { - if (updateParams.getModelPlotConfig() != null) { - autodetectProcess.writeUpdateModelPlotMessage(updateParams.getModelPlotConfig()); + if (update.getModelPlotConfig() != null) { + autodetectProcess.writeUpdateModelPlotMessage(update.getModelPlotConfig()); } // Filters have to be written before detectors - if (updateParams.getFilter() != null) { - autodetectProcess.writeUpdateFiltersMessage(Collections.singletonList(updateParams.getFilter())); + if (update.getFilter() != null) { + autodetectProcess.writeUpdateFiltersMessage(Collections.singletonList(update.getFilter())); } // Add detector rules - if (updateParams.getDetectorUpdates() != null) { - for (JobUpdate.DetectorUpdate update : updateParams.getDetectorUpdates()) { - if (update.getRules() != null) { - autodetectProcess.writeUpdateDetectorRulesMessage(update.getDetectorIndex(), update.getRules()); + if (update.getDetectorUpdates() != null) { + for (JobUpdate.DetectorUpdate detectorUpdate : update.getDetectorUpdates()) { + if (detectorUpdate.getRules() != null) { + autodetectProcess.writeUpdateDetectorRulesMessage(detectorUpdate.getDetectorIndex(), detectorUpdate.getRules()); } } } // Add scheduled events; null means there's no update but an empty list means we should clear any events in the process - if (scheduledEvents 
!= null) { - autodetectProcess.writeUpdateScheduledEventsMessage(scheduledEvents, job.getAnalysisConfig().getBucketSpan()); + if (update.getScheduledEvents() != null) { + autodetectProcess.writeUpdateScheduledEventsMessage(update.getScheduledEvents(), job.getAnalysisConfig().getBucketSpan()); } return null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index b6efb688c1797..77e7fe1471611 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.ml.job.process.autodetect; -import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; -import org.elasticsearch.core.internal.io.IOUtils; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; @@ -22,24 +20,26 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentElasticsearchExtension; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; -import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.process.autodetect.output.FlushAcknowledgement; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.action.TransportOpenJobAction.JobTask; import org.elasticsearch.xpack.ml.job.JobManager; @@ -47,10 +47,12 @@ import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.ml.job.persistence.StateStreamer; import 
org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.NativeStorageProvider; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutoDetectResultProcessor; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.ForecastParams; @@ -82,6 +84,8 @@ import java.util.function.Consumer; import static org.elasticsearch.common.settings.Setting.Property; +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; public class AutodetectProcessManager extends AbstractComponent { @@ -156,7 +160,7 @@ public void onNodeStartup() { } } - public synchronized void closeAllJobsOnThisNode(String reason) throws IOException { + public synchronized void closeAllJobsOnThisNode(String reason) { int numJobs = processByAllocation.size(); if (numJobs != 0) { logger.info("Closing [{}] jobs, because [{}]", numJobs, reason); @@ -322,8 +326,7 @@ public void forecastJob(JobTask jobTask, ForecastParams params, Consumer handler) { + public void writeUpdateProcessMessage(JobTask jobTask, UpdateParams updateParams, Consumer handler) { AutodetectCommunicator communicator = getOpenAutodetectCommunicator(jobTask); if (communicator == null) { String message = "Cannot process update model debug config because job [" + jobTask.getJobId() + "] is not open"; @@ -332,25 +335,59 @@ public void writeUpdateProcessMessage(JobTask jobTask, UpdateParams updateParams return; } + UpdateProcessMessage.Builder updateProcessMessage = new UpdateProcessMessage.Builder(); + updateProcessMessage.setModelPlotConfig(updateParams.getModelPlotConfig()); + updateProcessMessage.setDetectorUpdates(updateParams.getDetectorUpdates()); + + // Step 3. Set scheduled events on message and write update process message ActionListener> eventsListener = ActionListener.wrap( events -> { - communicator.writeUpdateProcessMessage(updateParams, events == null ? null : events.results(), (aVoid, e) -> { + updateProcessMessage.setScheduledEvents(events == null ? null : events.results()); + communicator.writeUpdateProcessMessage(updateProcessMessage.build(), (aVoid, e) -> { if (e == null) { handler.accept(null); } else { handler.accept(e); } }); - }, - handler::accept); - - if (updateParams.isUpdateScheduledEvents()) { - Job job = jobManager.getJobOrThrowIfUnknown(jobTask.getJobId()); - DataCounts dataCounts = getStatistics(jobTask).get().v1(); - ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(job.earliestValidTimestamp(dataCounts)); - jobProvider.scheduledEventsForJob(jobTask.getJobId(), job.getGroups(), query, eventsListener); + }, handler + ); + + // Step 2. 
Set the filter on the message and get scheduled events + ActionListener filterListener = ActionListener.wrap( + filter -> { + updateProcessMessage.setFilter(filter); + + if (updateParams.isUpdateScheduledEvents()) { + Job job = jobManager.getJobOrThrowIfUnknown(jobTask.getJobId()); + DataCounts dataCounts = getStatistics(jobTask).get().v1(); + ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(job.earliestValidTimestamp(dataCounts)); + jobProvider.scheduledEventsForJob(jobTask.getJobId(), job.getGroups(), query, eventsListener); + } else { + eventsListener.onResponse(null); + } + }, handler + ); + + // Step 1. Get the filter + if (updateParams.getFilter() == null) { + filterListener.onResponse(null); } else { - eventsListener.onResponse(null); + GetFiltersAction.Request getFilterRequest = new GetFiltersAction.Request(); + getFilterRequest.setFilterId(updateParams.getFilter().getId()); + executeAsyncWithOrigin(client, ML_ORIGIN, GetFiltersAction.INSTANCE, getFilterRequest, + new ActionListener() { + + @Override + public void onResponse(GetFiltersAction.Response response) { + filterListener.onResponse(response.getFilters().results().get(0)); + } + + @Override + public void onFailure(Exception e) { + handler.accept(e); + } + }); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java index ac41dcccbcff9..127fb18e5fff4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateParams.java @@ -49,6 +49,15 @@ public MlFilter getFilter() { return filter; } + /** + * Returns true if the update params include a job update, + * ie an update to the job config directly rather than an + * update to external resources a job uses (e.g. calendars, filters). + */ + public boolean isJobUpdate() { + return modelPlotConfig != null || detectorUpdates != null; + } + public boolean isUpdateScheduledEvents() { return updateScheduledEvents; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateProcessMessage.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateProcessMessage.java new file mode 100644 index 0000000000000..4686d4ed37273 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/UpdateProcessMessage.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.job.process.autodetect; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; +import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; + +import java.util.List; + +public final class UpdateProcessMessage { + + @Nullable private final ModelPlotConfig modelPlotConfig; + @Nullable private final List detectorUpdates; + @Nullable private final MlFilter filter; + @Nullable private final List scheduledEvents; + + private UpdateProcessMessage(@Nullable ModelPlotConfig modelPlotConfig, @Nullable List detectorUpdates, + @Nullable MlFilter filter, List scheduledEvents) { + this.modelPlotConfig = modelPlotConfig; + this.detectorUpdates = detectorUpdates; + this.filter = filter; + this.scheduledEvents = scheduledEvents; + } + + @Nullable + public ModelPlotConfig getModelPlotConfig() { + return modelPlotConfig; + } + + @Nullable + public List getDetectorUpdates() { + return detectorUpdates; + } + + @Nullable + public MlFilter getFilter() { + return filter; + } + + @Nullable + public List getScheduledEvents() { + return scheduledEvents; + } + + public static class Builder { + + @Nullable private ModelPlotConfig modelPlotConfig; + @Nullable private List detectorUpdates; + @Nullable private MlFilter filter; + @Nullable private List scheduledEvents; + + public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { + this.modelPlotConfig = modelPlotConfig; + return this; + } + + public Builder setDetectorUpdates(List detectorUpdates) { + this.detectorUpdates = detectorUpdates; + return this; + } + + public Builder setFilter(MlFilter filter) { + this.filter = filter; + return this; + } + + public Builder setScheduledEvents(List scheduledEvents) { + this.scheduledEvents = scheduledEvents; + return this; + } + + public UpdateProcessMessage build() { + return new UpdateProcessMessage(modelPlotConfig, detectorUpdates, filter, scheduledEvents); + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java index 57e5f6cfdb3ff..ab24aadb9dc3a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java @@ -95,11 +95,12 @@ public void testWriteUpdateProcessMessage() throws IOException { List detectorUpdates = Collections.singletonList( new JobUpdate.DetectorUpdate(0, "updated description", Collections.singletonList(updatedRule))); - UpdateParams updateParams = UpdateParams.builder("foo").detectorUpdates(detectorUpdates).build(); List events = Collections.singletonList( ScheduledEventTests.createScheduledEvent(randomAlphaOfLength(10))); + UpdateProcessMessage.Builder updateProcessMessage = new UpdateProcessMessage.Builder().setDetectorUpdates(detectorUpdates); + updateProcessMessage.setScheduledEvents(events); - communicator.writeUpdateProcessMessage(updateParams, events, ((aVoid, e) -> {})); + communicator.writeUpdateProcessMessage(updateProcessMessage.build(), ((aVoid, e) -> {})); verify(process).writeUpdateDetectorRulesMessage(eq(0), eq(Collections.singletonList(updatedRule))); 
verify(process).writeUpdateScheduledEventsMessage(events, AnalysisConfig.Builder.DEFAULT_BUCKET_SPAN); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index a1b9aad452b9e..313f449cadd81 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerFactory; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.junit.Before; +import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import java.io.ByteArrayInputStream; @@ -489,8 +490,15 @@ public void testWriteUpdateProcessMessage() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); UpdateParams updateParams = UpdateParams.builder("foo").modelPlotConfig(modelConfig).detectorUpdates(detectorUpdates).build(); + manager.writeUpdateProcessMessage(jobTask, updateParams, e -> {}); - verify(communicator).writeUpdateProcessMessage(same(updateParams), eq(null), any()); + + ArgumentCaptor captor = ArgumentCaptor.forClass(UpdateProcessMessage.class); + verify(communicator).writeUpdateProcessMessage(captor.capture(), any()); + + UpdateProcessMessage updateProcessMessage = captor.getValue(); + assertThat(updateProcessMessage.getModelPlotConfig(), equalTo(modelConfig)); + assertThat(updateProcessMessage.getDetectorUpdates(), equalTo(detectorUpdates)); } public void testJobHasActiveAutodetectProcess() { From 92de94c237bf03f1a0f76560134dab90117696ca Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 5 Jul 2018 13:26:17 +0100 Subject: [PATCH 11/19] [ML] Don't treat stale FAILED jobs as OPENING in job allocation (#31800) Job persistent tasks with stale allocation IDs used to always be considered as OPENING jobs in the ML job node allocation decision. However, FAILED jobs are not relocated to other nodes, which leads to them blocking up the nodes they failed on after node restarts. FAILED jobs should not restrict how many other jobs can open on a node, regardless of whether they are stale or not. 
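The state handling that the hunk below adds to selectLeastLoadedMlNode can be condensed roughly as follows. This is an editorial sketch, not part of the patch: the helper method name is invented, and the allocating/assigned-job counter updates are omitted, but the classes and branching mirror the diff.

```java
// Condensed sketch only: mirrors the branching introduced below; counter bookkeeping omitted.
private static JobState effectiveJobState(JobTaskState jobTaskState,
                                          PersistentTasksCustomMetaData.PersistentTask<?> task) {
    if (jobTaskState == null) {
        return JobState.OPENING;        // executor node never got the chance to report a state
    }
    JobState jobState = jobTaskState.getState();
    if (jobTaskState.isStatusStale(task)) {
        if (jobState == JobState.CLOSING) {
            return JobState.CLOSED;     // a stale CLOSING job will not be reopened
        }
        if (jobState != JobState.FAILED) {
            return JobState.OPENING;    // previous executor failed; the job will be reopened elsewhere
        }
    }
    return jobState;                    // FAILED stays FAILED, stale or not
}
```

Jobs whose effective state is CLOSED or FAILED are then excluded from the per-node resource accounting, since they do not consume native memory.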
Closes #31794 --- .../ml/action/TransportOpenJobAction.java | 21 +++++-- .../action/TransportOpenJobActionTests.java | 58 +++++++++++++++++-- 2 files changed, 69 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index e7fb0fe5fb315..290e407ab664c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -210,16 +210,27 @@ static PersistentTasksCustomMetaData.Assignment selectLeastLoadedMlNode(String j for (PersistentTasksCustomMetaData.PersistentTask assignedTask : assignedTasks) { JobTaskState jobTaskState = (JobTaskState) assignedTask.getState(); JobState jobState; - if (jobTaskState == null || // executor node didn't have the chance to set job status to OPENING - // previous executor node failed and current executor node didn't have the chance to set job status to OPENING - jobTaskState.isStatusStale(assignedTask)) { + if (jobTaskState == null) { + // executor node didn't have the chance to set job status to OPENING ++numberOfAllocatingJobs; jobState = JobState.OPENING; } else { jobState = jobTaskState.getState(); + if (jobTaskState.isStatusStale(assignedTask)) { + if (jobState == JobState.CLOSING) { + // previous executor node failed while the job was closing - it won't + // be reopened, so consider it CLOSED for resource usage purposes + jobState = JobState.CLOSED; + } else if (jobState != JobState.FAILED) { + // previous executor node failed and current executor node didn't + // have the chance to set job status to OPENING + ++numberOfAllocatingJobs; + jobState = JobState.OPENING; + } + } } - // Don't count FAILED jobs, as they don't consume native memory - if (jobState != JobState.FAILED) { + // Don't count CLOSED or FAILED jobs, as they don't consume native memory + if (jobState.isAnyOf(JobState.CLOSED, JobState.FAILED) == false) { ++numberOfAssignedJobs; String assignedJobId = ((OpenJobAction.JobParams) assignedTask.getParams()).getJobId(); Job assignedJob = mlMetadata.getJobs().get(assignedJobId); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index b5a315d9687bb..dd8ddf3aa62ad 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -55,7 +55,6 @@ import java.io.IOException; import java.net.InetAddress; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; @@ -285,7 +284,7 @@ public void testSelectLeastLoadedMlNode_maxConcurrentOpeningJobs() { nodeAttr, Collections.emptySet(), Version.CURRENT)) .build(); - PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id1", "_node_id1", null, tasksBuilder); addJobTask("job_id2", "_node_id1", null, tasksBuilder); addJobTask("job_id3", "_node_id2", null, tasksBuilder); @@ -340,6 +339,55 @@ public void testSelectLeastLoadedMlNode_maxConcurrentOpeningJobs() { 
assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); } + public void testSelectLeastLoadedMlNode_concurrentOpeningJobsAndStaleFailedJob() { + Map nodeAttr = new HashMap<>(); + nodeAttr.put(MachineLearning.ML_ENABLED_NODE_ATTR, "true"); + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("_node_name1", "_node_id1", new TransportAddress(InetAddress.getLoopbackAddress(), 9300), + nodeAttr, Collections.emptySet(), Version.CURRENT)) + .add(new DiscoveryNode("_node_name2", "_node_id2", new TransportAddress(InetAddress.getLoopbackAddress(), 9301), + nodeAttr, Collections.emptySet(), Version.CURRENT)) + .add(new DiscoveryNode("_node_name3", "_node_id3", new TransportAddress(InetAddress.getLoopbackAddress(), 9302), + nodeAttr, Collections.emptySet(), Version.CURRENT)) + .build(); + + PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); + addJobTask("job_id1", "_node_id1", JobState.fromString("failed"), tasksBuilder); + // This will make the allocation stale for job_id1 + tasksBuilder.reassignTask(MlMetadata.jobTaskId("job_id1"), new Assignment("_node_id1", "test assignment")); + addJobTask("job_id2", "_node_id1", null, tasksBuilder); + addJobTask("job_id3", "_node_id2", null, tasksBuilder); + addJobTask("job_id4", "_node_id2", null, tasksBuilder); + addJobTask("job_id5", "_node_id3", null, tasksBuilder); + addJobTask("job_id6", "_node_id3", null, tasksBuilder); + PersistentTasksCustomMetaData tasks = tasksBuilder.build(); + + ClusterState.Builder csBuilder = ClusterState.builder(new ClusterName("_name")); + csBuilder.nodes(nodes); + MetaData.Builder metaData = MetaData.builder(); + RoutingTable.Builder routingTable = RoutingTable.builder(); + addJobAndIndices(metaData, routingTable, "job_id1", "job_id2", "job_id3", "job_id4", "job_id5", "job_id6", "job_id7", "job_id8"); + csBuilder.routingTable(routingTable.build()); + metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); + csBuilder.metaData(metaData); + + ClusterState cs = csBuilder.build(); + // Allocation won't be possible if the stale failed job is treated as opening + Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id7", cs, 2, 10, 30, logger); + assertEquals("_node_id1", result.getExecutorNode()); + + tasksBuilder = PersistentTasksCustomMetaData.builder(tasks); + addJobTask("job_id7", "_node_id1", null, tasksBuilder); + tasks = tasksBuilder.build(); + + csBuilder = ClusterState.builder(cs); + csBuilder.metaData(MetaData.builder(cs.metaData()).putCustom(PersistentTasksCustomMetaData.TYPE, tasks)); + cs = csBuilder.build(); + result = TransportOpenJobAction.selectLeastLoadedMlNode("job_id8", cs, 2, 10, 30, logger); + assertNull("no node selected, because OPENING state", result.getExecutorNode()); + assertTrue(result.getExplanation().contains("because node exceeds [2] the maximum number of jobs [2] in opening state")); + } + public void testSelectLeastLoadedMlNode_noCompatibleJobTypeNodes() { Map nodeAttr = new HashMap<>(); nodeAttr.put(MachineLearning.ML_ENABLED_NODE_ATTR, "true"); @@ -710,13 +758,13 @@ private ClusterState getClusterStateWithMappingsWithMetaData(Map private static Function jobWithRulesCreator() { return jobId -> { - DetectionRule rule = new DetectionRule.Builder(Arrays.asList( + DetectionRule rule = new DetectionRule.Builder(Collections.singletonList( new RuleCondition(RuleCondition.AppliesTo.TYPICAL, Operator.LT, 100.0) )).build(); Detector.Builder detector = 
new Detector.Builder("count", null); - detector.setRules(Arrays.asList(rule)); - AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Arrays.asList(detector.build())); + detector.setRules(Collections.singletonList(rule)); + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); DataDescription.Builder dataDescription = new DataDescription.Builder(); Job.Builder job = new Job.Builder(jobId); job.setAnalysisConfig(analysisConfig); From 894fb97ad7efd6873fd6368296b3b0e7b249ecf7 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Thu, 5 Jul 2018 13:56:54 +0100 Subject: [PATCH 12/19] [ML][TEST] Use java 11 valid time format in DataDescriptionTests (#31817) It seems that java 11 tightened some validations with regard to time formats. The random instance creator was setting an odd time format to the data description which is invalid when run with java 11. This commit changes it to a valid format. --- .../xpack/core/ml/job/config/DataDescriptionTests.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java index 4670420a9dd04..3ca4bac47cb29 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -255,7 +254,7 @@ protected DataDescription createTestInstance() { } else if (randomBoolean()) { format = DataDescription.EPOCH_MS; } else { - format = "yyy.MM.dd G 'at' HH:mm:ss z"; + format = "yyyy-MM-dd HH:mm:ss.SSS"; } dataDescription.setTimeFormat(format); } From 40b822c878fa058f82accbb82df9def0e9aed828 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Thu, 5 Jul 2018 15:30:08 +0200 Subject: [PATCH 13/19] Scripting: Remove support for deprecated StoredScript contexts (#31394) Removes support for storing scripts without the usual json around the script. So You can no longer do: ``` POST _scripts/ { "query": { "match": { "title": "{{query_string}}" } } } ``` and must instead do: ``` POST _scripts/ { "script": { "lang": "mustache", "source": { "query": { "match": { "title": "{{query_string}}" } } } } } ``` This improves error reporting when you attempt to store a script but don't quite get the syntax right. Before, there was a good chance that we'd think of it as a "raw" template and just store it. Now we won't do that. Nice. 
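For illustration, the stored-template round trip in the new format looks roughly like the following when driven through the Java test client used by the SearchTemplateIT changes below. The index name, template id, and parameter value here are made up for the example.

```java
// Illustrative sketch only: store a mustache search template with the new "script" wrapper,
// then run it as a stored template. Names and params are invented, not taken from the patch.
String storedTemplate =
    "{" +
    "  \"script\": {" +
    "    \"lang\": \"mustache\"," +
    "    \"source\": {" +
    "      \"query\": { \"match\": { \"title\": \"{{query_string}}\" } }" +
    "    }" +
    "  }" +
    "}";

assertAcked(client().admin().cluster().preparePutStoredScript()
    .setId("my-template")
    .setContent(new BytesArray(storedTemplate), XContentType.JSON));

SearchTemplateResponse response = new SearchTemplateRequestBuilder(client())
    .setRequest(new SearchRequest("my-index"))
    .setScript("my-template")
    .setScriptType(ScriptType.STORED)
    .setScriptParams(Collections.singletonMap("query_string", "some text"))
    .get();
```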
--- .../migration/migrate_7_0/api.asciidoc | 4 + .../script/mustache/SearchTemplateIT.java | 137 ++++++++++-------- .../GetStoredScriptResponse.java | 13 +- .../script/StoredScriptSource.java | 126 +++------------- .../script/ScriptMetaDataTests.java | 37 +++-- .../script/StoredScriptSourceTests.java | 23 ++- .../script/StoredScriptTests.java | 66 ++------- .../test/integration/BasicWatcherTests.java | 6 +- 8 files changed, 141 insertions(+), 271 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index 3d824c600648f..f7b6f9b2e00a9 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -75,3 +75,7 @@ will be for such settings to be copied on such operations. To enable users in `copy_settings` parameter was added on the REST layer. As this behavior will be the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in 8.0.0. + +==== The deprecated stored script contexts have now been removed +When putting stored scripts, support for storing them with the deprecated `template` context or without a context is +now removed. Scripts must be stored using the `script` context as mentioned in the documentation. diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 884e26e7df855..0fbc3fa16afd2 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.plugins.Plugin; @@ -152,25 +151,22 @@ public void testTemplateQueryAsEscapedStringWithConditionalClauseAtEnd() throws public void testIndexedTemplateClient() throws Exception { assertAcked(client().admin().cluster().preparePutStoredScript() .setId("testTemplate") - .setContent(new BytesArray("{" + - "\"template\":{" + - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}"), XContentType.JSON)); - - - assertAcked(client().admin().cluster().preparePutStoredScript() - .setId("testTemplate").setContent(new BytesArray("{" + - "\"template\":{" + - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}"), XContentType.JSON)); + .setContent( + new BytesArray( + "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": {" + + " \"match\": {" + + " \"theField\": \"{{fieldParam}}\"" + + " }" + + " }" + + " }" + + " }" + + "}" + ), + XContentType.JSON)); GetStoredScriptResponse getResponse = client().admin().cluster() .prepareGetStoredScript("testTemplate").get(); @@ -198,41 +194,32 @@ public void testIndexedTemplateClient() throws Exception { getResponse = client().admin().cluster().prepareGetStoredScript("testTemplate").get(); assertNull(getResponse.getSource()); - assertWarnings("the template context is now deprecated. 
Specify templates in a \"script\" element."); } public void testIndexedTemplate() throws Exception { - assertAcked(client().admin().cluster().preparePutStoredScript() - .setId("1a") - .setContent(new BytesArray("{" + - "\"template\":{" + - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}" - ), XContentType.JSON) + + String script = + "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": {" + + " \"match\": {" + + " \"theField\": \"{{fieldParam}}\"" + + " }" + + " }" + + " }" + + " }" + + "}"; + + assertAcked( + client().admin().cluster().preparePutStoredScript().setId("1a").setContent(new BytesArray(script), XContentType.JSON) ); - assertAcked(client().admin().cluster().preparePutStoredScript() - .setId("2") - .setContent(new BytesArray("{" + - "\"template\":{" + - " \"query\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}" + - "}"), XContentType.JSON) + assertAcked( + client().admin().cluster().preparePutStoredScript().setId("2").setContent(new BytesArray(script), XContentType.JSON) ); - assertAcked(client().admin().cluster().preparePutStoredScript() - .setId("3") - .setContent(new BytesArray("{" + - "\"template\":{" + - " \"match\":{" + - " \"theField\" : \"{{fieldParam}}\"}" + - " }" + - "}"), XContentType.JSON) + assertAcked( + client().admin().cluster().preparePutStoredScript().setId("3").setContent(new BytesArray(script), XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); @@ -268,7 +255,6 @@ public void testIndexedTemplate() throws Exception { .setScript("2").setScriptType(ScriptType.STORED).setScriptParams(templateParams) .get(); assertHitCount(searchResponse.getResponse(), 1); - assertWarnings("the template context is now deprecated. 
Specify templates in a \"script\" element."); } // Relates to #10397 @@ -282,13 +268,27 @@ public void testIndexedTemplateOverwrite() throws Exception { client().admin().indices().prepareRefresh().get(); int iterations = randomIntBetween(2, 11); + String query = + "{" + + " \"script\": {" + + " \"lang\": \"mustache\"," + + " \"source\": {" + + " \"query\": {" + + " \"match_phrase_prefix\": {" + + " \"searchtext\": {" + + " \"query\": \"{{P_Keyword1}}\"," + + " \"slop\": {{slop}}" + + " }" + + " }" + + " }" + + " }" + + " }" + + "}"; for (int i = 1; i < iterations; i++) { assertAcked(client().admin().cluster().preparePutStoredScript() .setId("git01") - .setContent(new BytesArray( - "{\"template\":{\"query\": {\"match_phrase_prefix\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," - + "\"slop\": -1}}}}}"), - XContentType.JSON)); + .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(-1))), XContentType.JSON) + ); GetStoredScriptResponse getResponse = client().admin().cluster().prepareGetStoredScript("git01").get(); assertNotNull(getResponse.getSource()); @@ -304,8 +304,8 @@ public void testIndexedTemplateOverwrite() throws Exception { assertAcked(client().admin().cluster().preparePutStoredScript() .setId("git01") - .setContent(new BytesArray("{\"query\": {\"match_phrase_prefix\": {\"searchtext\": {\"query\": \"{{P_Keyword1}}\"," + - "\"slop\": 0}}}}"), XContentType.JSON)); + .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON) + ); SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()) .setRequest(new SearchRequest("testindex").types("test")) @@ -313,16 +313,30 @@ public void testIndexedTemplateOverwrite() throws Exception { .get(); assertHitCount(searchResponse.getResponse(), 1); } - assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element."); } public void testIndexedTemplateWithArray() throws Exception { - String multiQuery = "{\"query\":{\"terms\":{\"theField\":[\"{{#fieldParam}}\",\"{{.}}\",\"{{/fieldParam}}\"]}}}"; + String multiQuery = + "{\n" + + " \"script\": {\n" + + " \"lang\": \"mustache\",\n" + + " \"source\": {\n" + + " \"query\": {\n" + + " \"terms\": {\n" + + " \"theField\": [\n" + + " \"{{#fieldParam}}\",\n" + + " \"{{.}}\",\n" + + " \"{{/fieldParam}}\"\n" + + " ]\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; assertAcked( client().admin().cluster().preparePutStoredScript() .setId("4") - .setContent(BytesReference.bytes(jsonBuilder().startObject().field("template", multiQuery).endObject()), - XContentType.JSON) + .setContent(new BytesArray(multiQuery), XContentType.JSON) ); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); @@ -342,7 +356,6 @@ public void testIndexedTemplateWithArray() throws Exception { .setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams) .get(); assertHitCount(searchResponse.getResponse(), 5); - assertWarnings("the template context is now deprecated. 
Specify templates in a \"script\" element."); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 4cf686b9c282c..742fb1a74eab4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -114,11 +114,7 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); if (in.readBoolean()) { - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - source = new StoredScriptSource(in); - } else { - source = new StoredScriptSource(in.readString()); - } + source = new StoredScriptSource(in); } else { source = null; } @@ -136,12 +132,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } else { out.writeBoolean(true); - - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - source.writeTo(out); - } else { - out.writeString(source.getSource()); - } + source.writeTo(out); } if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeString(id); diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index 885d72bdec6f5..aabef751fc76f 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -19,15 +19,12 @@ package org.elasticsearch.script; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -69,16 +66,6 @@ public class StoredScriptSource extends AbstractDiffable imp */ public static final ParseField SCRIPT_PARSE_FIELD = new ParseField("script"); - /** - * Standard {@link ParseField} for outer level of stored script source. - */ - public static final ParseField TEMPLATE_PARSE_FIELD = new ParseField("template"); - - /** - * Standard {@link ParseField} for query on the inner field. - */ - public static final ParseField TEMPLATE_NO_WRAPPER_PARSE_FIELD = new ParseField("query"); - /** * Standard {@link ParseField} for lang on the inner level. 
*/ @@ -194,26 +181,6 @@ private StoredScriptSource build(boolean ignoreEmpty) { PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT); } - private static StoredScriptSource parseRemaining(Token token, XContentParser parser) throws IOException { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - if (token != Token.START_OBJECT) { - builder.startObject(); - builder.copyCurrentStructure(parser); - builder.endObject(); - } else { - builder.copyCurrentStructure(parser); - } - - String source = Strings.toString(builder); - - if (source == null || source.isEmpty()) { - DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); - } - - return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap()); - } - } - /** * This will parse XContent into a {@link StoredScriptSource}. The following formats can be parsed: * @@ -271,27 +238,8 @@ private static StoredScriptSource parseRemaining(Token token, XContentParser par * } * } * - * The simple template format: - * - * {@code - * { - * "query" : ... - * } - * } - * - * The complex template format: - * - * {@code - * { - * "template": { - * "query" : ... - * } - * } - * } - * - * Note that templates can be handled as both strings and complex JSON objects. - * Also templates may be part of the 'source' parameter in a script. The Parser - * can handle this case as well. + * Note that the "source" parameter can also handle template parsing including from + * a complex JSON object. * * @param content The content from the request to be parsed as described above. * @return The parsed {@link StoredScriptSource}. @@ -316,7 +264,7 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon if (token != Token.FIELD_NAME) { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + ", expected [" + - SCRIPT_PARSE_FIELD.getPreferredName() + ", " + TEMPLATE_PARSE_FIELD.getPreferredName()); + SCRIPT_PARSE_FIELD.getPreferredName() + "]"); } String name = parser.currentName(); @@ -329,28 +277,9 @@ public static StoredScriptSource parse(BytesReference content, XContentType xCon } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{, ]"); } - } else if (TEMPLATE_PARSE_FIELD.getPreferredName().equals(name)) { - - DEPRECATION_LOGGER.deprecated("the template context is now deprecated. Specify templates in a \"script\" element."); - - token = parser.nextToken(); - if (token == Token.VALUE_STRING) { - String source = parser.text(); - - if (source == null || source.isEmpty()) { - DEPRECATION_LOGGER.deprecated("empty templates should no longer be used"); - } - - return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap()); - } else { - return parseRemaining(token, parser); - } - } else if (TEMPLATE_NO_WRAPPER_PARSE_FIELD.getPreferredName().equals(name)) { - DEPRECATION_LOGGER.deprecated("the template context is now deprecated. Specify templates in a \"script\" element."); - return parseRemaining(token, parser); } else { - DEPRECATION_LOGGER.deprecated("scripts should not be stored without a context. 
Specify them in a \"script\" element."); - return parseRemaining(token, parser); + throw new ParsingException(parser.getTokenLocation(), "unexpected field [" + name + "], expected [" + + SCRIPT_PARSE_FIELD.getPreferredName() + "]"); } } catch (IOException ioe) { throw new UncheckedIOException(ioe); @@ -397,16 +326,6 @@ public static Diff readDiffFrom(StreamInput in) throws IOExc private final String source; private final Map options; - /** - * Constructor for use with {@link GetStoredScriptResponse} - * to support the deprecated stored script namespace. - */ - public StoredScriptSource(String source) { - this.lang = null; - this.source = Objects.requireNonNull(source); - this.options = null; - } - /** * Standard StoredScriptSource constructor. * @param lang The language to compile the script with. Must not be {@code null}. @@ -426,35 +345,24 @@ public StoredScriptSource(String lang, String source, Map option * only the source parameter will be read in as a bytes reference. */ public StoredScriptSource(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - this.lang = in.readString(); - this.source = in.readString(); - @SuppressWarnings("unchecked") - Map options = (Map)(Map)in.readMap(); - this.options = options; - } else { - this.lang = null; - this.source = in.readBytesReference().utf8ToString(); - this.options = null; - } + this.lang = in.readString(); + this.source = in.readString(); + @SuppressWarnings("unchecked") + Map options = (Map)(Map)in.readMap(); + this.options = options; } /** - * Writes a {@link StoredScriptSource} to a stream. Version 5.3+ will write - * all of the lang, source, and options parameters. For versions prior to 5.3, - * only the source parameter will be read in as a bytes reference. + * Writes a {@link StoredScriptSource} to a stream. Will write + * all of the lang, source, and options parameters. */ @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeString(lang); - out.writeString(source); - @SuppressWarnings("unchecked") - Map options = (Map)(Map)this.options; - out.writeMap(options); - } else { - out.writeBytesReference(new BytesArray(source)); - } + out.writeString(lang); + out.writeString(source); + @SuppressWarnings("unchecked") + Map options = (Map)(Map)this.options; + out.writeMap(options); } /** diff --git a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java index 6a17556a1035b..7a856ee13b9d3 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java @@ -77,14 +77,12 @@ public void testGetScript() throws Exception { ScriptMetaData.Builder builder = new ScriptMetaData.Builder(null); XContentBuilder sourceBuilder = XContentFactory.jsonBuilder(); - sourceBuilder.startObject().startObject("template").field("field", "value").endObject().endObject(); - builder.storeScript("template", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType())); - assertWarnings("the template context is now deprecated. 
Specify templates in a \"script\" element."); - - sourceBuilder = XContentFactory.jsonBuilder(); - sourceBuilder.startObject().field("template", "value").endObject(); - builder.storeScript("template_field", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType())); - assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element."); + sourceBuilder.startObject().startObject("script") + .field("lang", "_lang") + .startObject("source").field("field", "value").endObject() + .endObject().endObject(); + builder.storeScript("source_template", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), + sourceBuilder.contentType())); sourceBuilder = XContentFactory.jsonBuilder(); sourceBuilder.startObject().startObject("script").field("lang", "_lang").field("source", "_source").endObject().endObject(); @@ -92,26 +90,25 @@ public void testGetScript() throws Exception { ScriptMetaData scriptMetaData = builder.build(); assertEquals("_source", scriptMetaData.getStoredScript("script").getSource()); - assertEquals("{\"field\":\"value\"}", scriptMetaData.getStoredScript("template").getSource()); - assertEquals("value", scriptMetaData.getStoredScript("template_field").getSource()); + assertEquals("{\"field\":\"value\"}", scriptMetaData.getStoredScript("source_template").getSource()); } public void testDiff() throws Exception { ScriptMetaData.Builder builder = new ScriptMetaData.Builder(null); - builder.storeScript("1", StoredScriptSource.parse(new BytesArray("{\"foo\":\"abc\"}"), XContentType.JSON)); - assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element."); - builder.storeScript("2", StoredScriptSource.parse(new BytesArray("{\"foo\":\"def\"}"), XContentType.JSON)); - assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element."); - builder.storeScript("3", StoredScriptSource.parse(new BytesArray("{\"foo\":\"ghi\"}"), XContentType.JSON)); - assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element."); + builder.storeScript("1", StoredScriptSource.parse( + new BytesArray("{\"script\":{\"lang\":\"mustache\",\"source\":{\"foo\":\"abc\"}}}"), XContentType.JSON)); + builder.storeScript("2", StoredScriptSource.parse( + new BytesArray("{\"script\":{\"lang\":\"mustache\",\"source\":{\"foo\":\"def\"}}}"), XContentType.JSON)); + builder.storeScript("3", StoredScriptSource.parse( + new BytesArray("{\"script\":{\"lang\":\"mustache\",\"source\":{\"foo\":\"ghi\"}}}"), XContentType.JSON)); ScriptMetaData scriptMetaData1 = builder.build(); builder = new ScriptMetaData.Builder(scriptMetaData1); - builder.storeScript("2", StoredScriptSource.parse(new BytesArray("{\"foo\":\"changed\"}"), XContentType.JSON)); - assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element."); + builder.storeScript("2", StoredScriptSource.parse( + new BytesArray("{\"script\":{\"lang\":\"mustache\",\"source\":{\"foo\":\"changed\"}}}"), XContentType.JSON)); builder.deleteScript("3"); - builder.storeScript("4", StoredScriptSource.parse(new BytesArray("{\"foo\":\"jkl\"}"), XContentType.JSON)); - assertWarnings("scripts should not be stored without a context. 
Specify them in a \"script\" element."); + builder.storeScript("4", StoredScriptSource.parse( + new BytesArray("{\"script\":{\"lang\":\"mustache\",\"source\":{\"foo\":\"jkl\"}}}"), XContentType.JSON)); ScriptMetaData scriptMetaData2 = builder.build(); ScriptMetaData.ScriptMetadataDiff diff = (ScriptMetaData.ScriptMetadataDiff) scriptMetaData2.diff(scriptMetaData1); diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java index 49e2623626895..79d5c67bc782e 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java @@ -40,19 +40,21 @@ protected StoredScriptSource createTestInstance() { try { XContentBuilder template = XContentBuilder.builder(xContentType.xContent()); template.startObject(); - template.startObject("query"); - template.startObject("match"); - template.field("title", "{{query_string}}"); - template.endObject(); + template.startObject("script"); + { + template.field("lang", "mustache"); + template.startObject("source"); + template.startObject("query").startObject("match").field("title", "{{query_string}}").endObject(); + template.endObject(); + template.endObject(); + } template.endObject(); template.endObject(); Map options = new HashMap<>(); if (randomBoolean()) { options.put(Script.CONTENT_TYPE_OPTION, xContentType.mediaType()); } - StoredScriptSource source = StoredScriptSource.parse(BytesReference.bytes(template), xContentType); - assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element."); - return source; + return StoredScriptSource.parse(BytesReference.bytes(template), xContentType); } catch (IOException e) { throw new AssertionError("Failed to create test instance", e); } @@ -84,7 +86,7 @@ protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws newTemplate.endObject(); newTemplate.endObject(); - switch (between(0, 3)) { + switch (between(0, 2)) { case 0: source = Strings.toString(newTemplate); break; @@ -92,12 +94,9 @@ protected StoredScriptSource mutateInstance(StoredScriptSource instance) throws lang = randomAlphaOfLengthBetween(1, 20); break; case 2: + default: options = new HashMap<>(options); options.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); - break; - case 3: - default: - return new StoredScriptSource(Strings.toString(newTemplate)); } return new StoredScriptSource(lang, source, options); } diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java index 04483c869d9b3..627d67dc833e4 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.script; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; @@ -66,49 +67,6 @@ public void testSourceParsing() throws Exception { assertThat(parsed, equalTo(source)); } - // simple template value string - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { - builder.startObject().field("template", "code").endObject(); - - StoredScriptSource parsed = 
StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); - StoredScriptSource source = new StoredScriptSource("mustache", "code", Collections.emptyMap()); - - assertThat(parsed, equalTo(source)); - assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element."); - } - - // complex template with wrapper template object - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { - builder.startObject().field("template").startObject().field("query", "code").endObject().endObject(); - String code; - - try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { - code = Strings.toString(cb.startObject().field("query", "code").endObject()); - } - - StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); - StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap()); - - assertThat(parsed, equalTo(source)); - assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element."); - } - - // complex template with no wrapper object - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { - builder.startObject().field("query", "code").endObject(); - String code; - - try (XContentBuilder cb = XContentFactory.contentBuilder(builder.contentType())) { - code = Strings.toString(cb.startObject().field("query", "code").endObject()); - } - - StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); - StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap()); - - assertThat(parsed, equalTo(source)); - assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element."); - } - // complex template using script as the field name try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().startObject("script").field("lang", "mustache") @@ -206,6 +164,15 @@ public void testSourceParsingErrors() throws Exception { StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON)); assertThat(iae.getMessage(), equalTo("illegal compiler options [{option=option}] specified")); } + + // check for unsupported template context + try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { + builder.startObject().field("template", "code").endObject(); + ParsingException pEx = expectThrows(ParsingException.class, () -> + StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON)); + assertThat(pEx.getMessage(), equalTo("unexpected field [template], expected ["+ + StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName()+ "]")); + } } public void testEmptyTemplateDeprecations() throws IOException { @@ -219,19 +186,6 @@ public void testEmptyTemplateDeprecations() throws IOException { assertWarnings("empty templates should no longer be used"); } - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { - builder.startObject().field("template", "").endObject(); - - StoredScriptSource parsed = StoredScriptSource.parse(BytesReference.bytes(builder), XContentType.JSON); - StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap()); - - assertThat(parsed, equalTo(source)); - assertWarnings( - "the template context is now deprecated. 
Specify templates in a \"script\" element.", - "empty templates should no longer be used" - ); - } - try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "mustache") .field("source", "").endObject().endObject(); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index afcbd2499033e..1c1d8dde8cfdb 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -220,7 +220,11 @@ public void testConditionSearchWithIndexedTemplate() throws Exception { SearchSourceBuilder searchSourceBuilder = searchSource().query(matchQuery("level", "a")); assertAcked(client().admin().cluster().preparePutStoredScript() .setId("my-template") - .setContent(BytesReference.bytes(jsonBuilder().startObject().field("template").value(searchSourceBuilder).endObject()), + .setContent(BytesReference.bytes( + jsonBuilder().startObject().startObject("script") + .field("lang", "mustache") + .field("source").value(searchSourceBuilder) + .endObject().endObject()), XContentType.JSON) .get()); From ca5822eae190c5a0cbe208f2e5d4f3d9d8a6a133 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 5 Jul 2018 09:36:40 -0400 Subject: [PATCH 14/19] Add unreleased version 6.3.2 --- server/src/main/java/org/elasticsearch/Version.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 911614e00134d..f22408ba2e504 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -172,6 +172,8 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_6_3_0 = new Version(V_6_3_0_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); public static final int V_6_3_1_ID = 6030199; public static final Version V_6_3_1 = new Version(V_6_3_1_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); + public static final int V_6_3_2_ID = 6030299; + public static final Version V_6_3_2 = new Version(V_6_3_2_ID, org.apache.lucene.util.Version.LUCENE_7_3_1); public static final int V_6_4_0_ID = 6040099; public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_7_0_0_alpha1_ID = 7000001; @@ -194,6 +196,8 @@ public static Version fromId(int id) { return V_7_0_0_alpha1; case V_6_4_0_ID: return V_6_4_0; + case V_6_3_2_ID: + return V_6_3_2; case V_6_3_1_ID: return V_6_3_1; case V_6_3_0_ID: From bd1c513422d63d8f7df1fac6743ba7fd7133fa9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 5 Jul 2018 15:38:06 +0200 Subject: [PATCH 15/19] Reduce more raw types warnings (#31780) Similar to #31523. 
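For context on the kind of change applied throughout this patch: a raw generic type such as a bare List makes javac emit rawtypes/unchecked warnings at every use site, and the usual cleanup is to supply a type argument (or a wildcard such as List<?>) and, where a cast from an untyped source is unavoidable, confine @SuppressWarnings("unchecked") to a single local variable so the suppression stays narrow. The following is only an illustrative sketch of that pattern; the class and method names (RawTypesSketch, readList) are hypothetical and are not code from this patch.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

class RawTypesSketch {
    // Before: a raw List triggers a rawtypes warning wherever it is used, e.g.
    //   List values = (List) source.get(field);
    // After: parameterize the declaration and keep the unavoidable cast behind a
    // @SuppressWarnings("unchecked") scoped to one local variable.
    static List<?> readList(Map<String, Object> source, String field) {
        Object value = source.get(field);
        if (value instanceof List) {
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) value; // cast from an untyped source, suppressed narrowly
            return new ArrayList<>(list);
        }
        return Collections.singletonList(value);
    }
}
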
--- .../ingest/common/ForEachProcessor.java | 2 +- .../ingest/common/RemoveProcessor.java | 4 +- .../ingest/common/SortProcessor.java | 5 +- .../ingest/common/ForEachProcessorTests.java | 32 ++++++----- .../ingest/common/IngestRestartIT.java | 2 +- .../mustache/CustomMustacheFactory.java | 9 ++- .../PercolatorQuerySearchTests.java | 2 +- .../uhighlight/CustomUnifiedHighlighter.java | 2 +- .../elasticsearch/action/DocWriteRequest.java | 7 ++- .../ClusterUpdateSettingsRequest.java | 6 +- .../ClusterUpdateSettingsRequestBuilder.java | 4 +- .../indices/create/CreateIndexRequest.java | 11 ++-- .../create/CreateIndexRequestBuilder.java | 2 +- .../flush/TransportShardFlushAction.java | 5 +- .../mapping/get/GetMappingsResponse.java | 9 ++- .../mapping/put/PutMappingRequest.java | 3 +- .../admin/indices/rollover/Condition.java | 4 +- .../admin/indices/rollover/RolloverInfo.java | 12 ++-- .../indices/rollover/RolloverRequest.java | 12 ++-- .../rollover/TransportRolloverAction.java | 6 +- .../settings/put/UpdateSettingsRequest.java | 3 +- .../template/put/PutIndexTemplateRequest.java | 7 +-- .../put/PutIndexTemplateRequestBuilder.java | 4 +- .../action/bulk/BulkShardRequest.java | 2 +- .../action/bulk/TransportShardBulkAction.java | 9 +-- .../TransportSingleItemBulkWriteAction.java | 8 +-- .../action/index/IndexRequest.java | 4 +- .../action/search/SearchRequestBuilder.java | 4 +- .../broadcast/BroadcastShardRequest.java | 2 +- .../broadcast/TransportBroadcastAction.java | 1 - .../replication/ReplicationRequest.java | 1 - .../action/update/UpdateHelper.java | 5 +- .../action/update/UpdateRequest.java | 8 +-- .../action/update/UpdateRequestBuilder.java | 8 +-- .../routing/DelayedAllocationService.java | 2 +- .../common/geo/GeoShapeType.java | 22 ++++---- .../geo/builders/MultiPointBuilder.java | 3 +- .../inject/spi/DefaultElementVisitor.java | 2 +- .../common/inject/spi/Dependency.java | 2 +- .../common/logging/DeprecationLogger.java | 4 +- .../plain/BytesBinaryDVAtomicFieldData.java | 2 +- .../put/PutRepositoryRequestTests.java | 5 +- .../admin/indices/rollover/RolloverIT.java | 2 +- .../rollover/RolloverRequestTests.java | 8 +-- .../TransportRolloverActionTests.java | 10 ++-- .../IndicesShardStoreResponseTests.java | 16 ++++-- .../action/bulk/BulkRequestModifierTests.java | 14 ++--- .../bulk/TransportShardBulkActionTests.java | 56 +++++++++---------- .../action/search/SearchAsyncActionTests.java | 6 +- .../search/SearchScrollAsyncActionTests.java | 8 +-- .../action/support/IndicesOptionsTests.java | 8 +-- .../BroadcastReplicationTests.java | 4 +- .../action/update/UpdateRequestTests.java | 9 +-- .../cluster/ClusterModuleTests.java | 3 +- .../common/geo/GeoJsonShapeParserTests.java | 18 +++--- .../common/geo/GeoWKTShapeParserTests.java | 22 ++++---- .../GeometryCollectionBuilderTests.java | 4 +- .../support/XContentMapValuesTests.java | 37 ++++++------ .../gateway/MetaDataWriteDataNodesIT.java | 11 ++-- .../index/fielddata/GeoFieldDataTests.java | 9 +-- .../mapper/MultiFieldsIntegrationIT.java | 28 ++++++---- .../query/GeoPolygonQueryBuilderTests.java | 4 +- .../query/GeoShapeQueryBuilderTests.java | 17 +++--- .../index/query/RewriteableTests.java | 4 +- .../common/inject/ModuleTestCase.java | 46 +++++++-------- .../ml/datafeed/DatafeedJobBuilderTests.java | 14 ++--- 66 files changed, 304 insertions(+), 301 deletions(-) diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java 
b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index 1c64fdb7408ef..f5bf9cc959105 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -63,7 +63,7 @@ boolean isIgnoreMissing() { @Override public void execute(IngestDocument ingestDocument) throws Exception { - List values = ingestDocument.getFieldValue(field, List.class, ignoreMissing); + List values = ingestDocument.getFieldValue(field, List.class, ignoreMissing); if (values == null) { if (ignoreMissing) { return; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java index 32d213694b1b5..3425bb8abe236 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java @@ -73,7 +73,9 @@ public RemoveProcessor create(Map registry, String pr final List fields = new ArrayList<>(); final Object field = ConfigurationUtils.readObject(TYPE, processorTag, config, "field"); if (field instanceof List) { - fields.addAll((List) field); + @SuppressWarnings("unchecked") + List stringList = (List) field; + fields.addAll(stringList); } else { fields.add((String) field); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java index 28e568233ebf5..7ff266efe6b91 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java @@ -49,6 +49,7 @@ public enum SortOrder { this.direction = direction; } + @Override public String toString() { return this.direction; } @@ -94,13 +95,13 @@ String getTargetField() { @Override @SuppressWarnings("unchecked") public void execute(IngestDocument document) { - List list = document.getFieldValue(field, List.class); + List> list = document.getFieldValue(field, List.class); if (list == null) { throw new IllegalArgumentException("field [" + field + "] is null, cannot sort."); } - List copy = new ArrayList<>(list); + List> copy = new ArrayList<>(list); if (order.equals(SortOrder.ASCENDING)) { Collections.sort(copy); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 1491bd481bd07..ffc5bcd4ac930 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -19,13 +19,6 @@ package org.elasticsearch.ingest.common; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; import org.elasticsearch.ingest.CompoundProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; @@ -34,6 +27,14 @@ import org.elasticsearch.script.TemplateScript; import org.elasticsearch.test.ESTestCase; +import java.util.ArrayList; +import java.util.Arrays; +import 
java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; @@ -54,7 +55,8 @@ public void testExecute() throws Exception { ); processor.execute(ingestDocument); - List result = ingestDocument.getFieldValue("values", List.class); + @SuppressWarnings("unchecked") + List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.get(0), equalTo("FOO")); assertThat(result.get(1), equalTo("BAR")); assertThat(result.get(2), equalTo("BAZ")); @@ -204,12 +206,12 @@ public void testModifyFieldsOutsideArray() throws Exception { ), false); processor.execute(ingestDocument); - List result = ingestDocument.getFieldValue("values", List.class); + List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.get(0), equalTo("STRING")); assertThat(result.get(1), equalTo(1)); assertThat(result.get(2), equalTo(null)); - List errors = ingestDocument.getFieldValue("errors", List.class); + List errors = ingestDocument.getFieldValue("errors", List.class); assertThat(errors.size(), equalTo(2)); } @@ -230,7 +232,7 @@ public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor, false); forEachProcessor.execute(ingestDocument); - List result = ingestDocument.getFieldValue("values", List.class); + List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.get(0), equalTo("new_value")); assertThat(result.get(1), equalTo("new_value")); assertThat(result.get(2), equalTo("new_value")); @@ -263,13 +265,13 @@ public void testNestedForEach() throws Exception { "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor, false), false); processor.execute(ingestDocument); - List result = ingestDocument.getFieldValue("values1.0.values2", List.class); + List result = ingestDocument.getFieldValue("values1.0.values2", List.class); assertThat(result.get(0), equalTo("ABC")); assertThat(result.get(1), equalTo("DEF")); - result = ingestDocument.getFieldValue("values1.1.values2", List.class); - assertThat(result.get(0), equalTo("GHI")); - assertThat(result.get(1), equalTo("JKL")); + List result2 = ingestDocument.getFieldValue("values1.1.values2", List.class); + assertThat(result2.get(0), equalTo("GHI")); + assertThat(result2.get(1), equalTo("JKL")); } public void testIgnoreMissing() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java index 9658637f16444..69236144007bc 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java @@ -60,7 +60,7 @@ public static class CustomScriptPlugin extends MockScriptPlugin { protected Map, Object>> pluginScripts() { return Collections.singletonMap("my_script", script -> { @SuppressWarnings("unchecked") - Map ctx = (Map) script.get("ctx"); + Map ctx = (Map) script.get("ctx"); ctx.put("z", 0); return null; }); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java index 
008613311f421..04835bed116d4 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java @@ -30,6 +30,7 @@ import com.github.mustachejava.codes.DefaultMustache; import com.github.mustachejava.codes.IterableCode; import com.github.mustachejava.codes.WriteCode; + import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; @@ -202,11 +203,9 @@ protected Function createFunction(Object resolved) { return null; } try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { - if (resolved == null) { - builder.nullValue(); - } else if (resolved instanceof Iterable) { + if (resolved instanceof Iterable) { builder.startArray(); - for (Object o : (Iterable) resolved) { + for (Object o : (Iterable) resolved) { builder.value(o); } builder.endArray(); @@ -254,7 +253,7 @@ protected Function createFunction(Object resolved) { return null; } else if (resolved instanceof Iterable) { StringJoiner joiner = new StringJoiner(delimiter); - for (Object o : (Iterable) resolved) { + for (Object o : (Iterable) resolved) { joiner.add(oh.stringify(o)); } return joiner.toString(); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java index 0650461e1a9d9..b7693f514393b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java @@ -65,7 +65,7 @@ protected Map, Object>> pluginScripts() { scripts.put("1==1", vars -> Boolean.TRUE); scripts.put("use_fielddata_please", vars -> { LeafDocLookup leafDocLookup = (LeafDocLookup) vars.get("_doc"); - ScriptDocValues scriptDocValues = leafDocLookup.get("employees.name"); + ScriptDocValues scriptDocValues = leafDocLookup.get("employees.name"); return "virginia_potts".equals(scriptDocValues.get(0)); }); return scripts; diff --git a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 2c8169c3ac41f..45ee7becc983e 100644 --- a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -173,7 +173,7 @@ private Collection rewriteCustomQuery(Query query) { SpanQuery[] innerQueries = new SpanQuery[terms[i].length]; for (int j = 0; j < terms[i].length; j++) { if (i == sizeMinus1) { - innerQueries[j] = new SpanMultiTermQueryWrapper(new PrefixQuery(terms[i][j])); + innerQueries[j] = new SpanMultiTermQueryWrapper(new PrefixQuery(terms[i][j])); } else { innerQueries[j] = new SpanTermQuery(terms[i][j]); } diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java index fa0796cafe184..b0d553534e44d 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java @@ -57,6 +57,7 @@ public interface DocWriteRequest extends IndicesRequest { * Get the options for this request * @return the indices options */ + @Override 
IndicesOptions indicesOptions(); /** @@ -157,9 +158,9 @@ public static OpType fromString(String sOpType) { } /** read a document write (index/delete/update) request */ - static DocWriteRequest readDocumentRequest(StreamInput in) throws IOException { + static DocWriteRequest readDocumentRequest(StreamInput in) throws IOException { byte type = in.readByte(); - DocWriteRequest docWriteRequest; + DocWriteRequest docWriteRequest; if (type == 0) { IndexRequest indexRequest = new IndexRequest(); indexRequest.readFrom(in); @@ -179,7 +180,7 @@ static DocWriteRequest readDocumentRequest(StreamInput in) throws IOException { } /** write a document write (index/delete/update) request*/ - static void writeDocumentRequest(StreamOutput out, DocWriteRequest request) throws IOException { + static void writeDocumentRequest(StreamOutput out, DocWriteRequest request) throws IOException { if (request instanceof IndexRequest) { out.writeByte((byte) 0); ((IndexRequest) request).writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index f13c30c53503b..7f74ae668e1bd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -108,8 +108,7 @@ public ClusterUpdateSettingsRequest transientSettings(String source, XContentTyp /** * Sets the transient settings to be updated. They will not survive a full cluster restart */ - @SuppressWarnings({"unchecked", "rawtypes"}) - public ClusterUpdateSettingsRequest transientSettings(Map source) { + public ClusterUpdateSettingsRequest transientSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); @@ -147,8 +146,7 @@ public ClusterUpdateSettingsRequest persistentSettings(String source, XContentTy /** * Sets the persistent settings to be updated. They will get applied cross restarts */ - @SuppressWarnings({"unchecked", "rawtypes"}) - public ClusterUpdateSettingsRequest persistentSettings(Map source) { + public ClusterUpdateSettingsRequest persistentSettings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java index 6d58c989a8f32..46ee53aaf97ab 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestBuilder.java @@ -62,7 +62,7 @@ public ClusterUpdateSettingsRequestBuilder setTransientSettings(String settings, /** * Sets the transient settings to be updated. They will not survive a full cluster restart */ - public ClusterUpdateSettingsRequestBuilder setTransientSettings(Map settings) { + public ClusterUpdateSettingsRequestBuilder setTransientSettings(Map settings) { request.transientSettings(settings); return this; } @@ -94,7 +94,7 @@ public ClusterUpdateSettingsRequestBuilder setPersistentSettings(String settings /** * Sets the persistent settings to be updated. 
They will get applied cross restarts */ - public ClusterUpdateSettingsRequestBuilder setPersistentSettings(Map settings) { + public ClusterUpdateSettingsRequestBuilder setPersistentSettings(Map settings) { request.persistentSettings(settings); return this; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 5f5ba0e24baef..875d17eb54bc8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -58,9 +58,9 @@ import java.util.Set; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; +import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; /** * A request to create an index. Best created with {@link org.elasticsearch.client.Requests#createIndexRequest(String)}. @@ -189,8 +189,7 @@ public CreateIndexRequest settings(XContentBuilder builder) { /** * The settings to create the index with (either json/yaml/properties format) */ - @SuppressWarnings("unchecked") - public CreateIndexRequest settings(Map source) { + public CreateIndexRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); @@ -256,8 +255,7 @@ public CreateIndexRequest mapping(String type, XContentBuilder source) { * @param type The mapping type * @param source The mapping source */ - @SuppressWarnings("unchecked") - public CreateIndexRequest mapping(String type, Map source) { + public CreateIndexRequest mapping(String type, Map source) { if (mappings.containsKey(type)) { throw new IllegalStateException("mappings for type \"" + type + "\" were already defined"); } @@ -286,8 +284,7 @@ public CreateIndexRequest mapping(String type, Object... source) { /** * Sets the aliases that will be associated with the index when it gets created */ - @SuppressWarnings("unchecked") - public CreateIndexRequest aliases(Map source) { + public CreateIndexRequest aliases(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java index bc5bbf9046a88..cc8fb2c32c375 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestBuilder.java @@ -147,7 +147,7 @@ public CreateIndexRequestBuilder addMapping(String type, Object... 
source) { /** * Sets the aliases that will be associated with the index when it gets created */ - public CreateIndexRequestBuilder setAliases(Map source) { + public CreateIndexRequestBuilder setAliases(Map source) { request.aliases(source); return this; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java index c0dc528588fc6..ed1819a1d2480 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportShardFlushAction.java @@ -50,10 +50,11 @@ protected ReplicationResponse newResponseInstance() { } @Override - protected PrimaryResult shardOperationOnPrimary(ShardFlushRequest shardRequest, IndexShard primary) { + protected PrimaryResult shardOperationOnPrimary(ShardFlushRequest shardRequest, + IndexShard primary) { primary.flush(shardRequest.getRequest()); logger.trace("{} flush request executed on primary", primary.shardId()); - return new PrimaryResult(shardRequest, new ReplicationResponse()); + return new PrimaryResult(shardRequest, new ReplicationResponse()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java index 2bf52151d4b14..ad864c94e3693 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.mapping.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.ParseField; @@ -27,7 +28,6 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -39,9 +39,6 @@ public class GetMappingsResponse extends ActionResponse implements ToXContentFra private static final ParseField MAPPINGS = new ParseField("mappings"); - private static final ObjectParser PARSER = - new ObjectParser("get-mappings", false, GetMappingsResponse::new); - private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); GetMappingsResponse(ImmutableOpenMap> mappings) { @@ -101,13 +98,15 @@ public static GetMappingsResponse fromXContent(XContentParser parser) throws IOE for (Map.Entry entry : parts.entrySet()) { final String indexName = entry.getKey(); assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass(); - final Map mapping = (Map) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName()); + @SuppressWarnings("unchecked") + final Map mapping = (Map) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName()); ImmutableOpenMap.Builder typeBuilder = new ImmutableOpenMap.Builder<>(); for (Map.Entry typeEntry : mapping.entrySet()) { final String typeName = typeEntry.getKey(); assert typeEntry.getValue() instanceof Map : 
"expected a map as inner type mapping, but got: " + typeEntry.getValue().getClass(); + @SuppressWarnings("unchecked") final Map fieldMappings = (Map) typeEntry.getValue(); MappingMetaData mmd = new MappingMetaData(typeName, fieldMappings); typeBuilder.put(typeName, mmd); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index ec825a2a5ed96..dc201b38c3bee 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -256,8 +256,7 @@ public PutMappingRequest source(XContentBuilder mappingBuilder) { /** * The mapping source definition. */ - @SuppressWarnings("unchecked") - public PutMappingRequest source(Map mappingSource) { + public PutMappingRequest source(Map mappingSource) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(mappingSource); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/Condition.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/Condition.java index afbc9a554ed5e..6efebde18f577 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/Condition.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/Condition.java @@ -90,10 +90,10 @@ public Stats(long numDocs, long indexCreated, ByteSizeValue indexSize) { * Holder for evaluated condition result */ public static class Result { - public final Condition condition; + public final Condition condition; public final boolean matched; - protected Result(Condition condition, boolean matched) { + protected Result(Condition condition, boolean matched) { this.condition = condition; this.matched = matched; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java index 291dd3a0ddae7..af593481e8a6d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java @@ -45,7 +45,7 @@ public class RolloverInfo extends AbstractDiffable implements Writ @SuppressWarnings("unchecked") public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rollover_info", false, - (a, alias) -> new RolloverInfo(alias, (List) a[0], (Long) a[1])); + (a, alias) -> new RolloverInfo(alias, (List>) a[0], (Long) a[1])); static { PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), (p, c, n) -> p.namedObject(Condition.class, n, c), CONDITION_FIELD); @@ -53,10 +53,10 @@ public class RolloverInfo extends AbstractDiffable implements Writ } private final String alias; - private final List metConditions; + private final List> metConditions; private final long time; - public RolloverInfo(String alias, List metConditions, long time) { + public RolloverInfo(String alias, List> metConditions, long time) { this.alias = alias; this.metConditions = metConditions; this.time = time; @@ -65,7 +65,7 @@ public RolloverInfo(String alias, List metConditions, long time) { public RolloverInfo(StreamInput in) throws IOException { this.alias = in.readString(); this.time = in.readVLong(); - this.metConditions = 
in.readNamedWriteableList(Condition.class); + this.metConditions = (List) in.readNamedWriteableList(Condition.class); } public static RolloverInfo parse(XContentParser parser, String alias) { @@ -76,7 +76,7 @@ public String getAlias() { return alias; } - public List getMetConditions() { + public List> getMetConditions() { return metConditions; } @@ -99,7 +99,7 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(alias); builder.startObject(CONDITION_FIELD.getPreferredName()); - for (Condition condition : metConditions) { + for (Condition condition : metConditions) { condition.toXContent(builder, params); } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index fe5ad65c4799b..48c9d46066034 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -45,7 +45,7 @@ public class RolloverRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { private static final ObjectParser PARSER = new ObjectParser<>("rollover"); - private static final ObjectParser, Void> CONDITION_PARSER = new ObjectParser<>("conditions"); + private static final ObjectParser>, Void> CONDITION_PARSER = new ObjectParser<>("conditions"); private static final ParseField CONDITIONS = new ParseField("conditions"); private static final ParseField MAX_AGE_CONDITION = new ParseField(MaxAgeCondition.NAME); @@ -78,7 +78,7 @@ public class RolloverRequest extends AcknowledgedRequest implem private String alias; private String newIndexName; private boolean dryRun; - private Map conditions = new HashMap<>(2); + private Map> conditions = new HashMap<>(2); //the index name "_na_" is never read back, what matters are settings, mappings and aliases private CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); @@ -106,7 +106,7 @@ public void readFrom(StreamInput in) throws IOException { dryRun = in.readBoolean(); int size = in.readVInt(); for (int i = 0; i < size; i++) { - Condition condition = in.readNamedWriteable(Condition.class); + Condition condition = in.readNamedWriteable(Condition.class); this.conditions.put(condition.name, condition); } createIndexRequest = new CreateIndexRequest(); @@ -120,7 +120,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(newIndexName); out.writeBoolean(dryRun); out.writeVInt(conditions.size()); - for (Condition condition : conditions.values()) { + for (Condition condition : conditions.values()) { if (condition.includedInVersion(out.getVersion())) { out.writeNamedWriteable(condition); } @@ -196,7 +196,7 @@ public boolean isDryRun() { return dryRun; } - Map getConditions() { + Map> getConditions() { return conditions; } @@ -221,7 +221,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws createIndexRequest.innerToXContent(builder, params); builder.startObject(CONDITIONS.getPreferredName()); - for (Condition condition : conditions.values()) { + for (Condition condition : conditions.values()) { condition.toXContent(builder, params); } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index c7780d41fabd4..3fa046263afc7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -132,7 +132,7 @@ public void onResponse(IndicesStatsResponse statsResponse) { new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, true, false, false, false)); return; } - List metConditions = rolloverRequest.getConditions().values().stream() + List> metConditions = rolloverRequest.getConditions().values().stream() .filter(condition -> conditionResults.get(condition.toString())).collect(Collectors.toList()); if (conditionResults.size() == 0 || metConditions.size() > 0) { CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(unresolvedName, rolloverIndexName, @@ -221,7 +221,7 @@ static String generateRolloverIndexName(String sourceIndexName, IndexNameExpress } } - static Map evaluateConditions(final Collection conditions, + static Map evaluateConditions(final Collection> conditions, final DocsStats docsStats, final IndexMetaData metaData) { final long numDocs = docsStats == null ? 0 : docsStats.getCount(); final long indexSize = docsStats == null ? 0 : docsStats.getTotalSizeInBytes(); @@ -231,7 +231,7 @@ static Map evaluateConditions(final Collection condi .collect(Collectors.toMap(result -> result.condition.toString(), result -> result.matched)); } - static Map evaluateConditions(final Collection conditions, final IndexMetaData metaData, + static Map evaluateConditions(final Collection> conditions, final IndexMetaData metaData, final IndicesStatsResponse statsResponse) { return evaluateConditions(conditions, statsResponse.getPrimaries().getDocs(), metaData); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index 18c7d506c7275..b229e2c9e6a23 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -155,8 +155,7 @@ public UpdateSettingsRequest setPreserveExisting(boolean preserveExisting) { /** * Sets the settings to be updated (either json or yaml format) */ - @SuppressWarnings("unchecked") - public UpdateSettingsRequest settings(Map source) { + public UpdateSettingsRequest settings(Map source) { try { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map(source); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 5afba8f66aed3..d194b9acd1b7f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -61,9 +61,9 @@ import java.util.stream.Collectors; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static 
org.elasticsearch.common.settings.Settings.writeSettingsToStream; +import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; /** * A request to create an index template. @@ -309,7 +309,7 @@ public PutIndexTemplateRequest source(XContentBuilder templateBuilder) { * The template source definition. */ @SuppressWarnings("unchecked") - public PutIndexTemplateRequest source(Map templateSource) { + public PutIndexTemplateRequest source(Map templateSource) { Map source = templateSource; for (Map.Entry entry : source.entrySet()) { String name = entry.getKey(); @@ -411,8 +411,7 @@ public Set aliases() { /** * Sets the aliases that will be associated with the index when it gets created */ - @SuppressWarnings("unchecked") - public PutIndexTemplateRequest aliases(Map source) { + public PutIndexTemplateRequest aliases(Map source) { try { XContentBuilder builder = XContentFactory.jsonBuilder(); builder.map(source); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java index 5a9f359554bbf..3c14b1ab18f99 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestBuilder.java @@ -140,7 +140,7 @@ public PutIndexTemplateRequestBuilder addMapping(String type, Object... source) /** * Sets the aliases that will be associated with the index when it gets created */ - public PutIndexTemplateRequestBuilder setAliases(Map source) { + public PutIndexTemplateRequestBuilder setAliases(Map source) { request.aliases(source); return this; } @@ -221,7 +221,7 @@ public PutIndexTemplateRequestBuilder setSource(XContentBuilder templateBuilder) /** * The template source definition. */ - public PutIndexTemplateRequestBuilder setSource(Map templateSource) { + public PutIndexTemplateRequestBuilder setSource(Map templateSource) { request.source(templateSource); return this; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index 8e2dde7db6370..efb08a01e43ab 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -117,7 +117,7 @@ public void onRetry() { if (item.request() instanceof ReplicationRequest) { // all replication requests need to be notified here as well to ie. 
make sure that internal optimizations are // disabled see IndexRequest#canHaveDuplicates() - ((ReplicationRequest) item.request()).onRetry(); + ((ReplicationRequest) item.request()).onRetry(); } } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 7fc58b667c579..a78421a2328cb 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -205,7 +205,7 @@ static BulkItemResponse createPrimaryResponse(BulkItemResultHolder bulkItemResul return primaryResponse; } else if (operationResult.getResultType() == Engine.Result.Type.FAILURE) { - DocWriteRequest docWriteRequest = replicaRequest.request(); + DocWriteRequest docWriteRequest = replicaRequest.request(); Exception failure = operationResult.getFailure(); if (isConflictException(failure)) { logger.trace(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", @@ -240,7 +240,7 @@ static Translog.Location executeBulkItemRequest(IndexMetaData metaData, IndexSha int requestIndex, UpdateHelper updateHelper, LongSupplier nowInMillisSupplier, final MappingUpdatePerformer mappingUpdater) throws Exception { - final DocWriteRequest itemRequest = request.items()[requestIndex].request(); + final DocWriteRequest itemRequest = request.items()[requestIndex].request(); final DocWriteRequest.OpType opType = itemRequest.opType(); final BulkItemResultHolder responseHolder; switch (itemRequest.opType()) { @@ -486,7 +486,7 @@ public static Translog.Location performOnReplica(BulkShardRequest request, Index for (int i = 0; i < request.items().length; i++) { BulkItemRequest item = request.items()[i]; final Engine.Result operationResult; - DocWriteRequest docWriteRequest = item.request(); + DocWriteRequest docWriteRequest = item.request(); switch (replicaItemExecutionMode(item, i)) { case NORMAL: final DocWriteResponse primaryResponse = item.getPrimaryResponse().getResponse(); @@ -510,7 +510,7 @@ public static Translog.Location performOnReplica(BulkShardRequest request, Index return location; } - private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse, DocWriteRequest docWriteRequest, + private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse, DocWriteRequest docWriteRequest, IndexShard replica) throws Exception { final Engine.Result result; switch (docWriteRequest.opType()) { @@ -605,6 +605,7 @@ private static T executeOnPrimaryWhileHandlingMappingU class ConcreteMappingUpdatePerformer implements MappingUpdatePerformer { + @Override public void updateMappings(final Mapping update, final ShardId shardId, final String type) { assert update != null; assert shardId != null; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSingleItemBulkWriteAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSingleItemBulkWriteAction.java index ed17971a77c1d..892daae4bb275 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSingleItemBulkWriteAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSingleItemBulkWriteAction.java @@ -75,7 +75,7 @@ protected WritePrimaryResult shardOperationOnPrimary( BulkItemRequest[] itemRequests = new BulkItemRequest[1]; WriteRequest.RefreshPolicy refreshPolicy = request.getRefreshPolicy(); request.setRefreshPolicy(WriteRequest.RefreshPolicy.NONE); - 
itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest) request)); + itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest) request)); BulkShardRequest bulkShardRequest = new BulkShardRequest(request.shardId(), refreshPolicy, itemRequests); WritePrimaryResult bulkResult = shardBulkAction.shardOperationOnPrimary(bulkShardRequest, primary); @@ -98,7 +98,7 @@ protected WriteReplicaResult shardOperationOnReplica( Request replicaRequest, IndexShard replica) throws Exception { BulkItemRequest[] itemRequests = new BulkItemRequest[1]; WriteRequest.RefreshPolicy refreshPolicy = replicaRequest.getRefreshPolicy(); - itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest) replicaRequest)); + itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest) replicaRequest)); BulkShardRequest bulkShardRequest = new BulkShardRequest(replicaRequest.shardId(), refreshPolicy, itemRequests); WriteReplicaResult result = shardBulkAction.shardOperationOnReplica(bulkShardRequest, replica); // a replica operation can never throw a document-level failure, @@ -121,9 +121,9 @@ ActionListener wrapBulkResponse(ActionListener listener) }, listener::onFailure); } - public static BulkRequest toSingleItemBulkRequest(ReplicatedWriteRequest request) { + public static BulkRequest toSingleItemBulkRequest(ReplicatedWriteRequest request) { BulkRequest bulkRequest = new BulkRequest(); - bulkRequest.add(((DocWriteRequest) request)); + bulkRequest.add(((DocWriteRequest) request)); bulkRequest.setRefreshPolicy(request.getRefreshPolicy()); bulkRequest.timeout(request.timeout()); bulkRequest.waitForActiveShards(request.waitForActiveShards()); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index c3726d7641050..51997b32edf1d 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -282,7 +282,7 @@ public Map sourceAsMap() { * * @param source The map to index */ - public IndexRequest source(Map source) throws ElasticsearchGenerationException { + public IndexRequest source(Map source) throws ElasticsearchGenerationException { return source(source, Requests.INDEX_CONTENT_TYPE); } @@ -291,7 +291,7 @@ public IndexRequest source(Map source) throws ElasticsearchGenerationException { * * @param source The map to index */ - public IndexRequest source(Map source, XContentType contentType) throws ElasticsearchGenerationException { + public IndexRequest source(Map source, XContentType contentType) throws ElasticsearchGenerationException { try { XContentBuilder builder = XContentFactory.contentBuilder(contentType); builder.map(source); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 821a7d7be7f73..9389edeb345fc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -26,11 +26,11 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.script.Script; -import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import 
org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.search.slice.SliceBuilder; @@ -341,7 +341,7 @@ public SearchRequestBuilder addSort(String field, SortOrder order) { * * @see org.elasticsearch.search.sort.SortBuilders */ - public SearchRequestBuilder addSort(SortBuilder sort) { + public SearchRequestBuilder addSort(SortBuilder sort) { sourceBuilder().sort(sort); return this; } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java index 1012e8930bb02..51a77e19a0aa2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastShardRequest.java @@ -38,7 +38,7 @@ public abstract class BroadcastShardRequest extends TransportRequest implements public BroadcastShardRequest() { } - protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) { + protected BroadcastShardRequest(ShardId shardId, BroadcastRequest> request) { this.shardId = shardId; this.originalIndices = new OriginalIndices(request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index 45a65a31390e6..3045f6ea43aa1 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -205,7 +205,6 @@ protected void onOperation(ShardRouting shard, int shardIndex, ShardResponse res } } - @SuppressWarnings({"unchecked"}) void onOperation(@Nullable ShardRouting shard, final ShardIterator shardIt, int shardIndex, Exception e) { // we set the shard failure always, even if its the first in the replication group, and the next one // will work (it will just override it...) diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index c463ad76c856e..db043238feb3e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -150,7 +150,6 @@ public final Request waitForActiveShards(ActiveShardCount waitForActiveShards) { * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)} * to get the ActiveShardCount. 
*/ - @SuppressWarnings("unchecked") public final Request waitForActiveShards(final int waitForActiveShards) { return waitForActiveShards(ActiveShardCount.from(waitForActiveShards)); } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 4c5accbb4ccb5..5212b1f35214c 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -77,7 +77,6 @@ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier * Prepares an update request by converting it into an index or delete request or an update response (no action, in the event of a * noop). */ - @SuppressWarnings("unchecked") protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult, LongSupplier nowInMillis) { if (getResult.isExists() == false) { // If the document didn't exist, execute the update request as an upsert @@ -108,7 +107,8 @@ Tuple> executeScriptedUpsert(IndexRequest upse ctx = executeScript(script, ctx); UpdateOpType operation = UpdateOpType.lenientFromString((String) ctx.get(ContextFields.OP), logger, script.getIdOrCode()); - Map newSource = (Map) ctx.get(ContextFields.SOURCE); + @SuppressWarnings("unchecked") + Map newSource = (Map) ctx.get(ContextFields.SOURCE); if (operation != UpdateOpType.CREATE && operation != UpdateOpType.NONE) { // Only valid options for an upsert script are "create" (the default) or "none", meaning abort upsert @@ -248,6 +248,7 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetRes UpdateOpType operation = UpdateOpType.lenientFromString((String) ctx.get(ContextFields.OP), logger, request.script.getIdOrCode()); + @SuppressWarnings("unchecked") final Map updatedSourceAsMap = (Map) ctx.get(ContextFields.SOURCE); switch (operation) { diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 3f74f7311c202..96816efe5322e 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -551,7 +551,7 @@ public UpdateRequest doc(XContentBuilder source) { /** * Sets the doc to use for updates when a script is not specified. */ - public UpdateRequest doc(Map source) { + public UpdateRequest doc(Map source) { safeDoc().source(source); return this; } @@ -559,7 +559,7 @@ public UpdateRequest doc(Map source) { /** * Sets the doc to use for updates when a script is not specified. */ - public UpdateRequest doc(Map source, XContentType contentType) { + public UpdateRequest doc(Map source, XContentType contentType) { safeDoc().source(source, contentType); return this; } @@ -637,7 +637,7 @@ public UpdateRequest upsert(XContentBuilder source) { /** * Sets the doc source of the update request to be used when the document does not exists. */ - public UpdateRequest upsert(Map source) { + public UpdateRequest upsert(Map source) { safeUpsertRequest().source(source); return this; } @@ -645,7 +645,7 @@ public UpdateRequest upsert(Map source) { /** * Sets the doc source of the update request to be used when the document does not exists. 
*/ - public UpdateRequest upsert(Map source, XContentType contentType) { + public UpdateRequest upsert(Map source, XContentType contentType) { safeUpsertRequest().source(source, contentType); return this; } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 74935adbbb283..9d1fd4a677f05 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -187,7 +187,7 @@ public UpdateRequestBuilder setDoc(XContentBuilder source) { /** * Sets the doc to use for updates when a script is not specified. */ - public UpdateRequestBuilder setDoc(Map source) { + public UpdateRequestBuilder setDoc(Map source) { request.doc(source); return this; } @@ -195,7 +195,7 @@ public UpdateRequestBuilder setDoc(Map source) { /** * Sets the doc to use for updates when a script is not specified. */ - public UpdateRequestBuilder setDoc(Map source, XContentType contentType) { + public UpdateRequestBuilder setDoc(Map source, XContentType contentType) { request.doc(source, contentType); return this; } @@ -262,7 +262,7 @@ public UpdateRequestBuilder setUpsert(XContentBuilder source) { /** * Sets the doc source of the update request to be used when the document does not exists. */ - public UpdateRequestBuilder setUpsert(Map source) { + public UpdateRequestBuilder setUpsert(Map source) { request.upsert(source); return this; } @@ -270,7 +270,7 @@ public UpdateRequestBuilder setUpsert(Map source) { /** * Sets the doc source of the update request to be used when the document does not exists. */ - public UpdateRequestBuilder setUpsert(Map source, XContentType contentType) { + public UpdateRequestBuilder setUpsert(Map source, XContentType contentType) { request.upsert(source, contentType); return this; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/DelayedAllocationService.java b/server/src/main/java/org/elasticsearch/cluster/routing/DelayedAllocationService.java index fd7f8f6811fdf..b613a91abce99 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/DelayedAllocationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/DelayedAllocationService.java @@ -67,7 +67,7 @@ public class DelayedAllocationService extends AbstractLifecycleComponent impleme class DelayedRerouteTask extends ClusterStateUpdateTask { final TimeValue nextDelay; // delay until submitting the reroute command final long baseTimestampNanos; // timestamp (in nanos) upon which delay was calculated - volatile ScheduledFuture future; + volatile ScheduledFuture future; final AtomicBoolean cancelScheduling = new AtomicBoolean(); DelayedRerouteTask(TimeValue nextDelay, long baseTimestampNanos) { diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java b/server/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java index ee480ffad7092..1b918f7241308 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.geo; -import org.locationtech.jts.geom.Coordinate; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.geo.builders.CircleBuilder; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; @@ -35,6 +34,7 @@ import 
org.elasticsearch.common.geo.parsers.CoordinateNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.unit.DistanceUnit; +import org.locationtech.jts.geom.Coordinate; import java.util.ArrayList; import java.util.HashMap; @@ -48,7 +48,7 @@ public enum GeoShapeType { POINT("point") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public PointBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { return new PointBuilder().coordinate(validate(coordinates, coerce).coordinate); } @@ -66,7 +66,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, MULTIPOINT("multipoint") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public MultiPointBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { validate(coordinates, coerce); CoordinatesBuilder coordinatesBuilder = new CoordinatesBuilder(); @@ -96,7 +96,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, LINESTRING("linestring") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public LineStringBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { validate(coordinates, coerce); CoordinatesBuilder line = new CoordinatesBuilder(); @@ -117,7 +117,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, MULTILINESTRING("multilinestring") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public MultiLineStringBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { validate(coordinates, coerce); MultiLineStringBuilder multiline = new MultiLineStringBuilder(); @@ -138,7 +138,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, POLYGON("polygon") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public PolygonBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { validate(coordinates, coerce); // build shell @@ -199,7 +199,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, MULTIPOLYGON("multipolygon") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public MultiPolygonBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { validate(coordinates, coerce); MultiPolygonBuilder polygons = new MultiPolygonBuilder(orientation); @@ -217,7 +217,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, ENVELOPE("envelope") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public EnvelopeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { validate(coordinates, coerce); // verify coordinate bounds, correct if necessary @@ -249,7 +249,7 @@ public String wktName() { }, CIRCLE("circle") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public CircleBuilder getBuilder(CoordinateNode coordinates, 
DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { return new CircleBuilder().center(coordinates.coordinate).radius(radius); @@ -263,7 +263,7 @@ CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { }, GEOMETRYCOLLECTION("geometrycollection") { @Override - public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, Orientation orientation, boolean coerce) { // noop, handled in parser return null; @@ -303,7 +303,7 @@ public static GeoShapeType forName(String geoshapename) { throw new IllegalArgumentException("unknown geo_shape ["+geoshapename+"]"); } - public abstract ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + public abstract ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, ShapeBuilder.Orientation orientation, boolean coerce); abstract CoordinateNode validate(CoordinateNode coordinates, boolean coerce); diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index c85f6bd439673..c39cc397a34ed 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -19,13 +19,12 @@ package org.elasticsearch.common.geo.builders; -import org.locationtech.jts.geom.Coordinate; - import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.XShapeCollection; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.locationtech.jts.geom.Coordinate; import org.locationtech.spatial4j.shape.Point; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultElementVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultElementVisitor.java index d86f0bacdbd32..3a50516fc5698 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultElementVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/DefaultElementVisitor.java @@ -62,7 +62,7 @@ public V visit(ProviderLookup providerLookup) { } @Override - public V visit(InjectionRequest injectionRequest) { + public V visit(InjectionRequest injectionRequest) { return visitOther(injectionRequest); } diff --git a/server/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java b/server/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java index 0f8f6ed92cf45..d6c96ad3b3495 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/Dependency.java @@ -106,7 +106,7 @@ public int hashCode() { @Override public boolean equals(Object o) { if (o instanceof Dependency) { - Dependency dependency = (Dependency) o; + Dependency dependency = (Dependency) o; return Objects.equals(injectionPoint, dependency.injectionPoint) && Objects.equals(parameterIndex, dependency.parameterIndex) && Objects.equals(key, dependency.key); diff --git a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java index 7c5a6f9b2361f..d1ac53fff3b99 100644 --- 
a/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java +++ b/server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java @@ -131,7 +131,7 @@ public void deprecated(String msg, Object... params) { // LRU set of keys used to determine if a deprecation message should be emitted to the deprecation logs private Set keys = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap() { @Override - protected boolean removeEldestEntry(final Map.Entry eldest) { + protected boolean removeEldestEntry(final Map.Entry eldest) { return size() > 128; } })); @@ -390,7 +390,7 @@ static String encode(final String s) { final StringBuilder sb = new StringBuilder(s.length()); boolean encodingNeeded = false; for (int i = 0; i < s.length();) { - int current = (int) s.charAt(i); + int current = s.charAt(i); /* * Either the character does not need encoding or it does; when the character does not need encoding we append the character to * a buffer and move to the next character and when the character does need encoding, we peel off as many characters as possible diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java index aa2775046ff1b..af4c7fd0da6b2 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java @@ -89,7 +89,7 @@ public BytesRef nextValue() throws IOException { } @Override - public ScriptDocValues getScriptValues() { + public ScriptDocValues getScriptValues() { return new ScriptDocValues.BytesRefs(getBytesValues()); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestTests.java index 9b88659a307f8..0c21a0b51e0a4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryRequestTests.java @@ -35,6 +35,7 @@ public class PutRepositoryRequestTests extends ESTestCase { + @SuppressWarnings("unchecked") public void testCreateRepositoryToXContent() throws IOException { Map mapParams = new HashMap<>(); PutRepositoryRequest request = new PutRepositoryRequest(); @@ -62,11 +63,11 @@ public void testCreateRepositoryToXContent() throws IOException { assertThat(outputMap.get("name"), equalTo(request.name())); assertThat(outputMap.get("verify"), equalTo(request.verify())); assertThat(outputMap.get("type"), equalTo(request.type())); - Map settings = (Map) outputMap.get("settings"); + Map settings = (Map) outputMap.get("settings"); if (addSettings) { assertThat(settings.get(FsRepository.LOCATION_SETTING.getKey()), equalTo(".")); } else { - assertTrue(((Map) outputMap.get("settings")).isEmpty()); + assertTrue(((Map) outputMap.get("settings")).isEmpty()); } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index aa35d9d273a92..4d86dbbc51f33 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -273,7 +273,7 @@ public void testRolloverMaxSize() throws Exception { assertThat(response.getNewIndex(), equalTo("test-000002")); assertThat("Should rollover with a small max_size condition", response.isRolledOver(), equalTo(true)); final IndexMetaData oldIndex = client().admin().cluster().prepareState().get().getState().metaData().index("test-1"); - List metConditions = oldIndex.getRolloverInfos().get("test_alias").getMetConditions(); + List> metConditions = oldIndex.getRolloverInfos().get("test_alias").getMetConditions(); assertThat(metConditions.size(), equalTo(1)); assertThat(metConditions.get(0).toString(), equalTo(new MaxSizeCondition(maxSizeValue).toString())); assertThat(oldIndex.getRolloverInfos().get("test_alias").getTime(), diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 1e8d8e2a2932c..6443c0e5ce961 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -73,7 +73,7 @@ public void testConditionsParsing() throws Exception { .endObject() .endObject(); request.fromXContent(createParser(builder)); - Map conditions = request.getConditions(); + Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(3)); MaxAgeCondition maxAgeCondition = (MaxAgeCondition)conditions.get(MaxAgeCondition.NAME); assertThat(maxAgeCondition.value.getMillis(), equalTo(TimeValue.timeValueHours(24 * 10).getMillis())); @@ -109,7 +109,7 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject(); request.fromXContent(createParser(builder)); - Map conditions = request.getConditions(); + Map> conditions = request.getConditions(); assertThat(conditions.size(), equalTo(2)); assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1)); assertThat(request.getCreateIndexRequest().aliases().size(), equalTo(1)); @@ -129,8 +129,8 @@ public void testSerialize() throws Exception { cloneRequest.readFrom(in); assertThat(cloneRequest.getNewIndexName(), equalTo(originalRequest.getNewIndexName())); assertThat(cloneRequest.getAlias(), equalTo(originalRequest.getAlias())); - for (Map.Entry entry : cloneRequest.getConditions().entrySet()) { - Condition condition = originalRequest.getConditions().get(entry.getKey()); + for (Map.Entry> entry : cloneRequest.getConditions().entrySet()) { + Condition condition = originalRequest.getConditions().get(entry.getKey()); //here we compare the string representation as there is some information loss when serializing //and de-serializing MaxAgeCondition assertEquals(condition.toString(), entry.getValue().toString()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index be88a69a8f4a3..6149c380cd737 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -63,7 +63,7 @@ public void testDocStatsSelectionFromPrimariesOnly() { long docsInPrimaryShards = 100; long docsInShards = 200; - final 
Condition condition = createTestCondition(); + final Condition condition = createTestCondition(); evaluateConditions(Sets.newHashSet(condition), createMetaData(), createIndicesStatResponse(docsInShards, docsInPrimaryShards)); final ArgumentCaptor argument = ArgumentCaptor.forClass(Condition.Stats.class); verify(condition).evaluate(argument.capture()); @@ -89,7 +89,7 @@ public void testEvaluateConditions() { .creationDate(System.currentTimeMillis() - TimeValue.timeValueHours(3).getMillis()) .settings(settings) .build(); - final Set conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition); + final Set> conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition); Map results = evaluateConditions(conditions, new DocsStats(matchMaxDocs, 0L, ByteSizeUnit.MB.toBytes(120)), metaData); assertThat(results.size(), equalTo(3)); @@ -117,7 +117,7 @@ public void testEvaluateWithoutDocStats() { MaxAgeCondition maxAgeCondition = new MaxAgeCondition(TimeValue.timeValueHours(randomIntBetween(1, 3))); MaxSizeCondition maxSizeCondition = new MaxSizeCondition(new ByteSizeValue(randomNonNegativeLong())); - Set conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition); + Set> conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) @@ -285,8 +285,8 @@ private static IndexMetaData createMetaData() { .build(); } - private static Condition createTestCondition() { - final Condition condition = mock(Condition.class); + private static Condition createTestCondition() { + final Condition condition = mock(Condition.class); when(condition.evaluate(any())).thenReturn(new Condition.Result(condition, true)); return condition; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java index d40199d1d103e..661f47b38a8a4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java @@ -37,7 +37,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; @@ -74,9 +73,10 @@ public void testBasicSerialization() throws Exception { try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) { Map map = parser.map(); - List failureList = (List) map.get("failures"); + List failureList = (List) map.get("failures"); assertThat(failureList.size(), equalTo(1)); - HashMap failureMap = (HashMap) failureList.get(0); + @SuppressWarnings("unchecked") + Map failureMap = (Map) failureList.get(0); assertThat(failureMap.containsKey("index"), equalTo(true)); assertThat(((String) failureMap.get("index")), equalTo("test")); assertThat(failureMap.containsKey("shard"), equalTo(true)); @@ -84,18 +84,22 @@ public void testBasicSerialization() throws Exception { assertThat(failureMap.containsKey("node"), equalTo(true)); assertThat(((String) failureMap.get("node")), equalTo("node1")); + @SuppressWarnings("unchecked") Map indices = (Map) map.get("indices"); for (String index : new String[] {"test", "test2"}) { assertThat(indices.containsKey(index), 
equalTo(true)); + @SuppressWarnings("unchecked") Map shards = ((Map) ((Map) indices.get(index)).get("shards")); assertThat(shards.size(), equalTo(2)); for (String shardId : shards.keySet()) { - HashMap shardStoresStatus = (HashMap) shards.get(shardId); + @SuppressWarnings("unchecked") + Map shardStoresStatus = (Map) shards.get(shardId); assertThat(shardStoresStatus.containsKey("stores"), equalTo(true)); - List stores = (ArrayList) shardStoresStatus.get("stores"); + List stores = (List) shardStoresStatus.get("stores"); assertThat(stores.size(), equalTo(storeStatusList.size())); for (int i = 0; i < stores.size(); i++) { - HashMap storeInfo = ((HashMap) stores.get(i)); + @SuppressWarnings("unchecked") + Map storeInfo = ((Map) stores.get(i)); IndicesShardStoresResponse.StoreStatus storeStatus = storeStatusList.get(i); assertThat(((String) storeInfo.get("allocation_id")), equalTo((storeStatus.getAllocationId()))); assertThat(storeInfo.containsKey("allocation"), equalTo(true)); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java index e7bd34e76ef3a..82ed518256169 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestModifierTests.java @@ -19,12 +19,6 @@ package org.elasticsearch.action.bulk; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; @@ -34,6 +28,12 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -114,7 +114,7 @@ public void onFailure(Exception e) { }); List originalResponses = new ArrayList<>(); - for (DocWriteRequest actionRequest : bulkRequest.requests()) { + for (DocWriteRequest actionRequest : bulkRequest.requests()) { IndexRequest indexRequest = (IndexRequest) actionRequest; IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(), indexRequest.id(), 1, 17, 1, true); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index f4a72dccdcc73..006d2d04fdde4 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -64,9 +64,9 @@ import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyBoolean; -import static org.mockito.Mockito.anyLong; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyLong; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -91,7 +91,7 @@ private IndexMetaData indexMetaData() throws IOException { 
public void testShouldExecuteReplicaItem() throws Exception { // Successful index request should be replicated - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); DocWriteResponse response = new IndexResponse(shardId, "type", "id", 1, 17, 1, randomBoolean()); BulkItemRequest request = new BulkItemRequest(0, writeRequest); @@ -121,9 +121,9 @@ public void testShouldExecuteReplicaItem() throws Exception { assertThat(replicaItemExecutionMode(request, 0), equalTo(ReplicaItemExecutionMode.FAILURE)); // NOOP requests should not be replicated - writeRequest = new UpdateRequest("index", "type", "id"); + DocWriteRequest updateRequest = new UpdateRequest("index", "type", "id"); response = new UpdateResponse(shardId, "type", "id", 1, DocWriteResponse.Result.NOOP); - request = new BulkItemRequest(0, writeRequest); + request = new BulkItemRequest(0, updateRequest); request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.UPDATE, response)); assertThat(replicaItemExecutionMode(request, 0), @@ -137,7 +137,8 @@ public void testExecuteBulkIndexRequest() throws Exception { BulkItemRequest[] items = new BulkItemRequest[1]; boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(create); + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE) + .create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; BulkShardRequest bulkShardRequest = @@ -208,7 +209,7 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception { BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)]; for (int i = 0; i < items.length; i++) { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id_" + i) + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id_" + i) .source(Requests.INDEX_CONTENT_TYPE) .opType(DocWriteRequest.OpType.INDEX); items[i] = new BulkItemRequest(i, writeRequest); @@ -258,7 +259,7 @@ public void testExecuteBulkIndexRequestWithRejection() throws Exception { IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = @@ -281,7 +282,7 @@ public void testExecuteBulkIndexRequestWithErrorWhileUpdatingMapping() throws Ex IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = @@ -323,7 +324,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new DeleteRequest("index", "_doc", "id"); + DocWriteRequest writeRequest = new DeleteRequest("index", "_doc", "id"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = new 
BulkShardRequest(shardId, RefreshPolicy.NONE, items); @@ -339,7 +340,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { assertThat(newLocation, not(location)); BulkItemRequest replicaRequest = bulkShardRequest.items()[0]; - DocWriteRequest replicaDeleteRequest = replicaRequest.request(); + DocWriteRequest replicaDeleteRequest = replicaRequest.request(); BulkItemResponse primaryResponse = replicaRequest.getPrimaryResponse(); DeleteResponse response = primaryResponse.getResponse(); @@ -405,7 +406,7 @@ public void testExecuteBulkDeleteRequest() throws Exception { } public void testNoopUpdateReplicaRequest() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); @@ -436,7 +437,7 @@ public void testNoopUpdateReplicaRequest() throws Exception { } public void testUpdateReplicaRequestWithFailure() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE); + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); Exception err = new ElasticsearchException("I'm dead <(x.x)>"); @@ -472,7 +473,7 @@ public void testUpdateReplicaRequestWithFailure() throws Exception { } public void testUpdateReplicaRequestWithConflictFailure() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE); + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); Exception err = new VersionConflictEngineException(shardId, "_doc", "id", @@ -509,7 +510,7 @@ public void testUpdateReplicaRequestWithConflictFailure() throws Exception { } public void testUpdateReplicaRequestWithSuccess() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); @@ -545,7 +546,7 @@ public void testUpdateReplicaRequestWithSuccess() throws Exception { public void testCalculateTranslogLocation() throws Exception { final Translog.Location original = new Translog.Location(0, 0, 0); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE); BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); BulkItemResultHolder results = new BulkItemResultHolder(null, null, replicaRequest); @@ -659,7 +660,6 @@ public void testProcessUpdateResponse() throws Exception { BulkItemRequest[] itemRequests = new BulkItemRequest[1]; itemRequests[0] = request; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shard.shardId(), RefreshPolicy.NONE, itemRequests); BulkItemResultHolder holder = TransportShardBulkAction.processUpdateResponse(updateRequest, "index", indexResult, translate, shard, 7); @@ -671,7 +671,7 @@ public void testProcessUpdateResponse() throws Exception { assertThat(holder.operationResult, equalTo(indexResult)); BulkItemRequest replicaBulkRequest = holder.replicaRequest; 
assertThat(replicaBulkRequest.id(), equalTo(7)); - DocWriteRequest replicaRequest = replicaBulkRequest.request(); + DocWriteRequest replicaRequest = replicaBulkRequest.request(); assertThat(replicaRequest, instanceOf(IndexRequest.class)); assertThat(replicaRequest, equalTo(indexRequest)); @@ -685,7 +685,7 @@ public void testProcessUpdateResponse() throws Exception { assertThat(deleteHolder.operationResult, equalTo(deleteResult)); BulkItemRequest delReplicaBulkRequest = deleteHolder.replicaRequest; assertThat(delReplicaBulkRequest.id(), equalTo(8)); - DocWriteRequest delReplicaRequest = delReplicaBulkRequest.request(); + DocWriteRequest delReplicaRequest = delReplicaBulkRequest.request(); assertThat(delReplicaRequest, instanceOf(DeleteRequest.class)); assertThat(delReplicaRequest, equalTo(deleteRequest)); @@ -699,13 +699,11 @@ public void testExecuteUpdateRequestOnce() throws Exception { Map source = new HashMap<>(); BulkItemRequest[] items = new BulkItemRequest[1]; boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(create); + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE) + .create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; - BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location location = new Translog.Location(0, 0, 0); IndexRequest indexRequest = new IndexRequest("index", "_doc", "id"); indexRequest.source(source); @@ -730,7 +728,7 @@ public void testExecuteUpdateRequestOnce() throws Exception { assertThat(updateResp.getGetResult(), equalTo(null)); BulkItemRequest replicaBulkRequest = holder.replicaRequest; assertThat(replicaBulkRequest.id(), equalTo(0)); - DocWriteRequest replicaRequest = replicaBulkRequest.request(); + DocWriteRequest replicaRequest = replicaBulkRequest.request(); assertThat(replicaRequest, instanceOf(IndexRequest.class)); assertThat(replicaRequest, equalTo(indexRequest)); @@ -747,19 +745,15 @@ public void testExecuteUpdateRequestOnceWithFailure() throws Exception { source.put("foo", "bar"); BulkItemRequest[] items = new BulkItemRequest[1]; boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar") .create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; - BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location location = new Translog.Location(0, 0, 0); IndexRequest indexRequest = new IndexRequest("index", "_doc", "id"); indexRequest.source(source); - DocWriteResponse.Result docWriteResult = DocWriteResponse.Result.CREATED; Exception prepareFailure = new IllegalArgumentException("I failed to do something!"); UpdateHelper updateHelper = new FailingUpdateHelper(prepareFailure); UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id"); @@ -849,6 +843,7 @@ public Translog.Location getTranslogLocation() { /** Doesn't perform any mapping updates */ public static class NoopMappingUpdatePerformer implements MappingUpdatePerformer { + @Override public void updateMappings(Mapping update, ShardId shardId, String type) { } } @@ -860,6 +855,7 @@ private class ThrowingMappingUpdatePerformer implements 
MappingUpdatePerformer { this.e = e; } + @Override public void updateMappings(Mapping update, ShardId shardId, String type) { throw e; } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index e4cbc1fcd80b1..2726de237582d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -96,7 +96,7 @@ public void onFailure(Exception e) { lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)); AtomicInteger numRequests = new AtomicInteger(0); - AbstractSearchAsyncAction asyncAction = + AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", logger, @@ -190,7 +190,7 @@ public void onFailure(Exception e) { CountDownLatch awaitInitialRequests = new CountDownLatch(1); AtomicInteger numRequests = new AtomicInteger(0); AtomicInteger numResponses = new AtomicInteger(0); - AbstractSearchAsyncAction asyncAction = + AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", logger, @@ -297,7 +297,7 @@ public void sendFreeContext(Transport.Connection connection, long contextId, Ori lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)); final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); - AbstractSearchAsyncAction asyncAction = + AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", logger, diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java index 9b1781d4f785a..fbc3b1975def5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java @@ -128,9 +128,9 @@ public void testFailNextPhase() throws InterruptedException { request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); CountDownLatch latch = new CountDownLatch(1); AtomicInteger movedCounter = new AtomicInteger(0); - ActionListener listener = new ActionListener() { + ActionListener listener = new ActionListener() { @Override - public void onResponse(Object o) { + public void onResponse(SearchResponse o) { try { fail("got a result"); } finally { @@ -374,9 +374,9 @@ public void testAllShardsFailed() throws InterruptedException { SearchScrollRequest request = new SearchScrollRequest(); request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); CountDownLatch latch = new CountDownLatch(1); - ActionListener listener = new ActionListener() { + ActionListener listener = new ActionListener() { @Override - public void onResponse(Object o) { + public void onResponse(SearchResponse o) { try { fail("got a result"); } finally { diff --git a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index 3f754d601b501..8e94764cc7acc 100644 --- a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -308,15 +308,15 @@ public void testToXContent() throws IOException { boolean open = wildcardStates.contains(WildcardStates.OPEN); if (open) { - assertTrue(((List)map.get("expand_wildcards")).contains("open")); + assertTrue(((List) map.get("expand_wildcards")).contains("open")); } else { - assertFalse(((List)map.get("expand_wildcards")).contains("open")); + assertFalse(((List) map.get("expand_wildcards")).contains("open")); } boolean closed = wildcardStates.contains(WildcardStates.CLOSED); if (closed) { - assertTrue(((List)map.get("expand_wildcards")).contains("closed")); + assertTrue(((List) map.get("expand_wildcards")).contains("closed")); } else { - assertFalse(((List)map.get("expand_wildcards")).contains("closed")); + assertFalse(((List) map.get("expand_wildcards")).contains("closed")); } assertEquals(map.get("ignore_unavailable"), options.contains(Option.IGNORE_UNAVAILABLE)); assertEquals(map.get("allow_no_indices"), options.contains(Option.ALLOW_NO_INDICES)); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 4e3af6cc41277..3ef599d8c9bf3 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -208,8 +208,8 @@ private class TestBroadcastReplicationAction extends TransportBroadcastReplicati protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); TestBroadcastReplicationAction(Settings settings, ClusterService clusterService, TransportService transportService, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - TransportReplicationAction replicatedBroadcastShardAction) { + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { super("internal:test-broadcast-replication-action", DummyBroadcastRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 036e8b4ca6c97..8e6db7d776191 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -140,6 +140,7 @@ public void setUp() throws Exception { updateHelper = new UpdateHelper(settings, scriptService); } + @SuppressWarnings("unchecked") public void testFromXContent() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); // simple script @@ -233,7 +234,7 @@ public void testFromXContent() throws Exception { Map upsertDoc = XContentHelper.convertToMap(request.upsertRequest().source(), true, request.upsertRequest().getContentType()).v2(); assertThat(upsertDoc.get("field1").toString(), equalTo("value1")); - assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); + assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); request = new UpdateRequest("test", "type", "1"); 
request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() @@ -260,7 +261,7 @@ public void testFromXContent() throws Exception { assertThat(params.get("param1").toString(), equalTo("value1")); upsertDoc = XContentHelper.convertToMap(request.upsertRequest().source(), true, request.upsertRequest().getContentType()).v2(); assertThat(upsertDoc.get("field1").toString(), equalTo("value1")); - assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); + assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2")); // script with doc request = new UpdateRequest("test", "type", "1"); @@ -275,7 +276,7 @@ public void testFromXContent() throws Exception { .endObject())); Map doc = request.doc().sourceAsMap(); assertThat(doc.get("field1").toString(), equalTo("value1")); - assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); + assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); } public void testUnknownFieldParsing() throws Exception { @@ -424,7 +425,7 @@ private void runTimeoutTest(final GetResult getResult, final UpdateRequest updat ESTestCase::randomNonNegativeLong); final Streamable action = result.action(); assertThat(action, instanceOf(ReplicationRequest.class)); - final ReplicationRequest request = (ReplicationRequest) action; + final ReplicationRequest request = (ReplicationRequest) action; assertThat(request.timeout(), equalTo(updateRequest.timeout())); } diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java index efd8026645249..4c7a42de2ee95 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterModuleTests.java @@ -167,8 +167,7 @@ public void testUnknownShardsAllocator() { public void testShardsAllocatorFactoryNull() { Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "bad").build(); - NullPointerException e = expectThrows(NullPointerException.class, () -> - newClusterModuleWithShardsAllocator(settings, "bad", () -> null)); + expectThrows(NullPointerException.class, () -> newClusterModuleWithShardsAllocator(settings, "bad", () -> null)); } // makes sure that the allocation deciders are setup in the correct order, such that the diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index f054450f00abe..17f25d1556d48 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -19,17 +19,10 @@ package org.elasticsearch.common.geo; -import org.locationtech.jts.geom.Coordinate; -import org.locationtech.jts.geom.LineString; -import org.locationtech.jts.geom.LinearRing; -import org.locationtech.jts.geom.MultiLineString; -import org.locationtech.jts.geom.Point; -import org.locationtech.jts.geom.Polygon; - import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Strings; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.settings.Settings; @@ -41,6 +34,12 
@@ import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.LineString; +import org.locationtech.jts.geom.LinearRing; +import org.locationtech.jts.geom.MultiLineString; +import org.locationtech.jts.geom.Point; +import org.locationtech.jts.geom.Polygon; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Circle; import org.locationtech.spatial4j.shape.Rectangle; @@ -828,7 +827,7 @@ public void testParseMultiPoint() throws IOException { .endArray() .endObject(); - ShapeCollection expected = shapeCollection( + ShapeCollection expected = shapeCollection( SPATIAL_CONTEXT.makePoint(100, 0), SPATIAL_CONTEXT.makePoint(101, 1.0)); assertGeometryEquals(expected, multiPointGeoJson); @@ -951,6 +950,7 @@ public void testParseMultiPolygon() throws IOException { assertGeometryEquals(jtsGeom(withHoles), multiPolygonGeoJson); } + @Override public void testParseGeometryCollection() throws IOException { XContentBuilder geometryCollectionGeoJson = XContentFactory.jsonBuilder() .startObject() diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java index 3189a4fcdb091..696279ece4b80 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java @@ -18,12 +18,6 @@ */ package org.elasticsearch.common.geo; -import org.locationtech.jts.geom.Coordinate; -import org.locationtech.jts.geom.LineString; -import org.locationtech.jts.geom.LinearRing; -import org.locationtech.jts.geom.MultiLineString; -import org.locationtech.jts.geom.Point; -import org.locationtech.jts.geom.Polygon; import org.apache.lucene.geo.GeoTestUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; @@ -50,6 +44,12 @@ import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.LineString; +import org.locationtech.jts.geom.LinearRing; +import org.locationtech.jts.geom.MultiLineString; +import org.locationtech.jts.geom.Point; +import org.locationtech.jts.geom.Polygon; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Rectangle; import org.locationtech.spatial4j.shape.Shape; @@ -69,7 +69,7 @@ */ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { - private static XContentBuilder toWKTContent(ShapeBuilder builder, boolean generateMalformed) + private static XContentBuilder toWKTContent(ShapeBuilder builder, boolean generateMalformed) throws IOException { String wkt = builder.toWKT(); if (generateMalformed) { @@ -84,12 +84,12 @@ private static XContentBuilder toWKTContent(ShapeBuilder builder, boolean genera return XContentFactory.jsonBuilder().value(wkt); } - private void assertExpected(Shape expected, ShapeBuilder builder) throws IOException { + private void assertExpected(Shape expected, ShapeBuilder builder) throws IOException { XContentBuilder xContentBuilder = toWKTContent(builder, false); assertGeometryEquals(expected, xContentBuilder); } - private void 
assertMalformed(ShapeBuilder builder) throws IOException { + private void assertMalformed(ShapeBuilder builder) throws IOException { XContentBuilder xContentBuilder = toWKTContent(builder, true); assertValidException(xContentBuilder, ElasticsearchParseException.class); } @@ -114,7 +114,7 @@ public void testParseMultiPoint() throws IOException { coordinates.add(new Coordinate(p.lon(), p.lat())); shapes[i] = SPATIAL_CONTEXT.makePoint(p.lon(), p.lat()); } - ShapeCollection expected = shapeCollection(shapes); + ShapeCollection expected = shapeCollection(shapes); assertExpected(expected, new MultiPointBuilder(coordinates)); assertMalformed(new MultiPointBuilder(coordinates)); } @@ -314,7 +314,7 @@ public void testParsePolyWithStoredZ() throws IOException { Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); - ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); + ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); assertEquals(shapeBuilder.numDimensions(), 3); } diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java b/server/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java index c0ada3725b05f..b44ba6769d79f 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilderTests.java @@ -66,10 +66,10 @@ protected GeometryCollectionBuilder createMutation(GeometryCollectionBuilder ori } static GeometryCollectionBuilder mutate(GeometryCollectionBuilder original) throws IOException { - GeometryCollectionBuilder mutation = (GeometryCollectionBuilder) copyShape(original); + GeometryCollectionBuilder mutation = copyShape(original); if (mutation.shapes.size() > 0) { int shapePosition = randomIntBetween(0, mutation.shapes.size() - 1); - ShapeBuilder shapeToChange = mutation.shapes.get(shapePosition); + ShapeBuilder shapeToChange = mutation.shapes.get(shapePosition); switch (shapeToChange.type()) { case POINT: shapeToChange = PointBuilderTests.mutate((PointBuilder) shapeToChange); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java index 4acb497c46bd9..534da56150135 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java @@ -118,7 +118,7 @@ public void testExtractValue() throws Exception { extValue = XContentMapValues.extractValue("path1.test", map); assertThat(extValue, instanceOf(List.class)); - List extListValue = (List) extValue; + List extListValue = (List) extValue; assertThat(extListValue, hasSize(2)); builder = XContentFactory.jsonBuilder().startObject() @@ -137,7 +137,7 @@ public void testExtractValue() throws Exception { extValue = XContentMapValues.extractValue("path1.path2.test", map); assertThat(extValue, instanceOf(List.class)); - extListValue = (List) extValue; + extListValue = (List) extValue; assertThat(extListValue, hasSize(2)); assertThat(extListValue.get(0).toString(), equalTo("value1")); assertThat(extListValue.get(1).toString(), equalTo("value2")); @@ -225,13 +225,13 
@@ public void testNestedFiltering() { assertThat(filteredMap.size(), equalTo(1)); assertThat(((List) filteredMap.get("array")), hasSize(1)); - assertThat(((Map) ((List) filteredMap.get("array")).get(0)).size(), equalTo(1)); - assertThat((Integer) ((Map) ((List) filteredMap.get("array")).get(0)).get("nested"), equalTo(2)); + assertThat(((Map) ((List) filteredMap.get("array")).get(0)).size(), equalTo(1)); + assertThat((Integer) ((Map) ((List) filteredMap.get("array")).get(0)).get("nested"), equalTo(2)); filteredMap = XContentMapValues.filter(map, new String[]{"array.*"}, Strings.EMPTY_ARRAY); assertThat(filteredMap.size(), equalTo(1)); assertThat(((List) filteredMap.get("array")), hasSize(1)); - assertThat(((Map) ((List) filteredMap.get("array")).get(0)).size(), equalTo(2)); + assertThat(((Map) ((List) filteredMap.get("array")).get(0)).size(), equalTo(2)); map.clear(); map.put("field", "value"); @@ -285,16 +285,16 @@ public void testCompleteObjectFiltering() { filteredMap = XContentMapValues.filter(map, new String[]{"array"}, new String[]{}); assertThat(filteredMap.size(), equalTo(1)); - assertThat(((List) filteredMap.get("array")).size(), equalTo(2)); - assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1)); - assertThat(((Map) ((List) filteredMap.get("array")).get(1)).size(), equalTo(2)); + assertThat(((List) filteredMap.get("array")).size(), equalTo(2)); + assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1)); + assertThat(((Map) ((List) filteredMap.get("array")).get(1)).size(), equalTo(2)); filteredMap = XContentMapValues.filter(map, new String[]{"array"}, new String[]{"*.field2"}); assertThat(filteredMap.size(), equalTo(1)); assertThat(((List) filteredMap.get("array")), hasSize(2)); - assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1)); - assertThat(((Map) ((List) filteredMap.get("array")).get(1)).size(), equalTo(1)); - assertThat(((Map) ((List) filteredMap.get("array")).get(1)).get("field").toString(), equalTo("value")); + assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1)); + assertThat(((Map) ((List) filteredMap.get("array")).get(1)).size(), equalTo(1)); + assertThat(((Map) ((List) filteredMap.get("array")).get(1)).get("field").toString(), equalTo("value")); } @SuppressWarnings("unchecked") @@ -331,7 +331,7 @@ public void testFilterIncludesUsingStarPrefix() { assertThat(filteredMap.size(), equalTo(3)); assertThat(filteredMap, hasKey("field")); assertThat(filteredMap, hasKey("obj")); - assertThat(((Map) filteredMap.get("obj")).size(), equalTo(1)); + assertThat(((Map) filteredMap.get("obj")).size(), equalTo(1)); assertThat(((Map) filteredMap.get("obj")), hasKey("field")); assertThat(filteredMap, hasKey("n_obj")); assertThat(((Map) filteredMap.get("n_obj")).size(), equalTo(1)); @@ -371,6 +371,7 @@ public void testThatFilterIncludesEmptyObjectWhenUsingExcludes() throws Exceptio assertThat(mapTuple.v2(), equalTo(filteredSource)); } + @SuppressWarnings("unchecked") public void testNotOmittingObjectsWithExcludedProperties() throws Exception { XContentBuilder builder = XContentFactory.jsonBuilder().startObject() .startObject("obj") @@ -383,7 +384,7 @@ public void testNotOmittingObjectsWithExcludedProperties() throws Exception { assertThat(filteredSource.size(), equalTo(1)); assertThat(filteredSource, hasKey("obj")); - assertThat(((Map) filteredSource.get("obj")).size(), equalTo(0)); + assertThat(((Map) filteredSource.get("obj")).size(), equalTo(0)); } @SuppressWarnings({"unchecked"}) @@ -403,20 +404,20 @@ 
public void testNotOmittingObjectWithNestedExcludedObject() throws Exception { assertThat(filteredSource.size(), equalTo(1)); assertThat(filteredSource, hasKey("obj1")); - assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(0)); + assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(0)); // explicit include filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"obj1"}, new String[]{"*.obj2"}); assertThat(filteredSource.size(), equalTo(1)); assertThat(filteredSource, hasKey("obj1")); - assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(0)); + assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(0)); // wild card include filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"*.obj2"}, new String[]{"*.obj3"}); assertThat(filteredSource.size(), equalTo(1)); assertThat(filteredSource, hasKey("obj1")); assertThat(((Map) filteredSource.get("obj1")), hasKey("obj2")); - assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0)); + assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0)); } @SuppressWarnings({"unchecked"}) @@ -433,9 +434,9 @@ public void testIncludingObjectWithNestedIncludedObject() throws Exception { assertThat(filteredSource.size(), equalTo(1)); assertThat(filteredSource, hasKey("obj1")); - assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(1)); + assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(1)); assertThat(((Map) filteredSource.get("obj1")), hasKey("obj2")); - assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0)); + assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0)); } diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index 68a6d23980266..f2bacc154bf46 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -35,8 +35,8 @@ import java.nio.file.Files; import java.nio.file.Path; -import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -80,6 +80,7 @@ public void testMetaIsRemovedIfAllShardsFromIndexRemoved() throws Exception { assertIndexInMetaState(masterNode, index); } + @SuppressWarnings("unchecked") public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { String masterNode = internalCluster().startMasterOnlyNode(Settings.EMPTY); final String dataNode = internalCluster().startDataOnlyNode(Settings.EMPTY); @@ -108,11 +109,11 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { .endObject()).get(); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get(); - assertNotNull(((LinkedHashMap) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("integer_field")); + assertNotNull(((Map) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("integer_field")); // make sure it was also written on red node although index is closed ImmutableOpenMap indicesMetaData = getIndicesMetaDataOnNode(dataNode); - assertNotNull(((LinkedHashMap) 
(indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("integer_field")); + assertNotNull(((Map) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("integer_field")); assertThat(indicesMetaData.get(index).getState(), equalTo(IndexMetaData.State.CLOSE)); /* Try the same and see if this also works if node was just restarted. @@ -133,11 +134,11 @@ public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception { .endObject()).get(); getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get(); - assertNotNull(((LinkedHashMap) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("float_field")); + assertNotNull(((Map) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("float_field")); // make sure it was also written on red node although index is closed indicesMetaData = getIndicesMetaDataOnNode(dataNode); - assertNotNull(((LinkedHashMap) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field")); + assertNotNull(((Map) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field")); assertThat(indicesMetaData.get(index).getState(), equalTo(IndexMetaData.State.CLOSE)); // finally check that meta data is also written of index opened again diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java index 3c362d48c118a..a2d2474886381 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/GeoFieldDataTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.index.fielddata.plain.AbstractAtomicGeoPointFieldData; import java.util.List; + import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** @@ -153,7 +154,7 @@ protected void fillExtendedMvSet() throws Exception { @Override public void testSingleValueAllSet() throws Exception { fillSingleValueAllSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -169,7 +170,7 @@ public void testSingleValueAllSet() throws Exception { @Override public void testSingleValueWithMissing() throws Exception { fillSingleValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -185,7 +186,7 @@ public void testSingleValueWithMissing() throws Exception { @Override public void testMultiValueAllSet() throws Exception { fillMultiValueAllSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -201,7 +202,7 @@ public void testMultiValueAllSet() throws Exception { @Override public void testMultiValueWithMissing() throws Exception { fillMultiValueWithMissing(); - 
IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java index 8dbddcc5daa54..1db40ac402674 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java @@ -43,6 +43,7 @@ import static org.hamcrest.Matchers.nullValue; public class MultiFieldsIntegrationIT extends ESIntegTestCase { + @SuppressWarnings("unchecked") public void testMultiFields() throws Exception { assertAcked( client().admin().indices().prepareCreate("my-index") @@ -53,10 +54,10 @@ public void testMultiFields() throws Exception { MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type"); assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); - Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource)); + Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource)); assertThat(titleFields.size(), equalTo(1)); assertThat(titleFields.get("not_analyzed"), notNullValue()); - assertThat(((Map)titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); + assertThat(((Map) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); client().prepareIndex("my-index", "my-type", "1") .setSource("title", "Multi fields") @@ -81,13 +82,13 @@ public void testMultiFields() throws Exception { mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type"); assertThat(mappingMetaData, not(nullValue())); mappingSource = mappingMetaData.sourceAsMap(); - assertThat(((Map) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2)); - titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource)); + assertThat(((Map) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2)); + titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource)); assertThat(titleFields.size(), equalTo(2)); assertThat(titleFields.get("not_analyzed"), notNullValue()); - assertThat(((Map)titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); + assertThat(((Map) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); assertThat(titleFields.get("uncased"), notNullValue()); - assertThat(((Map)titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace")); + assertThat(((Map) titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace")); client().prepareIndex("my-index", "my-type", "1") .setSource("title", "Multi fields") @@ -100,6 +101,7 @@ public void testMultiFields() throws Exception { assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); } + @SuppressWarnings("unchecked") public void testGeoPointMultiField() throws Exception { assertAcked( client().admin().indices().prepareCreate("my-index") @@ -110,13 +112,13 @@ public void testGeoPointMultiField() throws Exception { MappingMetaData mappingMetaData = 
getMappingsResponse.mappings().get("my-index").get("my-type"); assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); - Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); + Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); logger.info("Keys: {}", aField.keySet()); assertThat(aField.size(), equalTo(2)); assertThat(aField.get("type").toString(), equalTo("geo_point")); assertThat(aField.get("fields"), notNullValue()); - Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource)); + Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource)); assertThat(bField.size(), equalTo(1)); assertThat(bField.get("type").toString(), equalTo("keyword")); @@ -130,6 +132,7 @@ public void testGeoPointMultiField() throws Exception { assertThat(countResponse.getHits().getTotalHits(), equalTo(1L)); } + @SuppressWarnings("unchecked") public void testCompletionMultiField() throws Exception { assertAcked( client().admin().indices().prepareCreate("my-index") @@ -140,12 +143,12 @@ public void testCompletionMultiField() throws Exception { MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type"); assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); - Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); + Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); assertThat(aField.size(), equalTo(6)); assertThat(aField.get("type").toString(), equalTo("completion")); assertThat(aField.get("fields"), notNullValue()); - Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource)); + Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource)); assertThat(bField.size(), equalTo(1)); assertThat(bField.get("type").toString(), equalTo("keyword")); @@ -154,6 +157,7 @@ public void testCompletionMultiField() throws Exception { assertThat(countResponse.getHits().getTotalHits(), equalTo(1L)); } + @SuppressWarnings("unchecked") public void testIpMultiField() throws Exception { assertAcked( client().admin().indices().prepareCreate("my-index") @@ -164,12 +168,12 @@ public void testIpMultiField() throws Exception { MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type"); assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); - Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); + Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); assertThat(aField.size(), equalTo(2)); assertThat(aField.get("type").toString(), equalTo("ip")); assertThat(aField.get("fields"), notNullValue()); - Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource)); + Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource)); assertThat(bField.size(), equalTo(1)); assertThat(bField.get("type").toString(), equalTo("keyword")); diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index 6ff134931f4ae..9671656436662 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.query; -import org.locationtech.jts.geom.Coordinate; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParsingException; @@ -29,6 +28,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; +import org.locationtech.jts.geom.Coordinate; import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; @@ -73,7 +73,7 @@ public void testToQuery() throws IOException { } private static List randomPolygon() { - ShapeBuilder shapeBuilder = null; + ShapeBuilder shapeBuilder = null; // This is a temporary fix because sometimes the RandomShapeGenerator // returns null. This is if there is an error generating the polygon. So // in this case keep trying until we successfully generate one diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index 6356b2122edbf..eafb4995f726d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.query; -import org.locationtech.jts.geom.Coordinate; - import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -44,6 +42,7 @@ import org.elasticsearch.test.geo.RandomShapeGenerator; import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType; import org.junit.After; +import org.locationtech.jts.geom.Coordinate; import java.io.IOException; @@ -60,7 +59,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase indexedShapeToReturn; @Override protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { @@ -68,7 +67,7 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { } private GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { ShapeType shapeType = ShapeType.randomType(random()); - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); GeoShapeQueryBuilder builder; clearShapeFields(); if (indexedShape == false) { @@ -166,7 +165,7 @@ public void testToQuery() throws IOException { } public void testNoFieldName() throws Exception { - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(null, shape)); assertEquals("fieldName is required", e.getMessage()); } @@ -188,14 +187,14 @@ public void testNoIndexedShapeType() throws IOException { } public void testNoRelation() throws IOException { - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.relation(null)); assertEquals("No Shape Relation defined", 
e.getMessage()); } public void testInvalidRelation() throws IOException { - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); builder.strategy(SpatialStrategy.TERM); expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); @@ -266,7 +265,7 @@ public void testMultipleRewrite() throws IOException { public void testIgnoreUnmapped() throws IOException { ShapeType shapeType = ShapeType.randomType(random()); - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); final GeoShapeQueryBuilder queryBuilder = new GeoShapeQueryBuilder("unmapped", shape); queryBuilder.ignoreUnmapped(true); Query query = queryBuilder.toQuery(createShardContext()); @@ -282,7 +281,7 @@ public void testIgnoreUnmapped() throws IOException { public void testWrongFieldType() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); ShapeType shapeType = ShapeType.randomType(random()); - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); final GeoShapeQueryBuilder queryBuilder = new GeoShapeQueryBuilder(STRING_FIELD_NAME, shape); QueryShardException e = expectThrows(QueryShardException.class, () -> queryBuilder.toQuery(createShardContext())); assertThat(e.getMessage(), containsString("Field [mapped_string] is not of type [geo_shape] but of type [text]")); diff --git a/server/src/test/java/org/elasticsearch/index/query/RewriteableTests.java b/server/src/test/java/org/elasticsearch/index/query/RewriteableTests.java index fbc65d04318b4..254f9b3fcad5f 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RewriteableTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/RewriteableTests.java @@ -66,7 +66,7 @@ public void testRewriteAndFetch() throws ExecutionException, InterruptedExceptio public void testRewriteList() throws IOException { QueryRewriteContext context = new QueryRewriteContext(null, null, null, null); - List rewriteableList = new ArrayList(); + List rewriteableList = new ArrayList<>(); int numInstances = randomIntBetween(1, 10); rewriteableList.add(new TestRewriteable(randomIntBetween(1, Rewriteable.MAX_REWRITE_ROUNDS))); for (int i = 0; i < numInstances; i++) { @@ -103,7 +103,7 @@ private static final class TestRewriteable implements Rewriteable supplier) { this.numRewrites = numRewrites; this.fetch = fetch; this.supplier = supplier; diff --git a/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java index c041b59fc2b8c..51d7d5aae3abe 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/inject/ModuleTestCase.java @@ -42,17 +42,17 @@ public abstract class ModuleTestCase extends ESTestCase { /** Configures the module and asserts "clazz" is bound to "to". 
*/ - public void assertBinding(Module module, Class to, Class clazz) { + public void assertBinding(Module module, Class to, Class clazz) { List elements = Elements.getElements(module); for (Element element : elements) { if (element instanceof LinkedKeyBinding) { - LinkedKeyBinding binding = (LinkedKeyBinding) element; + LinkedKeyBinding binding = (LinkedKeyBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { assertSame(clazz, binding.getLinkedKey().getTypeLiteral().getType()); return; } } else if (element instanceof UntargettedBinding) { - UntargettedBinding binding = (UntargettedBinding) element; + UntargettedBinding binding = (UntargettedBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { assertSame(clazz, to); return; @@ -67,16 +67,16 @@ public void assertBinding(Module module, Class to, Class clazz) { } /** Configures the module and asserts "clazz" is not bound to anything. */ - public void assertNotBound(Module module, Class clazz) { + public void assertNotBound(Module module, Class clazz) { List elements = Elements.getElements(module); for (Element element : elements) { if (element instanceof LinkedKeyBinding) { - LinkedKeyBinding binding = (LinkedKeyBinding) element; + LinkedKeyBinding binding = (LinkedKeyBinding) element; if (clazz.equals(binding.getKey().getTypeLiteral().getType())) { fail("Found binding for " + clazz.getName() + " to " + binding.getKey().getTypeLiteral().getType().getTypeName()); } } else if (element instanceof UntargettedBinding) { - UntargettedBinding binding = (UntargettedBinding) element; + UntargettedBinding binding = (UntargettedBinding) element; if (clazz.equals(binding.getKey().getTypeLiteral().getType())) { fail("Found binding for " + clazz.getName()); } @@ -107,18 +107,18 @@ public void assertBindingFailure(Module module, String... msgs) { * Configures the module and checks a Map<String, Class> of the "to" class * is bound to "theClass". */ - public void assertMapMultiBinding(Module module, Class to, Class theClass) { + public void assertMapMultiBinding(Module module, Class to, Class theClass) { List elements = Elements.getElements(module); Set bindings = new HashSet<>(); boolean providerFound = false; for (Element element : elements) { if (element instanceof LinkedKeyBinding) { - LinkedKeyBinding binding = (LinkedKeyBinding) element; + LinkedKeyBinding binding = (LinkedKeyBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { bindings.add(binding.getLinkedKey().getTypeLiteral().getType()); } } else if (element instanceof ProviderInstanceBinding) { - ProviderInstanceBinding binding = (ProviderInstanceBinding) element; + ProviderInstanceBinding binding = (ProviderInstanceBinding) element; String setType = binding.getKey().getTypeLiteral().getType().toString(); if (setType.equals("java.util.Map")) { providerFound = true; @@ -138,18 +138,18 @@ public void assertMapMultiBinding(Module module, Class to, Class theClass) { * is bound to "classes". There may be more classes bound * to "to" than just "classes". */ - public void assertSetMultiBinding(Module module, Class to, Class... classes) { + public void assertSetMultiBinding(Module module, Class to, Class... 
classes) { List elements = Elements.getElements(module); Set bindings = new HashSet<>(); boolean providerFound = false; for (Element element : elements) { if (element instanceof LinkedKeyBinding) { - LinkedKeyBinding binding = (LinkedKeyBinding) element; + LinkedKeyBinding binding = (LinkedKeyBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { bindings.add(binding.getLinkedKey().getTypeLiteral().getType()); } } else if (element instanceof ProviderInstanceBinding) { - ProviderInstanceBinding binding = (ProviderInstanceBinding) element; + ProviderInstanceBinding binding = (ProviderInstanceBinding) element; String setType = binding.getKey().getTypeLiteral().getType().toString(); if (setType.equals("java.util.Set<" + to.getName() + ">")) { providerFound = true; @@ -157,7 +157,7 @@ public void assertSetMultiBinding(Module module, Class to, Class... classes) { } } - for (Class clazz : classes) { + for (Class clazz : classes) { if (bindings.contains(clazz) == false) { fail("Expected to find " + clazz.getName() + " as set binding to " + to.getName() + ", found these classes:\n" + bindings); } @@ -180,12 +180,12 @@ public static T bindAndGetInstance(Module module, Class to) { List elements = Elements.getElements(module); for (Element element : elements) { if (element instanceof InstanceBinding) { - InstanceBinding binding = (InstanceBinding) element; + InstanceBinding binding = (InstanceBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { return to.cast(binding.getInstance()); } } else if (element instanceof ProviderInstanceBinding) { - ProviderInstanceBinding binding = (ProviderInstanceBinding) element; + ProviderInstanceBinding binding = (ProviderInstanceBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { return to.cast(binding.getProviderInstance().get()); } @@ -203,7 +203,7 @@ public void assertInstanceBindingWithAnnotation(Module module, Class to, List elements = Elements.getElements(module); for (Element element : elements) { if (element instanceof InstanceBinding) { - InstanceBinding binding = (InstanceBinding) element; + InstanceBinding binding = (InstanceBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { if (annotation == null || annotation.equals(binding.getKey().getAnnotationType())) { assertTrue(tester.test(to.cast(binding.getInstance()))); @@ -211,7 +211,7 @@ public void assertInstanceBindingWithAnnotation(Module module, Class to, } } } else if (element instanceof ProviderInstanceBinding) { - ProviderInstanceBinding binding = (ProviderInstanceBinding) element; + ProviderInstanceBinding binding = (ProviderInstanceBinding) element; if (to.equals(binding.getKey().getTypeLiteral().getType())) { assertTrue(tester.test(to.cast(binding.getProviderInstance().get()))); return; @@ -232,27 +232,27 @@ public void assertInstanceBindingWithAnnotation(Module module, Class to, @SuppressWarnings("unchecked") public void assertMapInstanceBinding(Module module, Class keyType, Class valueType, Map expected) throws Exception { // this method is insane because java type erasure makes it incredibly difficult... 
- Map keys = new HashMap<>(); - Map values = new HashMap<>(); + Map> keys = new HashMap<>(); + Map, V> values = new HashMap<>(); List elements = Elements.getElements(module); for (Element element : elements) { if (element instanceof InstanceBinding) { - InstanceBinding binding = (InstanceBinding) element; + InstanceBinding binding = (InstanceBinding) element; if (binding.getKey().getRawType().equals(valueType)) { values.put(binding.getKey(), (V) binding.getInstance()); } else if (binding.getInstance() instanceof Map.Entry) { - Map.Entry entry = (Map.Entry) binding.getInstance(); + Map.Entry entry = (Map.Entry) binding.getInstance(); Object key = entry.getKey(); Object providerValue = entry.getValue(); if (key.getClass().equals(keyType) && providerValue instanceof ProviderLookup.ProviderImpl) { - ProviderLookup.ProviderImpl provider = (ProviderLookup.ProviderImpl) providerValue; + ProviderLookup.ProviderImpl provider = (ProviderLookup.ProviderImpl) providerValue; keys.put((K) key, provider.getKey()); } } } } for (Map.Entry entry : expected.entrySet()) { - Key valueKey = keys.get(entry.getKey()); + Key valueKey = keys.get(entry.getKey()); assertNotNull("Could not find binding for key [" + entry.getKey() + "], found these keys:\n" + keys.keySet(), valueKey); V value = values.get(valueKey); assertNotNull("Could not find value for instance key [" + valueKey + "], found these bindings:\n" + elements); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java index aaa392feff2d3..6dd52626f7de1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobBuilderTests.java @@ -67,8 +67,8 @@ public void init() { }).when(jobProvider).dataCounts(any(), any(), any()); doAnswer(invocationOnMock -> { - @SuppressWarnings("rawtypes") - Consumer consumer = (Consumer) invocationOnMock.getArguments()[3]; + @SuppressWarnings("unchecked") + Consumer consumer = (Consumer) invocationOnMock.getArguments()[3]; consumer.accept(new ResourceNotFoundException("dummy")); return null; }).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any()); @@ -153,8 +153,8 @@ public void testBuild_GivenBucketsRequestFails() { Exception error = new RuntimeException("error"); doAnswer(invocationOnMock -> { - @SuppressWarnings("rawtypes") - Consumer consumer = (Consumer) invocationOnMock.getArguments()[3]; + @SuppressWarnings("unchecked") + Consumer consumer = (Consumer) invocationOnMock.getArguments()[3]; consumer.accept(error); return null; }).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any()); @@ -176,11 +176,11 @@ private void givenLatestTimes(long latestRecordTimestamp, long latestBucketTimes }).when(jobProvider).dataCounts(any(), any(), any()); doAnswer(invocationOnMock -> { - @SuppressWarnings("rawtypes") - Consumer consumer = (Consumer) invocationOnMock.getArguments()[2]; + @SuppressWarnings("unchecked") + Consumer> consumer = (Consumer>) invocationOnMock.getArguments()[2]; Bucket bucket = mock(Bucket.class); when(bucket.getTimestamp()).thenReturn(new Date(latestBucketTimestamp)); - QueryPage bucketQueryPage = new QueryPage(Collections.singletonList(bucket), 1, Bucket.RESULTS_FIELD); + QueryPage bucketQueryPage = new QueryPage(Collections.singletonList(bucket), 1, Bucket.RESULTS_FIELD); 
consumer.accept(bucketQueryPage); return null; }).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any()); From 1099060735cb088e654586306847f5763c3c7345 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 5 Jul 2018 09:43:43 -0400 Subject: [PATCH 16/19] Test: Do not remove xpack templates when cleaning (#31642) At the end of every `ESRestTestCase` we clean the cluster which includes deleting all of the templates. If xpack is installed it'll automatically recreate a few templates every time they are removed. Which is slow. This change stops the cleanup from removing the xpack templates. It cuts the time to run the docs tests more than in half and it probably saves a bit more time on other tests as well. --- .../test/rest/ESRestTestCase.java | 53 ++++++++++++++++++- 1 file changed, 51 insertions(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 8737378dbd715..81a9598496bf5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -30,6 +30,7 @@ import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; +import org.apache.http.util.EntityUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -259,7 +260,7 @@ private void wipeCluster() throws IOException { if (preserveIndicesUponCompletion() == false) { // wipe indices try { - adminClient().performRequest("DELETE", "*"); + adminClient().performRequest(new Request("DELETE", "*")); } catch (ResponseException e) { // 404 here just means we had no indexes if (e.getResponse().getStatusLine().getStatusCode() != 404) { @@ -270,7 +271,30 @@ private void wipeCluster() throws IOException { // wipe index templates if (preserveTemplatesUponCompletion() == false) { - adminClient().performRequest("DELETE", "_template/*"); + if (hasXPack()) { + /* + * Delete only templates that xpack doesn't automatically + * recreate. Deleting them doesn't hurt anything, but it + * slows down the test because xpack will just recreate + * them. + */ + Request request = new Request("GET", "_cat/templates"); + request.addParameter("h", "name"); + String templates = EntityUtils.toString(adminClient().performRequest(request).getEntity()); + if (false == "".equals(templates)) { + for (String template : templates.split("\n")) { + if (isXPackTemplate(template)) continue; + if ("".equals(template)) { + throw new IllegalStateException("empty template in templates list:\n" + templates); + } + logger.debug("Clearing template [{}]", template); + adminClient().performRequest(new Request("DELETE", "_template/" + template)); + } + } + } else { + logger.debug("Clearing all templates"); + adminClient().performRequest(new Request("DELETE", "_template/*")); + } } wipeSnapshots(); @@ -585,4 +609,29 @@ protected static Map getAsMap(final String endpoint) throws IOEx assertNotNull(responseEntity); return responseEntity; } + + /** + * Is this template one that is automatically created by xpack? 
+ */ + private static boolean isXPackTemplate(String name) { + if (name.startsWith(".monitoring-")) { + return true; + } + if (name.startsWith(".watch-history-")) { + return true; + } + if (name.startsWith(".ml-")) { + return true; + } + switch (name) { + case ".triggered_watches": + case ".watches": + case "logstash-index-template": + case "security_audit_log": + return true; + default: + return false; + } + } + } From 6e9bd2652fb67fc3b76e97d273542f6213ddcab5 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 5 Jul 2018 19:49:58 +0300 Subject: [PATCH 17/19] SQL: Fix incorrect message for aliases (#31792) * SQL: Fix incorrect message for aliases Fix the naming in the verification message thrown for aliases over multiple indices with different mappings. --- .../sql/analysis/index/IndexResolver.java | 2 +- .../analysis/index/IndexResolverTests.java | 60 +++++++++++++++++++ .../src/test/resources/mapping-numeric.json | 16 +++++ 3 files changed, 77 insertions(+), 1 deletion(-) create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java create mode 100644 x-pack/plugin/sql/src/test/resources/mapping-numeric.json diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 1800c170b7cff..10586c991b1ac 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -253,7 +253,7 @@ static IndexResolution merge(List resolutions, String indexWild // need the same mapping across all resolutions if (!merged.get().mapping().equals(resolution.get().mapping())) { return IndexResolution.invalid( - "[" + indexWildcard + "] points to indices [" + resolution.get().name() + "] " + "[" + indexWildcard + "] points to indices [" + merged.get().name() + "] " + "and [" + resolution.get().name() + "] which have different mappings. " + "When using multiple indices, the mappings must be identical."); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java new file mode 100644 index 0000000000000..639356b2997f9 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.analysis.index; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.type.EsField; +import org.elasticsearch.xpack.sql.type.TypesTests; + +import java.util.Arrays; +import java.util.Map; + +public class IndexResolverTests extends ESTestCase { + + public void testMergeSameMapping() throws Exception { + Map oneMapping = TypesTests.loadMapping("mapping-basic.json", true); + Map sameMapping = TypesTests.loadMapping("mapping-basic.json", true); + assertNotSame(oneMapping, sameMapping); + assertEquals(oneMapping, sameMapping); + + String wildcard = "*"; + IndexResolution resolution = IndexResolver.merge( + Arrays.asList(IndexResolution.valid(new EsIndex("a", oneMapping)), IndexResolution.valid(new EsIndex("b", sameMapping))), + wildcard); + + assertTrue(resolution.isValid()); + + EsIndex esIndex = resolution.get(); + + assertEquals(wildcard, esIndex.name()); + assertEquals(sameMapping, esIndex.mapping()); + } + + public void testMergeDifferentMapping() throws Exception { + Map oneMapping = TypesTests.loadMapping("mapping-basic.json", true); + Map sameMapping = TypesTests.loadMapping("mapping-basic.json", true); + Map differentMapping = TypesTests.loadMapping("mapping-numeric.json", true); + + assertNotSame(oneMapping, sameMapping); + assertEquals(oneMapping, sameMapping); + assertNotEquals(oneMapping, differentMapping); + + String wildcard = "*"; + IndexResolution resolution = IndexResolver.merge( + Arrays.asList(IndexResolution.valid(new EsIndex("a", oneMapping)), + IndexResolution.valid(new EsIndex("b", sameMapping)), + IndexResolution.valid(new EsIndex("diff", differentMapping))), + wildcard); + + assertFalse(resolution.isValid()); + + MappingException ex = expectThrows(MappingException.class, () -> resolution.get()); + assertEquals( + "[*] points to indices [a] and [diff] which have different mappings. " + + "When using multiple indices, the mappings must be identical.", + ex.getMessage()); + } +} diff --git a/x-pack/plugin/sql/src/test/resources/mapping-numeric.json b/x-pack/plugin/sql/src/test/resources/mapping-numeric.json new file mode 100644 index 0000000000000..a95ecfb3aa706 --- /dev/null +++ b/x-pack/plugin/sql/src/test/resources/mapping-numeric.json @@ -0,0 +1,16 @@ +{ + "properties" : { + "byte" : { + "type" : "byte" + }, + "short" : { + "type" : "short" + }, + "integer" : { + "type" : "integer" + }, + "long" : { + "type" : "long" + } + } +} From 07470c950bb67a77a05d79c607171f50aad4e3f4 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Thu, 5 Jul 2018 20:07:03 +0300 Subject: [PATCH 18/19] SQL: Allow long literals (#31777) Fix bug that caused integral literals to be only Integer (rejecting Long). This commit fixes that and picks either an Integer or Long based on size. 
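For readers skimming the patch, the sizing rule is simple enough to show in isolation. The snippet below is only a hand-written sketch of the idea, not the parser code itself, and the LiteralSizing / SqlLiteralType names are invented for the example: parse the text with BigDecimal, take the exact long value, and fall back to INTEGER only when the value survives an int cast.

import java.math.BigDecimal;

// Standalone sketch of the "Integer or Long based on size" rule described in this patch.
// The real logic lives in ExpressionBuilder#visitIntegerLiteral; the names here are hypothetical.
public final class LiteralSizing {

    enum SqlLiteralType { INTEGER, LONG }

    static SqlLiteralType typeFor(String literalText) {
        BigDecimal bigD = new BigDecimal(literalText);
        // longValueExact() throws ArithmeticException for values outside the long range,
        // mirroring the parser's rejection of oversized integral literals
        long value = bigD.longValueExact();
        // prefer INTEGER (the most common case) when the value round-trips through an int cast
        return ((int) value == value) ? SqlLiteralType.INTEGER : SqlLiteralType.LONG;
    }

    public static void main(String[] args) {
        System.out.println(typeFor("123"));                           // INTEGER
        System.out.println(typeFor("2147483648"));                    // LONG (Integer.MAX_VALUE + 1)
        System.out.println(typeFor(String.valueOf(Long.MAX_VALUE)));  // LONG
    }
}

With this rule a literal such as 2147483648 resolves to a LONG instead of being rejected, while small literals keep resolving to INTEGER as before, which is what the new ExpressionTests below verify.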
--- .../xpack/sql/parser/ExpressionBuilder.java | 10 +++- .../xpack/sql/type/DataTypeConversion.java | 6 +- .../xpack/sql/parser/ExpressionTests.java | 59 +++++++++++++++++++ .../sql/type/DataTypeConversionTests.java | 17 ++++-- 4 files changed, 83 insertions(+), 9 deletions(-) create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index a6185def278a1..35eb76af67c7b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -452,8 +452,14 @@ public Object visitDecimalLiteral(DecimalLiteralContext ctx) { @Override public Object visitIntegerLiteral(IntegerLiteralContext ctx) { BigDecimal bigD = new BigDecimal(ctx.getText()); - // TODO: this can be improved to use the smallest type available - return new Literal(source(ctx), bigD.longValueExact(), DataType.INTEGER); + + long value = bigD.longValueExact(); + DataType type = DataType.LONG; + // try to downsize to int if possible (since that's the most common type) + if ((int) value == value) { + type = DataType.INTEGER; + } + return new Literal(source(ctx), value, type); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index c0bc9b6e52908..605cb11beba9d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -154,7 +154,7 @@ private static Conversion conversionToLong(DataType from) { return Conversion.INTEGER_TO_LONG; } if (from == BOOLEAN) { - return Conversion.BOOL_TO_INT; // We emit an int here which is ok because of Java's casting rules + return Conversion.BOOL_TO_LONG; } if (from.isString()) { return Conversion.STRING_TO_LONG; @@ -407,7 +407,9 @@ public enum Conversion { NUMERIC_TO_BOOLEAN(fromLong(value -> value != 0)), STRING_TO_BOOLEAN(fromString(DataTypeConversion::convertToBoolean, "Boolean")), - DATE_TO_BOOLEAN(fromDate(value -> value != 0)); + DATE_TO_BOOLEAN(fromDate(value -> value != 0)), + + BOOL_TO_LONG(fromBool(value -> value ? 1L : 0L)); private final Function converter; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java new file mode 100644 index 0000000000000..3702939dd377e --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.parser; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; +import org.elasticsearch.xpack.sql.type.DataType; + +public class ExpressionTests extends ESTestCase { + + private final SqlParser parser = new SqlParser(); + + public void testLiteralLong() throws Exception { + Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Long.MAX_VALUE, l.value()); + assertEquals(DataType.LONG, l.dataType()); + } + + public void testLiteralLongNegative() throws Exception { + // Long.MIN_VALUE doesn't work since it is being interpreted as negate positive.long which is 1 higher than Long.MAX_VALUE + Expression lt = parser.createExpression(String.valueOf(-Long.MAX_VALUE)); + assertEquals(Neg.class, lt.getClass()); + Neg n = (Neg) lt; + assertTrue(n.foldable()); + assertEquals(-Long.MAX_VALUE, n.fold()); + assertEquals(DataType.LONG, n.dataType()); + } + + public void testLiteralInteger() throws Exception { + Expression lt = parser.createExpression(String.valueOf(Integer.MAX_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Integer.MAX_VALUE, l.value()); + assertEquals(DataType.INTEGER, l.dataType()); + } + + public void testLiteralIntegerWithShortValue() throws Exception { + Expression lt = parser.createExpression(String.valueOf(Short.MAX_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Integer.valueOf(Short.MAX_VALUE), l.value()); + assertEquals(DataType.INTEGER, l.dataType()); + } + + public void testLiteralIntegerWithByteValue() throws Exception { + Expression lt = parser.createExpression(String.valueOf(Byte.MAX_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Integer.valueOf(Byte.MAX_VALUE), l.value()); + assertEquals(DataType.INTEGER, l.dataType()); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index 8f5477f1951e9..1db470211f057 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -45,8 +45,8 @@ public void testConversionToLong() { { Conversion conversion = DataTypeConversion.conversionFor(DataType.BOOLEAN, to); assertNull(conversion.convert(null)); - assertEquals(1, conversion.convert(true)); - assertEquals(0, conversion.convert(false)); + assertEquals(1L, conversion.convert(true)); + assertEquals(0L, conversion.convert(false)); } Conversion conversion = DataTypeConversion.conversionFor(DataType.KEYWORD, to); assertNull(conversion.convert(null)); @@ -141,12 +141,19 @@ public void testConversionToBoolean() { assertEquals(true, conversion.convert(-10)); assertEquals(false, conversion.convert(0)); } + { + Conversion conversion = DataTypeConversion.conversionFor(DataType.LONG, DataType.BOOLEAN); + assertNull(conversion.convert(null)); + assertEquals(true, conversion.convert(10L)); + assertEquals(true, conversion.convert(-10L)); + assertEquals(false, conversion.convert(0L)); + } { 
Conversion conversion = DataTypeConversion.conversionFor(DataType.DOUBLE, DataType.BOOLEAN); assertNull(conversion.convert(null)); - assertEquals(true, conversion.convert(10.0)); - assertEquals(true, conversion.convert(-10.0)); - assertEquals(false, conversion.convert(0.0)); + assertEquals(true, conversion.convert(10.0d)); + assertEquals(true, conversion.convert(-10.0d)); + assertEquals(false, conversion.convert(0.0d)); } { Conversion conversion = DataTypeConversion.conversionFor(DataType.KEYWORD, DataType.BOOLEAN); From 09e8ac816736f0d5eb04aa60d509426f668b11bd Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Thu, 5 Jul 2018 19:52:25 +0200 Subject: [PATCH 19/19] REST high-level client: add get index API (#31703) Also added master_timeout parameter for the indices.get spec Relates to #27205 --- .../elasticsearch/client/IndicesClient.java | 29 ++++++ .../client/RequestConverters.java | 16 ++++ .../elasticsearch/client/IndicesClientIT.java | 72 +++++++++++++++ .../client/RequestConvertersTests.java | 33 +++++++ .../IndicesClientDocumentationIT.java | 78 ++++++++++++++++ .../high-level/indices/get_index.asciidoc | 88 +++++++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../rest-api-spec/api/indices.get.json | 4 + 8 files changed, 322 insertions(+) create mode 100644 docs/java-rest/high-level/indices/get_index.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 641480535c5ad..2944b49bf18b0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -39,6 +39,7 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -437,6 +438,34 @@ public void getSettingsAsync(GetSettingsRequest getSettingsRequest, RequestOptio GetSettingsResponse::fromXContent, listener, emptySet()); } + /** + * Retrieve information about one or more indexes + * See + * Indices Get Index API on elastic.co + * @param getIndexRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetIndexResponse get(GetIndexRequest getIndexRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(getIndexRequest, RequestConverters::getIndex, options, + GetIndexResponse::fromXContent, emptySet()); + } + + /** + * Retrieve information about one or more indexes + * See + * Indices Get Index API on elastic.co + * @param getIndexRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getAsync(GetIndexRequest getIndexRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(getIndexRequest, RequestConverters::getIndex, options, + GetIndexResponse::fromXContent, listener, emptySet()); + } + /** * Force merge one or more indices using the Force Merge API. * See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 26f0b5c647404..efc48d9057550 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -834,6 +834,22 @@ static Request getSettings(GetSettingsRequest getSettingsRequest) { return request; } + static Request getIndex(GetIndexRequest getIndexRequest) { + String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices(); + + String endpoint = endpoint(indices); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params params = new Params(request); + params.withIndicesOptions(getIndexRequest.indicesOptions()); + params.withLocal(getIndexRequest.local()); + params.withIncludeDefaults(getIndexRequest.includeDefaults()); + params.withHuman(getIndexRequest.humanReadable()); + params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); + + return request; + } + static Request indicesExist(GetIndexRequest getIndexRequest) { // this can be called with no indices as argument by transport client, not via REST though if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 405653a3841eb..39070a07b31d6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -45,6 +45,7 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -99,6 +100,7 @@ import java.util.Map; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractRawValues; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; import static org.hamcrest.CoreMatchers.hasItem; @@ -112,6 +114,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; +import static org.hamcrest.core.IsInstanceOf.instanceOf; public class IndicesClientIT 
extends ESRestHighLevelClientTestCase { @@ -326,6 +329,75 @@ public void testGetSettingsWithDefaultsFiltered() throws IOException { assertEquals(1, getSettingsResponse.getIndexToDefaultSettings().get("get_settings_index").size()); } + @SuppressWarnings("unchecked") + public void testGetIndex() throws IOException { + String indexName = "get_index_test"; + Settings basicSettings = Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + String mappings = "\"type-1\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; + createIndex(indexName, basicSettings, mappings); + + GetIndexRequest getIndexRequest = new GetIndexRequest() + .indices(indexName).includeDefaults(false); + GetIndexResponse getIndexResponse = + execute(getIndexRequest, highLevelClient().indices()::get, highLevelClient().indices()::getAsync); + + // default settings should be null + assertNull(getIndexResponse.getSetting(indexName, "index.refresh_interval")); + assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS)); + assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS)); + assertNotNull(getIndexResponse.getMappings().get(indexName)); + assertNotNull(getIndexResponse.getMappings().get(indexName).get("type-1")); + Object o = getIndexResponse.getMappings().get(indexName).get("type-1").getSourceAsMap().get("properties"); + assertThat(o, instanceOf(Map.class)); + //noinspection unchecked + assertThat(((Map) o).get("field-1"), instanceOf(Map.class)); + //noinspection unchecked + Map fieldMapping = (Map) ((Map) o).get("field-1"); + assertEquals("integer", fieldMapping.get("type")); + } + + @SuppressWarnings("unchecked") + public void testGetIndexWithDefaults() throws IOException { + String indexName = "get_index_test"; + Settings basicSettings = Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + String mappings = "\"type-1\":{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; + createIndex(indexName, basicSettings, mappings); + + GetIndexRequest getIndexRequest = new GetIndexRequest() + .indices(indexName).includeDefaults(true); + GetIndexResponse getIndexResponse = + execute(getIndexRequest, highLevelClient().indices()::get, highLevelClient().indices()::getAsync); + + assertNotNull(getIndexResponse.getSetting(indexName, "index.refresh_interval")); + assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL, + getIndexResponse.defaultSettings().get(indexName).getAsTime("index.refresh_interval", null)); + assertEquals("1", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_SHARDS)); + assertEquals("0", getIndexResponse.getSetting(indexName, SETTING_NUMBER_OF_REPLICAS)); + assertNotNull(getIndexResponse.getMappings().get(indexName)); + assertNotNull(getIndexResponse.getMappings().get(indexName).get("type-1")); + Object o = getIndexResponse.getMappings().get(indexName).get("type-1").getSourceAsMap().get("properties"); + assertThat(o, instanceOf(Map.class)); + assertThat(((Map) o).get("field-1"), instanceOf(Map.class)); + Map fieldMapping = (Map) ((Map) o).get("field-1"); + assertEquals("integer", fieldMapping.get("type")); + } + + public void testGetIndexNonExistentIndex() throws IOException { + String nonExistentIndex = "index_that_doesnt_exist"; + assertFalse(indexExists(nonExistentIndex)); + + GetIndexRequest getIndexRequest = new GetIndexRequest().indices(nonExistentIndex); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> 
execute(getIndexRequest, highLevelClient().indices()::get, highLevelClient().indices()::getAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + } + public void testPutMapping() throws IOException { // Add mappings to index String indexName = "mapping_index"; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index aa3788af7494b..e2e025ea4763c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -584,6 +584,39 @@ public void testGetSettings() throws IOException { assertThat(request.getEntity(), nullValue()); } + public void testGetIndex() throws IOException { + String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5); + + GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indicesUnderTest); + + Map expectedParams = new HashMap<>(); + setRandomMasterTimeout(getIndexRequest, expectedParams); + setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); + setRandomLocal(getIndexRequest, expectedParams); + setRandomHumanReadable(getIndexRequest, expectedParams); + + if (randomBoolean()) { + // the request object will not have include_defaults present unless it is set to + // true + getIndexRequest.includeDefaults(randomBoolean()); + if (getIndexRequest.includeDefaults()) { + expectedParams.put("include_defaults", Boolean.toString(true)); + } + } + + StringJoiner endpoint = new StringJoiner("/", "/", ""); + if (indicesUnderTest != null && indicesUnderTest.length > 0) { + endpoint.add(String.join(",", indicesUnderTest)); + } + + Request request = RequestConverters.getIndex(getIndexRequest); + + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(request.getEntity(), nullValue()); + } + public void testDeleteIndexEmptyIndices() { String[] indices = randomBoolean() ? 
null : Strings.EMPTY_ARRAY; ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 964757db372ae..23dab5b21e2ab 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -44,6 +44,7 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -89,12 +90,14 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1235,6 +1238,81 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + public void testGetIndex() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + Settings settings = Settings.builder().put("number_of_shards", 3).build(); + String mappings = "{\"properties\":{\"field-1\":{\"type\":\"integer\"}}}"; + CreateIndexResponse createIndexResponse = client.indices().create( + new CreateIndexRequest("index", settings).mapping("doc", mappings, XContentType.JSON), + RequestOptions.DEFAULT); + assertTrue(createIndexResponse.isAcknowledged()); + } + + // tag::get-index-request + GetIndexRequest request = new GetIndexRequest().indices("index"); // <1> + // end::get-index-request + + // tag::get-index-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-index-request-indicesOptions + + // tag::get-index-request-includeDefaults + request.includeDefaults(true); // <1> + // end::get-index-request-includeDefaults + + // tag::get-index-execute + GetIndexResponse getIndexResponse = client.indices().get(request, RequestOptions.DEFAULT); + // end::get-index-execute + + // tag::get-index-response + ImmutableOpenMap indexMappings = getIndexResponse.getMappings().get("index"); // <1> + Map indexTypeMappings = indexMappings.get("doc").getSourceAsMap(); // <2> + List indexAliases = getIndexResponse.getAliases().get("index"); // <3> + String numberOfShardsString = getIndexResponse.getSetting("index", "index.number_of_shards"); // <4> + Settings indexSettings = getIndexResponse.getSettings().get("index"); // <5> + Integer numberOfShards = indexSettings.getAsInt("index.number_of_shards", null); // <6> + TimeValue time = getIndexResponse.defaultSettings().get("index") + .getAsTime("index.refresh_interval", 
null); // <7> + // end::get-index-response + + assertEquals( + Collections.singletonMap("properties", + Collections.singletonMap("field-1", Collections.singletonMap("type", "integer"))), + indexTypeMappings + ); + assertTrue(indexAliases.isEmpty()); + assertEquals(IndexSettings.DEFAULT_REFRESH_INTERVAL, time); + assertEquals("3", numberOfShardsString); + assertEquals(Integer.valueOf(3), numberOfShards); + + // tag::get-index-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetIndexResponse getIndexResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-index-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-index-execute-async + client.indices().getAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-index-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + public void testForceMergeIndex() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/indices/get_index.asciidoc b/docs/java-rest/high-level/indices/get_index.asciidoc new file mode 100644 index 0000000000000..5fb1599613ad3 --- /dev/null +++ b/docs/java-rest/high-level/indices/get_index.asciidoc @@ -0,0 +1,88 @@ +[[java-rest-high-get-index]] +=== Get Index API + +[[java-rest-high-get-index-request]] +==== Get Index Request + +A `GetIndexRequest` requires one or more `index` arguments: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-request] +-------------------------------------------------- +<1> The index whose information we want to retrieve + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-request-includeDefaults] +-------------------------------------------------- +<1> If true, defaults will be returned for settings not explicitly set on the index + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-request-indicesOptions] +-------------------------------------------------- +<1> Setting `IndicesOptions` controls how unavailable indices are resolved and +how wildcard expressions are expanded + +[[java-rest-high-get-index-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-execute] +-------------------------------------------------- + +[[java-rest-high-get-index-async]] +==== Asynchronous Execution + +The asynchronous execution of a Get Index request requires both the `GetIndexRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-execute-async] +-------------------------------------------------- +<1> The `GetIndexRequest` to execute and the 
`ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `GetIndexResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument. + +[[java-rest-high-get-index-response]] +==== Get Index Response + +The returned `GetIndexResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-index-response] +-------------------------------------------------- +<1> Retrieve a Map of different types to `MappingMetadata` for `index`. +<2> Retrieve a Map for the properties for document type `doc`. +<3> Get the list of aliases for `index`. +<4> Get the value for the setting string `index.number_of_shards` for `index`. If the setting was not explicitly +specified but was part of the default settings (and includeDefault was `true`) then the default setting would be +retrieved. +<5> Retrieve all settings for `index`. +<6> The `Settings` objects gives more flexibility. Here it is used to extract the setting `index.number_of_shards` as an +integer. +<7> Get the default setting `index.refresh_interval` (if `includeDefault` was set to `true`). If `includeDefault` was set +to `false`, `getIndexResponse.defaultSettings()` will return an empty map. \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 5308646eabad4..71420eb087f85 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -78,6 +78,7 @@ Index Management:: * <> * <> * <> +* <> Mapping Management:: * <> @@ -114,6 +115,7 @@ include::indices/get_settings.asciidoc[] include::indices/put_template.asciidoc[] include::indices/validate_query.asciidoc[] include::indices/get_templates.asciidoc[] +include::indices/get_index.asciidoc[] == Cluster APIs diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index f615718c7d4e2..6474b8acf5298 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -39,6 +39,10 @@ "type": "boolean", "description": "Whether to return all default setting for each of the indices.", "default": false + }, + "master_timeout": { + "type" : "time", + "description" : "Specify timeout for connection to master" } } },