From 653477eb6bfc89f3efaaa8e200ecaf8a0f54690a Mon Sep 17 00:00:00 2001 From: Kaituo Li <4302485+hamersu9t@users.noreply.github.com> Date: Tue, 8 Nov 2022 10:19:50 -0800 Subject: [PATCH] remove bwc code related to 1.0 and earlier version (#714) This PR addresses AD compile failure because the core removed 1x Version constants. (check https://github.com/opensearch-project/OpenSearch/pull/5021) OpenSearch does not support N-2 version compatibility. This is inherited from Elasticsearch and Lucene. So version 1x is not compatible with 3.0. Thus removal of deprecated 1x code. This PR follows suit and removes backward compatibility code on OpenSearch 1.0 and older versions. So we won't support direct upgrade from 1.x domains to 3.x. Testing done: 1. gradle build. Note that CI workflow will fail due to missing job scheduler. We are using a job scheduler from distribution. But due to a circular dependency on -SNAPSHOT builds being published to maven that require the distribution build to be successful (check https://github.com/opensearch-project/opensearch-build/issues/1463), AD compilation failure caused the job scheduler to be taken out. Not including the latest job scheduler will cause AD build to fail. So we have a chicken and egg problem: this PR fixes AD build failure and the PR itself cannot build due to missing job scheduler. To run gradle build, I changed to use local job scheduler and verified gradle build succeeded. Once I merge this PR, the job scheduler should be able to build and be published to maven. Future AD CI should be unblocked after that. 
Signed-off-by: Kaituo Li --- .../opensearch/ad/cluster/ADVersionUtil.java | 4 - .../org/opensearch/ad/cluster/HashRing.java | 2 +- .../opensearch/ad/model/ADTaskProfile.java | 3 +- .../org/opensearch/ad/model/ModelProfile.java | 33 +- .../transport/ADTaskProfileNodeResponse.java | 4 +- .../ad/transport/EntityProfileRequest.java | 24 +- .../ad/transport/EntityProfileResponse.java | 18 +- .../ad/transport/EntityResultRequest.java | 38 +- .../ad/transport/ForwardADTaskRequest.java | 8 +- .../ad/transport/ProfileNodeResponse.java | 19 +- .../ad/transport/ProfileResponse.java | 28 +- .../ad/transport/RCFResultResponse.java | 3 +- src/main/java/org/opensearch/ad/util/Bwc.java | 32 - src/test/java/org/opensearch/BwcTests.java | 564 ------------------ .../opensearch/EntityProfileRequest1_0.java | 105 ---- .../opensearch/EntityProfileResponse1_0.java | 172 ------ .../opensearch/EntityResultRequest1_0.java | 105 ---- .../java/org/opensearch/ModelProfile1_0.java | 114 ---- .../opensearch/ProfileNodeResponse1_0.java | 134 ----- .../org/opensearch/ProfileResponse1_0.java | 169 ------ .../org/opensearch/RCFResultResponse1_0.java | 87 --- .../opensearch/StreamInputOutputTests.java | 293 +++++++++ .../ad/EntityProfileRunnerTests.java | 16 + .../ad/cluster/ADVersionUtilTests.java | 13 +- .../opensearch/ad/cluster/HashRingTests.java | 14 +- .../ad/transport/ADTaskProfileTests.java | 8 +- .../transport/CronTransportActionTests.java | 11 +- .../transport/ForwardADTaskRequestTests.java | 6 +- .../ad/transport/MultiEntityResultTests.java | 7 +- .../test/org/opensearch/ad/util/FakeNode.java | 3 +- 30 files changed, 364 insertions(+), 1673 deletions(-) delete mode 100644 src/main/java/org/opensearch/ad/util/Bwc.java delete mode 100644 src/test/java/org/opensearch/BwcTests.java delete mode 100644 src/test/java/org/opensearch/EntityProfileRequest1_0.java delete mode 100644 src/test/java/org/opensearch/EntityProfileResponse1_0.java delete mode 100644 
src/test/java/org/opensearch/EntityResultRequest1_0.java delete mode 100644 src/test/java/org/opensearch/ModelProfile1_0.java delete mode 100644 src/test/java/org/opensearch/ProfileNodeResponse1_0.java delete mode 100644 src/test/java/org/opensearch/ProfileResponse1_0.java delete mode 100644 src/test/java/org/opensearch/RCFResultResponse1_0.java create mode 100644 src/test/java/org/opensearch/StreamInputOutputTests.java diff --git a/src/main/java/org/opensearch/ad/cluster/ADVersionUtil.java b/src/main/java/org/opensearch/ad/cluster/ADVersionUtil.java index 35c1f7ec..e5ace331 100644 --- a/src/main/java/org/opensearch/ad/cluster/ADVersionUtil.java +++ b/src/main/java/org/opensearch/ad/cluster/ADVersionUtil.java @@ -42,8 +42,4 @@ public static String normalizeVersion(String adVersion) { } return normalizedVersion.toString(); } - - public static boolean compatibleWithVersionOnOrAfter1_1(Version adVersion) { - return adVersion != null && adVersion.onOrAfter(Version.V_1_1_0); - } } diff --git a/src/main/java/org/opensearch/ad/cluster/HashRing.java b/src/main/java/org/opensearch/ad/cluster/HashRing.java index 3998de29..121338e8 100644 --- a/src/main/java/org/opensearch/ad/cluster/HashRing.java +++ b/src/main/java/org/opensearch/ad/cluster/HashRing.java @@ -292,7 +292,7 @@ private void buildCircles(Set removedNodeIds, Set addedNodeIds, // rebuild AD version hash ring with cooldown after all new node added. rebuildCirclesForRealtimeAD(); - if (!dataMigrator.isMigrated() && circles.size() > 0 && circles.lastEntry().getKey().onOrAfter(Version.V_1_1_0)) { + if (!dataMigrator.isMigrated() && circles.size() > 0) { // Find owning node with highest AD version to make sure the data migration logic be compatible to // latest AD version when upgrade. 
Optional owningNode = getOwningNodeWithHighestAdVersion(DEFAULT_HASH_RING_MODEL_ID); diff --git a/src/main/java/org/opensearch/ad/model/ADTaskProfile.java b/src/main/java/org/opensearch/ad/model/ADTaskProfile.java index 1b3a4b62..af0a06a5 100644 --- a/src/main/java/org/opensearch/ad/model/ADTaskProfile.java +++ b/src/main/java/org/opensearch/ad/model/ADTaskProfile.java @@ -20,7 +20,6 @@ import org.opensearch.Version; import org.opensearch.ad.annotation.Generated; -import org.opensearch.ad.cluster.ADVersionUtil; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -181,7 +180,7 @@ public void writeTo(StreamOutput out, Version adVersion) throws IOException { out.writeOptionalInt(thresholdModelTrainingDataSize); out.writeOptionalLong(modelSizeInBytes); out.writeOptionalString(nodeId); - if (ADVersionUtil.compatibleWithVersionOnOrAfter1_1(adVersion)) { + if (adVersion != null) { out.writeOptionalString(taskId); out.writeOptionalString(adTaskType); out.writeOptionalInt(detectorTaskSlots); diff --git a/src/main/java/org/opensearch/ad/model/ModelProfile.java b/src/main/java/org/opensearch/ad/model/ModelProfile.java index ebc13fd1..953c4e16 100644 --- a/src/main/java/org/opensearch/ad/model/ModelProfile.java +++ b/src/main/java/org/opensearch/ad/model/ModelProfile.java @@ -17,7 +17,6 @@ import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.commons.lang.builder.ToStringBuilder; import org.opensearch.ad.constant.CommonName; -import org.opensearch.ad.util.Bwc; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.io.stream.Writeable; @@ -43,42 +42,26 @@ public ModelProfile(String modelId, Entity entity, long modelSizeInBytes) { public ModelProfile(StreamInput in) throws IOException { this.modelId = in.readString(); - if (Bwc.supportMultiCategoryFields(in.getVersion())) { - 
if (in.readBoolean()) { - this.entity = new Entity(in); - } else { - this.entity = null; - } + if (in.readBoolean()) { + this.entity = new Entity(in); } else { this.entity = null; } + this.modelSizeInBytes = in.readLong(); - if (!Bwc.supportMultiCategoryFields(in.getVersion())) { - // removed nodeId since Opensearch 1.1 - // read it and do no assignment - in.readString(); - } } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(modelId); - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - if (entity != null) { - out.writeBoolean(true); - entity.writeTo(out); - } else { - out.writeBoolean(false); - } + if (entity != null) { + out.writeBoolean(true); + entity.writeTo(out); + } else { + out.writeBoolean(false); } out.writeLong(modelSizeInBytes); - // removed nodeId since Opensearch 1.1 - if (!Bwc.supportMultiCategoryFields(out.getVersion())) { - // write empty string for node id as we don't have it - // otherwise, we will get EOFException - out.writeString(CommonName.EMPTY_FIELD); - } } public String getModelId() { diff --git a/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeResponse.java b/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeResponse.java index e0ab1a08..5b74cc99 100644 --- a/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeResponse.java +++ b/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeResponse.java @@ -17,7 +17,6 @@ import org.apache.logging.log4j.Logger; import org.opensearch.Version; import org.opensearch.action.support.nodes.BaseNodeResponse; -import org.opensearch.ad.cluster.ADVersionUtil; import org.opensearch.ad.model.ADTaskProfile; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.io.stream.StreamInput; @@ -50,8 +49,7 @@ public ADTaskProfile getAdTaskProfile() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (adTaskProfile != null - && 
(ADVersionUtil.compatibleWithVersionOnOrAfter1_1(remoteAdVersion) || adTaskProfile.getNodeId() != null)) { + if (adTaskProfile != null && (remoteAdVersion != null || adTaskProfile.getNodeId() != null)) { out.writeBoolean(true); adTaskProfile.writeTo(out, remoteAdVersion); } else { diff --git a/src/main/java/org/opensearch/ad/transport/EntityProfileRequest.java b/src/main/java/org/opensearch/ad/transport/EntityProfileRequest.java index daa69078..37d4fd3e 100644 --- a/src/main/java/org/opensearch/ad/transport/EntityProfileRequest.java +++ b/src/main/java/org/opensearch/ad/transport/EntityProfileRequest.java @@ -23,7 +23,6 @@ import org.opensearch.ad.constant.CommonName; import org.opensearch.ad.model.Entity; import org.opensearch.ad.model.EntityProfileName; -import org.opensearch.ad.util.Bwc; import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -41,17 +40,8 @@ public class EntityProfileRequest extends ActionRequest implements ToXContentObj public EntityProfileRequest(StreamInput in) throws IOException { super(in); adID = in.readString(); - if (Bwc.supportMultiCategoryFields(in.getVersion())) { - entityValue = new Entity(in); - } else { - // entity profile involving an old node won't work. Read - // EntityProfileTransportAction.doExecute for details. Read - // a string to not cause EOF exception. - // Cannot assign null to entityValue as old node has no logic to - // deal with a null entity. 
- String oldFormatEntityString = in.readString(); - entityValue = Entity.createSingleAttributeEntity(CommonName.EMPTY_FIELD, oldFormatEntityString); - } + entityValue = new Entity(in); + int size = in.readVInt(); profilesToCollect = new HashSet(); if (size != 0) { @@ -84,14 +74,8 @@ public Set getProfilesToCollect() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(adID); - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - entityValue.writeTo(out); - } else { - // entity profile involving an old node won't work. Read - // EntityProfileTransportAction.doExecute for details. Write - // a string to not cause EOF exception. - out.writeString(entityValue.toString()); - } + entityValue.writeTo(out); + out.writeVInt(profilesToCollect.size()); for (EntityProfileName profile : profilesToCollect) { out.writeEnum(profile); diff --git a/src/main/java/org/opensearch/ad/transport/EntityProfileResponse.java b/src/main/java/org/opensearch/ad/transport/EntityProfileResponse.java index f4fc49aa..91d45766 100644 --- a/src/main/java/org/opensearch/ad/transport/EntityProfileResponse.java +++ b/src/main/java/org/opensearch/ad/transport/EntityProfileResponse.java @@ -19,9 +19,7 @@ import org.apache.commons.lang.builder.ToStringBuilder; import org.opensearch.action.ActionResponse; import org.opensearch.ad.constant.CommonName; -import org.opensearch.ad.model.ModelProfile; import org.opensearch.ad.model.ModelProfileOnNode; -import org.opensearch.ad.util.Bwc; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.common.xcontent.ToXContentObject; @@ -82,14 +80,7 @@ public EntityProfileResponse(StreamInput in) throws IOException { lastActiveMs = in.readLong(); totalUpdates = in.readLong(); if (in.readBoolean()) { - if (Bwc.supportMultiCategoryFields(in.getVersion())) { - modelProfile = new ModelProfileOnNode(in); - } else { - // we don't have model information from 
old node - ModelProfile profile = new ModelProfile(in); - modelProfile = new ModelProfileOnNode("", profile); - } - + modelProfile = new ModelProfileOnNode(in); } else { modelProfile = null; } @@ -118,12 +109,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(totalUpdates); if (modelProfile != null) { out.writeBoolean(true); - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - modelProfile.writeTo(out); - } else { - ModelProfile oldFormatModelProfile = modelProfile.getModelProfile(); - oldFormatModelProfile.writeTo(out); - } + modelProfile.writeTo(out); } else { out.writeBoolean(false); } diff --git a/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java b/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java index 213412ea..4ffbd465 100644 --- a/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java +++ b/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java @@ -14,7 +14,6 @@ import static org.opensearch.action.ValidateActions.addValidationError; import java.io.IOException; -import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -25,7 +24,6 @@ import org.opensearch.ad.constant.CommonErrorMessages; import org.opensearch.ad.constant.CommonName; import org.opensearch.ad.model.Entity; -import org.opensearch.ad.util.Bwc; import org.opensearch.common.Strings; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -46,21 +44,7 @@ public EntityResultRequest(StreamInput in) throws IOException { // guarded with version check. 
Just in case we receive requests from older node where we use String // to represent an entity - if (Bwc.supportMultiCategoryFields(in.getVersion())) { - this.entities = in.readMap(Entity::new, StreamInput::readDoubleArray); - } else { - // receive a request from a version before OpenSearch 1.1 - // the old request uses Map instead of Map to represent entities - // since it only supports one categorical field. - Map oldFormatEntities = in.readMap(StreamInput::readString, StreamInput::readDoubleArray); - entities = new HashMap<>(); - for (Map.Entry entry : oldFormatEntities.entrySet()) { - // we don't know the category field name as we don't have access to detector config object - // so we put empty string as the category field name for now. Will handle the case - // in EntityResultTransportAciton. - entities.put(Entity.createSingleAttributeEntity(CommonName.EMPTY_FIELD, entry.getKey()), entry.getValue()); - } - } + this.entities = in.readMap(Entity::new, StreamInput::readDoubleArray); this.start = in.readLong(); this.end = in.readLong(); @@ -96,25 +80,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(this.detectorId); // guarded with version check. Just in case we send requests to older node where we use String // to represent an entity - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - out.writeMap(entities, (s, e) -> e.writeTo(s), StreamOutput::writeDoubleArray); - } else { - Map oldFormatEntities = new HashMap<>(); - for (Map.Entry entry : entities.entrySet()) { - Map attributes = entry.getKey().getAttributes(); - if (attributes.size() != 1) { - // cannot send a multi-category field entity to old node since it will - // cause EOF exception and stop the detector. The issue - // is temporary and will be gone after upgrade completes. - // Since one EntityResultRequest is sent to one node, we can safely - // ignore the rest of the requests. - LOG.info("Skip sending multi-category entities to an incompatible node. 
Attributes: ", attributes); - break; - } - oldFormatEntities.put(entry.getKey().getAttributes().entrySet().iterator().next().getValue(), entry.getValue()); - } - out.writeMap(oldFormatEntities, StreamOutput::writeString, StreamOutput::writeDoubleArray); - } + out.writeMap(entities, (s, e) -> e.writeTo(s), StreamOutput::writeDoubleArray); out.writeLong(this.start); out.writeLong(this.end); diff --git a/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java b/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java index 612dfd63..6f22811b 100644 --- a/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java +++ b/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java @@ -20,7 +20,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.ad.cluster.ADVersionUtil; import org.opensearch.ad.common.exception.ADVersionException; import org.opensearch.ad.constant.CommonErrorMessages; import org.opensearch.ad.model.ADTask; @@ -64,11 +63,8 @@ public ForwardADTaskRequest( Integer availableTaskSlots, Version remoteAdVersion ) { - if (!ADVersionUtil.compatibleWithVersionOnOrAfter1_1(remoteAdVersion)) { - throw new ADVersionException( - detector.getDetectorId(), - "Can't forward AD task request to node running AD version " + remoteAdVersion - ); + if (remoteAdVersion == null) { + throw new ADVersionException(detector.getDetectorId(), "Can't forward AD task request to node running null AD version "); } this.detector = detector; this.detectionDateRange = detectionDateRange; diff --git a/src/main/java/org/opensearch/ad/transport/ProfileNodeResponse.java b/src/main/java/org/opensearch/ad/transport/ProfileNodeResponse.java index c4567642..cbf32d2a 100644 --- a/src/main/java/org/opensearch/ad/transport/ProfileNodeResponse.java +++ b/src/main/java/org/opensearch/ad/transport/ProfileNodeResponse.java @@ -18,7 +18,6 @@ import 
org.opensearch.action.support.nodes.BaseNodeResponse; import org.opensearch.ad.constant.CommonName; import org.opensearch.ad.model.ModelProfile; -import org.opensearch.ad.util.Bwc; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -51,7 +50,7 @@ public ProfileNodeResponse(StreamInput in) throws IOException { shingleSize = in.readInt(); activeEntities = in.readVLong(); totalUpdates = in.readVLong(); - if (Bwc.supportMultiCategoryFields(in.getVersion()) && in.readBoolean()) { + if (in.readBoolean()) { // added after OpenSearch 1.0 modelProfiles = in.readList(ModelProfile::new); modelCount = in.readVLong(); @@ -111,15 +110,13 @@ public void writeTo(StreamOutput out) throws IOException { out.writeInt(shingleSize); out.writeVLong(activeEntities); out.writeVLong(totalUpdates); - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - // added after OpenSearch 1.0 - if (modelProfiles != null) { - out.writeBoolean(true); - out.writeList(modelProfiles); - out.writeVLong(modelCount); - } else { - out.writeBoolean(false); - } + // added after OpenSearch 1.0 + if (modelProfiles != null) { + out.writeBoolean(true); + out.writeList(modelProfiles); + out.writeVLong(modelCount); + } else { + out.writeBoolean(false); } } diff --git a/src/main/java/org/opensearch/ad/transport/ProfileResponse.java b/src/main/java/org/opensearch/ad/transport/ProfileResponse.java index 9c16573a..9d28a77e 100644 --- a/src/main/java/org/opensearch/ad/transport/ProfileResponse.java +++ b/src/main/java/org/opensearch/ad/transport/ProfileResponse.java @@ -23,7 +23,6 @@ import org.opensearch.ad.constant.CommonName; import org.opensearch.ad.model.ModelProfile; import org.opensearch.ad.model.ModelProfileOnNode; -import org.opensearch.ad.util.Bwc; import org.opensearch.cluster.ClusterName; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -65,13 
+64,7 @@ public ProfileResponse(StreamInput in) throws IOException { int size = in.readVInt(); modelProfile = new ModelProfileOnNode[size]; for (int i = 0; i < size; i++) { - if (Bwc.supportMultiCategoryFields(in.getVersion())) { - modelProfile[i] = new ModelProfileOnNode(in); - } else { - // we don't have model information from old node - ModelProfile profile = new ModelProfile(in); - modelProfile[i] = new ModelProfileOnNode(CommonName.EMPTY_FIELD, profile); - } + modelProfile[i] = new ModelProfileOnNode(in); } shingleSize = in.readInt(); @@ -79,9 +72,7 @@ public ProfileResponse(StreamInput in) throws IOException { totalSizeInBytes = in.readVLong(); activeEntities = in.readVLong(); totalUpdates = in.readVLong(); - if (Bwc.supportMultiCategoryFields(in.getVersion())) { - modelCount = in.readVLong(); - } + modelCount = in.readVLong(); } /** @@ -140,15 +131,8 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(modelProfile.length); - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - for (ModelProfileOnNode profile : modelProfile) { - profile.writeTo(out); - } - } else { - for (ModelProfileOnNode profile : modelProfile) { - ModelProfile oldFormatModelProfile = profile.getModelProfile(); - oldFormatModelProfile.writeTo(out); - } + for (ModelProfileOnNode profile : modelProfile) { + profile.writeTo(out); } out.writeInt(shingleSize); @@ -156,9 +140,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(totalSizeInBytes); out.writeVLong(activeEntities); out.writeVLong(totalUpdates); - if (Bwc.supportMultiCategoryFields(out.getVersion())) { - out.writeVLong(modelCount); - } + out.writeVLong(modelCount); } @Override diff --git a/src/main/java/org/opensearch/ad/transport/RCFResultResponse.java b/src/main/java/org/opensearch/ad/transport/RCFResultResponse.java index f7f74907..c9f7f2ad 100644 --- a/src/main/java/org/opensearch/ad/transport/RCFResultResponse.java +++ 
b/src/main/java/org/opensearch/ad/transport/RCFResultResponse.java @@ -15,7 +15,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionResponse; -import org.opensearch.ad.cluster.ADVersionUtil; import org.opensearch.ad.constant.CommonName; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; @@ -168,7 +167,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(confidence); out.writeVInt(forestSize); out.writeDoubleArray(attribution); - if (ADVersionUtil.compatibleWithVersionOnOrAfter1_1(remoteAdVersion)) { + if (remoteAdVersion != null) { out.writeLong(totalUpdates); out.writeDouble(anomalyGrade); out.writeOptionalInt(relativeIndex); diff --git a/src/main/java/org/opensearch/ad/util/Bwc.java b/src/main/java/org/opensearch/ad/util/Bwc.java deleted file mode 100644 index 6f921b0e..00000000 --- a/src/main/java/org/opensearch/ad/util/Bwc.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.ad.util; - -import org.opensearch.Version; - -/** - * A helper class for various feature backward compatibility test - * - */ -public class Bwc { - public static boolean DISABLE_BWC = true; - - /** - * We are gonna start supporting multi-category fields since version 1.1.0. 
- * - * @param version test version - * @return whether the version support multiple category fields - */ - public static boolean supportMultiCategoryFields(Version version) { - return version.after(Version.V_1_0_0); - } -} diff --git a/src/test/java/org/opensearch/BwcTests.java b/src/test/java/org/opensearch/BwcTests.java deleted file mode 100644 index b7cf289e..00000000 --- a/src/test/java/org/opensearch/BwcTests.java +++ /dev/null @@ -1,564 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch; - -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; -import static org.hamcrest.Matchers.equalTo; -import static org.opensearch.test.OpenSearchTestCase.randomDouble; -import static org.opensearch.test.OpenSearchTestCase.randomDoubleBetween; -import static org.opensearch.test.OpenSearchTestCase.randomIntBetween; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import org.junit.BeforeClass; -import org.opensearch.action.FailedNodeException; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.constant.CommonName; -import org.opensearch.ad.model.Entity; -import org.opensearch.ad.model.EntityProfileName; -import org.opensearch.ad.model.ModelProfile; -import org.opensearch.ad.model.ModelProfileOnNode; -import org.opensearch.ad.transport.EntityProfileAction; -import org.opensearch.ad.transport.EntityProfileRequest; -import org.opensearch.ad.transport.EntityProfileResponse; -import 
org.opensearch.ad.transport.EntityResultRequest; -import org.opensearch.ad.transport.ProfileNodeResponse; -import org.opensearch.ad.transport.ProfileResponse; -import org.opensearch.ad.transport.RCFResultResponse; -import org.opensearch.ad.util.Bwc; -import org.opensearch.cluster.ClusterName; -import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.transport.TransportAddress; - -/** - * Put in core package so that we can using Version's package private constructor - * - */ -public class BwcTests extends AbstractADTest { - public static Version V_1_1_0 = new Version(1010099, org.apache.lucene.util.Version.LUCENE_8_8_2); - private EntityResultRequest entityResultRequest1_1; - private EntityResultRequest1_0 entityResultRequest1_0; - private String detectorId; - private long start, end; - private Map entities1_1, convertedEntities1_0; - private Map entities1_0; - private BytesStreamOutput output1_1, output1_0; - private String categoryField, categoryValue, categoryValue2; - private double[] feature; - private EntityProfileRequest entityProfileRequest1_1; - private EntityProfileRequest1_0 entityProfileRequest1_0; - private Entity entity, entity2, convertedEntity; - private Set profilesToCollect; - private String nodeId = "abc"; - private String modelId = "123"; - private long modelSize = 712480L; - private long modelSize2 = 112480L; - private EntityProfileResponse entityProfileResponse1_1; - private EntityProfileResponse1_0 entityProfileResponse1_0; - private ModelProfileOnNode convertedModelProfileOnNode; - private ProfileResponse profileResponse1_1; - private ProfileResponse1_0 profileResponse1_0; - private ModelProfileOnNode[] convertedModelProfileOnNodeArray; - private ModelProfile1_0[] convertedModelProfile; - private RCFResultResponse rcfResultResponse1_1; - private RCFResultResponse1_0 rcfResultResponse1_0; - - private boolean 
areEqualWithArrayValue(Map first, Map second) { - if (first.size() != second.size()) { - return false; - } - - return first.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), second.get(e.getKey()))); - } - - private boolean areEqualEntityArrayValue1_0(Map first, Map second) { - if (first.size() != second.size()) { - return false; - } - - return first.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), second.get(e.getKey()))); - } - - @BeforeClass - public static void setUpBeforeClass() { - Bwc.DISABLE_BWC = false; - } - - @Override - public void setUp() throws Exception { - super.setUp(); - - categoryField = "a"; - categoryValue = "b"; - categoryValue2 = "b2"; - - feature = new double[] { 0.3 }; - detectorId = "123"; - - entity = Entity.createSingleAttributeEntity(categoryField, categoryValue); - entity2 = Entity.createSingleAttributeEntity(categoryField, categoryValue2); - convertedEntity = Entity.createSingleAttributeEntity(CommonName.EMPTY_FIELD, categoryValue); - - output1_1 = new BytesStreamOutput(); - output1_1.setVersion(V_1_1_0); - - output1_0 = new BytesStreamOutput(); - output1_0.setVersion(Version.V_1_0_0); - } - - private void setUpEntityResultRequest() { - entities1_1 = new HashMap<>(); - entities1_1.put(entity, feature); - start = 10L; - end = 20L; - entityResultRequest1_1 = new EntityResultRequest(detectorId, entities1_1, start, end); - - entities1_0 = new HashMap<>(); - entities1_0.put(categoryValue, feature); - entityResultRequest1_0 = new EntityResultRequest1_0(detectorId, entities1_0, start, end); - convertedEntities1_0 = new HashMap(); - convertedEntities1_0.put(convertedEntity, feature); - } - - /** - * For EntityResultRequest, the input is a 1.1 stream. - * @throws IOException when serialization/deserialization has issues. 
- */ - public void testDeSerializeEntityResultRequest1_1() throws IOException { - setUpEntityResultRequest(); - - entityResultRequest1_1.writeTo(output1_1); - - StreamInput streamInput = output1_1.bytes().streamInput(); - streamInput.setVersion(V_1_1_0); - EntityResultRequest readRequest = new EntityResultRequest(streamInput); - assertThat(readRequest.getDetectorId(), equalTo(detectorId)); - assertThat(readRequest.getStart(), equalTo(start)); - assertThat(readRequest.getEnd(), equalTo(end)); - assertTrue(areEqualWithArrayValue(readRequest.getEntities(), entities1_1)); - } - - /** - * For EntityResultRequest, the input is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testDeSerializeEntityResultRequest1_0() throws IOException { - setUpEntityResultRequest(); - - entityResultRequest1_0.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - EntityResultRequest readRequest = new EntityResultRequest(streamInput); - assertThat(readRequest.getDetectorId(), equalTo(detectorId)); - assertThat(readRequest.getStart(), equalTo(start)); - assertThat(readRequest.getEnd(), equalTo(end)); - assertTrue(areEqualWithArrayValue(readRequest.getEntities(), convertedEntities1_0)); - } - - /** - * For EntityResultRequest, the output is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. 
- */ - public void testSerializateEntityResultRequest1_0() throws IOException { - setUpEntityResultRequest(); - - entityResultRequest1_1.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - EntityResultRequest1_0 readRequest = new EntityResultRequest1_0(streamInput); - assertThat(readRequest.getDetectorId(), equalTo(detectorId)); - assertThat(readRequest.getStart(), equalTo(start)); - assertThat(readRequest.getEnd(), equalTo(end)); - assertTrue(areEqualEntityArrayValue1_0(readRequest.getEntities(), entityResultRequest1_0.getEntities())); - } - - private void setUpEntityProfileRequest() { - profilesToCollect = new HashSet(); - profilesToCollect.add(EntityProfileName.STATE); - entityProfileRequest1_1 = new EntityProfileRequest(detectorId, entity, profilesToCollect); - entityProfileRequest1_0 = new EntityProfileRequest1_0(detectorId, categoryValue, profilesToCollect); - } - - /** - * For EntityResultRequest, the input is a 1.1 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testDeserializeEntityProfileRequest1_1() throws IOException { - setUpEntityProfileRequest(); - - entityProfileRequest1_1.writeTo(output1_1); - - StreamInput streamInput = output1_1.bytes().streamInput(); - streamInput.setVersion(V_1_1_0); - EntityProfileRequest readRequest = new EntityProfileRequest(streamInput); - assertThat(readRequest.getAdID(), equalTo(detectorId)); - assertThat(readRequest.getEntityValue(), equalTo(entity)); - assertThat(readRequest.getProfilesToCollect(), equalTo(profilesToCollect)); - } - - /** - * For EntityResultRequest, the input is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. 
- */ - public void testDeserializeEntityProfileRequest1_0() throws IOException { - setUpEntityProfileRequest(); - - entityProfileRequest1_0.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - EntityProfileRequest readRequest = new EntityProfileRequest(streamInput); - assertThat(readRequest.getAdID(), equalTo(detectorId)); - assertThat(readRequest.getEntityValue(), equalTo(convertedEntity)); - assertThat(readRequest.getProfilesToCollect(), equalTo(profilesToCollect)); - } - - /** - * For EntityResultRequest, the output is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testSerializeEntityProfileRequest1_0() throws IOException { - setUpEntityProfileRequest(); - - entityProfileRequest1_1.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - EntityProfileRequest1_0 readRequest = new EntityProfileRequest1_0(streamInput); - assertThat(readRequest.getAdID(), equalTo(detectorId)); - assertThat(readRequest.getEntityValue(), equalTo(entity.toString())); - assertThat(readRequest.getProfilesToCollect(), equalTo(profilesToCollect)); - } - - private void setUpEntityProfileResponse() { - long lastActiveTimestamp = 10L; - EntityProfileResponse.Builder builder = new EntityProfileResponse.Builder(); - builder.setLastActiveMs(lastActiveTimestamp).build(); - ModelProfile modelProfile = new ModelProfile(modelId, entity, modelSize); - ModelProfileOnNode model = new ModelProfileOnNode(nodeId, modelProfile); - builder.setModelProfile(model); - entityProfileResponse1_1 = builder.build(); - - EntityProfileResponse1_0.Builder builder1_0 = new EntityProfileResponse1_0.Builder(); - builder1_0.setLastActiveMs(lastActiveTimestamp).build(); - ModelProfile1_0 modelProfile1_0 = new ModelProfile1_0(modelId, modelSize, nodeId); - builder1_0.setModelProfile(modelProfile1_0); - 
entityProfileResponse1_0 = builder1_0.build(); - ModelProfile convertedModelProfile = new ModelProfile(modelId, null, modelSize); - convertedModelProfileOnNode = new ModelProfileOnNode(CommonName.EMPTY_FIELD, convertedModelProfile); - } - - /** - * For EntityProfileResponse, the input is a 1.1 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testDeserializeEntityProfileResponse1_1() throws IOException { - setUpEntityProfileResponse(); - - entityProfileResponse1_1.writeTo(output1_1); - - StreamInput streamInput = output1_1.bytes().streamInput(); - streamInput.setVersion(V_1_1_0); - EntityProfileResponse readResponse = EntityProfileAction.INSTANCE.getResponseReader().read(streamInput); - assertThat(readResponse.getModelProfile(), equalTo(entityProfileResponse1_1.getModelProfile())); - assertThat(readResponse.getLastActiveMs(), equalTo(entityProfileResponse1_1.getLastActiveMs())); - assertThat(readResponse.getTotalUpdates(), equalTo(entityProfileResponse1_0.getTotalUpdates())); - } - - /** - * For EntityProfileResponse, the input is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testDeserializeEntityProfileResponse1_0() throws IOException { - setUpEntityProfileResponse(); - - entityProfileResponse1_0.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - EntityProfileResponse readResponse = EntityProfileAction.INSTANCE.getResponseReader().read(streamInput); - assertThat(readResponse.getModelProfile(), equalTo(convertedModelProfileOnNode)); - assertThat(readResponse.getLastActiveMs(), equalTo(entityProfileResponse1_0.getLastActiveMs())); - assertThat(readResponse.getTotalUpdates(), equalTo(entityProfileResponse1_0.getTotalUpdates())); - } - - /** - * For EntityProfileResponse, the output is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. 
- */ - public void testSerializeEntityProfileResponse1_0() throws IOException { - setUpEntityProfileResponse(); - - entityProfileResponse1_1.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - EntityProfileResponse1_0 readResponse = new EntityProfileResponse1_0(streamInput); - assertThat(readResponse.getModelProfile(), equalTo(new ModelProfile1_0(modelId, modelSize, CommonName.EMPTY_FIELD))); - assertThat(readResponse.getLastActiveMs(), equalTo(entityProfileResponse1_1.getLastActiveMs())); - assertThat(readResponse.getTotalUpdates(), equalTo(entityProfileResponse1_0.getTotalUpdates())); - } - - @SuppressWarnings("serial") - private void setUpProfileResponse() { - String node1 = "node1"; - String nodeName1 = "nodename1"; - DiscoveryNode discoveryNode1_1 = new DiscoveryNode( - nodeName1, - node1, - new TransportAddress(TransportAddress.META_ADDRESS, 9300), - emptyMap(), - emptySet(), - V_1_1_0 - ); - - String node2 = "node2"; - String nodeName2 = "nodename2"; - DiscoveryNode discoveryNode2 = new DiscoveryNode( - nodeName2, - node2, - new TransportAddress(TransportAddress.META_ADDRESS, 9301), - emptyMap(), - emptySet(), - V_1_1_0 - ); - - String model1Id = "model1"; - String model2Id = "model2"; - - Map modelSizeMap1 = new HashMap() { - { - put(model1Id, modelSize); - put(model2Id, modelSize2); - } - }; - Map modelSizeMap2 = new HashMap(); - - int shingleSize = 8; - - ModelProfile modelProfile = new ModelProfile(model1Id, entity, modelSize); - ModelProfile modelProfile2 = new ModelProfile(model2Id, entity2, modelSize2); - - ProfileNodeResponse profileNodeResponse1 = new ProfileNodeResponse( - discoveryNode1_1, - modelSizeMap1, - shingleSize, - 0, - 0, - Arrays.asList(modelProfile, modelProfile2), - modelSizeMap1.size() - ); - ProfileNodeResponse profileNodeResponse2 = new ProfileNodeResponse( - discoveryNode2, - modelSizeMap2, - -1, - 0, - 0, - new ArrayList<>(), - modelSizeMap2.size() - ); - 
List profileNodeResponses = Arrays.asList(profileNodeResponse1, profileNodeResponse2); - List failures = Collections.emptyList(); - - ClusterName clusterName = new ClusterName("test-cluster-name"); - profileResponse1_1 = new ProfileResponse(clusterName, profileNodeResponses, failures); - - ProfileNodeResponse1_0 profileNodeResponse1_1_0 = new ProfileNodeResponse1_0(discoveryNode1_1, modelSizeMap1, shingleSize, 0, 0); - ProfileNodeResponse1_0 profileNodeResponse2_1_0 = new ProfileNodeResponse1_0(discoveryNode2, modelSizeMap2, -1, 0, 0); - List profileNodeResponses1_0 = Arrays.asList(profileNodeResponse1_1_0, profileNodeResponse2_1_0); - profileResponse1_0 = new ProfileResponse1_0(clusterName, profileNodeResponses1_0, failures); - - convertedModelProfileOnNodeArray = new ModelProfileOnNode[2]; - ModelProfile convertedModelProfile1 = new ModelProfile(model1Id, null, modelSize); - convertedModelProfileOnNodeArray[0] = new ModelProfileOnNode(CommonName.EMPTY_FIELD, convertedModelProfile1); - - ModelProfile convertedModelProfile2 = new ModelProfile(model2Id, null, modelSize2); - convertedModelProfileOnNodeArray[1] = new ModelProfileOnNode(CommonName.EMPTY_FIELD, convertedModelProfile2); - - convertedModelProfile = new ModelProfile1_0[2]; - convertedModelProfile[0] = new ModelProfile1_0(model1Id, modelSize, CommonName.EMPTY_FIELD); - convertedModelProfile[1] = new ModelProfile1_0(model2Id, modelSize2, CommonName.EMPTY_FIELD); - } - - /** - * For ProfileResponse, the input is a 1.1 stream. - * @throws IOException when serialization/deserialization has issues. 
- */ - public void testDeserializeProfileResponse1_1() throws IOException { - setUpProfileResponse(); - - profileResponse1_1.writeTo(output1_1); - - StreamInput streamInput = output1_1.bytes().streamInput(); - streamInput.setVersion(V_1_1_0); - ProfileResponse readResponse = new ProfileResponse(streamInput); - assertThat(readResponse.getModelProfile(), equalTo(profileResponse1_1.getModelProfile())); - assertThat(readResponse.getShingleSize(), equalTo(profileResponse1_1.getShingleSize())); - assertThat(readResponse.getActiveEntities(), equalTo(profileResponse1_1.getActiveEntities())); - assertThat(readResponse.getTotalUpdates(), equalTo(profileResponse1_1.getTotalUpdates())); - assertThat(readResponse.getCoordinatingNode(), equalTo(profileResponse1_1.getCoordinatingNode())); - assertThat(readResponse.getTotalSizeInBytes(), equalTo(profileResponse1_1.getTotalSizeInBytes())); - assertThat(readResponse.getModelCount(), equalTo(profileResponse1_1.getModelCount())); - } - - /** - * For ProfileResponse, the input is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testDeserializeProfileResponse1_0() throws IOException { - setUpProfileResponse(); - - profileResponse1_0.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - ProfileResponse readResponse = new ProfileResponse(streamInput); - ModelProfileOnNode[] actualModelProfileOnNode = readResponse.getModelProfile(); - - // since ProfileResponse1_0's constructor iterates modelSize and modelSize is - // a HashMap. The iteration order is not deterministic. We have to sort the - // results in an ordered fashion to compare with expected value. 
- Arrays.sort(actualModelProfileOnNode, new Comparator() { - @Override - public int compare(ModelProfileOnNode o1, ModelProfileOnNode o2) { - return o1.getModelId().compareTo(o2.getModelId()); - } - - }); - assertThat(actualModelProfileOnNode, equalTo(convertedModelProfileOnNodeArray)); - assertThat(readResponse.getShingleSize(), equalTo(profileResponse1_1.getShingleSize())); - assertThat(readResponse.getActiveEntities(), equalTo(profileResponse1_1.getActiveEntities())); - assertThat(readResponse.getTotalUpdates(), equalTo(profileResponse1_1.getTotalUpdates())); - assertThat(readResponse.getCoordinatingNode(), equalTo(profileResponse1_1.getCoordinatingNode())); - assertThat(readResponse.getTotalSizeInBytes(), equalTo(profileResponse1_1.getTotalSizeInBytes())); - assertThat(readResponse.getModelCount(), equalTo(0L)); - } - - /** - * For ProfileResponse, the output is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testSerializeProfileResponse1_0() throws IOException { - setUpProfileResponse(); - - profileResponse1_1.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - ProfileResponse1_0 readResponse = new ProfileResponse1_0(streamInput); - assertThat(readResponse.getModelProfile(), equalTo(convertedModelProfile)); - assertThat(readResponse.getShingleSize(), equalTo(profileResponse1_1.getShingleSize())); - assertThat(readResponse.getActiveEntities(), equalTo(profileResponse1_1.getActiveEntities())); - assertThat(readResponse.getTotalUpdates(), equalTo(profileResponse1_1.getTotalUpdates())); - assertThat(readResponse.getCoordinatingNode(), equalTo(profileResponse1_1.getCoordinatingNode())); - assertThat(readResponse.getTotalSizeInBytes(), equalTo(profileResponse1_1.getTotalSizeInBytes())); - } - - /** - * jacoco complained line coverage is 0.5, but we only have one line method - * that is covered. It flags the class name not covered. 
- * Use solution mentioned in https://tinyurl.com/2pttzsd3 - */ - @SuppressWarnings("static-access") - public void testBwcInstance() { - Bwc bwc = new Bwc(); - assertNotNull(bwc); - } - - private void setUpRCFResultResponse() { - rcfResultResponse1_1 = new RCFResultResponse( - 0.345, - 0.123, - 30, - new double[] { 0.3, 0.7 }, - 134, - 0.4, - Version.CURRENT, - randomIntBetween(-3, 0), - new double[] { randomDoubleBetween(0, 1.0, true), randomDoubleBetween(0, 1.0, true) }, - new double[][] { new double[] { randomDouble(), randomDouble() } }, - new double[] { randomDoubleBetween(0, 1.0, true), randomDoubleBetween(0, 1.0, true) }, - randomDoubleBetween(1.1, 10.0, true) - ); - rcfResultResponse1_0 = new RCFResultResponse1_0(0.345, 0.123, 30, new double[] { 0.3, 0.7 }); - } - - /** - * For RCFResultResponse, the input is a 1.1 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testDeserializeRCFResultResponse1_1() throws IOException { - setUpRCFResultResponse(); - - rcfResultResponse1_1.writeTo(output1_1); - - StreamInput streamInput = output1_1.bytes().streamInput(); - streamInput.setVersion(V_1_1_0); - RCFResultResponse readResponse = new RCFResultResponse(streamInput); - assertArrayEquals(readResponse.getAttribution(), rcfResultResponse1_1.getAttribution(), 0.001); - assertThat(readResponse.getConfidence(), equalTo(rcfResultResponse1_1.getConfidence())); - assertThat(readResponse.getForestSize(), equalTo(rcfResultResponse1_1.getForestSize())); - assertThat(readResponse.getTotalUpdates(), equalTo(rcfResultResponse1_1.getTotalUpdates())); - assertThat(readResponse.getRCFScore(), equalTo(rcfResultResponse1_1.getRCFScore())); - } - - /** - * For RCFResultResponse, the input is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. 
- */ - public void testDeserializeRCFResultResponse1_0() throws IOException { - setUpRCFResultResponse(); - - rcfResultResponse1_0.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - RCFResultResponse readResponse = new RCFResultResponse(streamInput); - assertArrayEquals(readResponse.getAttribution(), rcfResultResponse1_0.getAttribution(), 0.001); - assertThat(readResponse.getConfidence(), equalTo(rcfResultResponse1_0.getConfidence())); - assertThat(readResponse.getForestSize(), equalTo(rcfResultResponse1_0.getForestSize())); - assertThat(readResponse.getTotalUpdates(), equalTo(0L)); - assertThat(readResponse.getRCFScore(), equalTo(rcfResultResponse1_0.getRCFScore())); - } - - /** - * For RCFResultResponse, the output is a 1.0 stream. - * @throws IOException when serialization/deserialization has issues. - */ - public void testSerializeRCFResultResponse1_0() throws IOException { - setUpRCFResultResponse(); - - rcfResultResponse1_1.writeTo(output1_0); - - StreamInput streamInput = output1_0.bytes().streamInput(); - streamInput.setVersion(Version.V_1_0_0); - RCFResultResponse1_0 readResponse = new RCFResultResponse1_0(streamInput); - assertArrayEquals(readResponse.getAttribution(), rcfResultResponse1_0.getAttribution(), 0.001); - assertThat(readResponse.getConfidence(), equalTo(rcfResultResponse1_0.getConfidence())); - assertThat(readResponse.getForestSize(), equalTo(rcfResultResponse1_0.getForestSize())); - assertThat(readResponse.getRCFScore(), equalTo(rcfResultResponse1_0.getRCFScore())); - } -} diff --git a/src/test/java/org/opensearch/EntityProfileRequest1_0.java b/src/test/java/org/opensearch/EntityProfileRequest1_0.java deleted file mode 100644 index 90d35627..00000000 --- a/src/test/java/org/opensearch/EntityProfileRequest1_0.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file 
be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch; - -import static org.opensearch.action.ValidateActions.addValidationError; - -import java.io.IOException; -import java.util.HashSet; -import java.util.Set; - -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.ad.constant.CommonErrorMessages; -import org.opensearch.ad.constant.CommonName; -import org.opensearch.ad.model.EntityProfileName; -import org.opensearch.common.Strings; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; - -public class EntityProfileRequest1_0 extends ActionRequest implements ToXContentObject { - public static final String ENTITY = "entity"; - public static final String PROFILES = "profiles"; - private String adID; - private String entityValue; - private Set profilesToCollect; - - public EntityProfileRequest1_0(StreamInput in) throws IOException { - super(in); - adID = in.readString(); - entityValue = in.readString(); - int size = in.readVInt(); - profilesToCollect = new HashSet(); - if (size != 0) { - for (int i = 0; i < size; i++) { - profilesToCollect.add(in.readEnum(EntityProfileName.class)); - } - } - } - - public EntityProfileRequest1_0(String adID, String entityValue, Set profilesToCollect) { - super(); - this.adID = adID; - this.entityValue = entityValue; - this.profilesToCollect = profilesToCollect; - } - - public String getAdID() { - return adID; - } - - public String getEntityValue() { - return entityValue; - } - - public Set getProfilesToCollect() { - return profilesToCollect; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - 
out.writeString(adID); - out.writeString(entityValue); - out.writeVInt(profilesToCollect.size()); - for (EntityProfileName profile : profilesToCollect) { - out.writeEnum(profile); - } - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (Strings.isEmpty(adID)) { - validationException = addValidationError(CommonErrorMessages.AD_ID_MISSING_MSG, validationException); - } - if (Strings.isEmpty(entityValue)) { - validationException = addValidationError("Entity value is missing", validationException); - } - if (profilesToCollect == null || profilesToCollect.isEmpty()) { - validationException = addValidationError(CommonErrorMessages.EMPTY_PROFILES_COLLECT, validationException); - } - return validationException; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CommonName.ID_JSON_KEY, adID); - builder.field(ENTITY, entityValue); - builder.field(PROFILES, profilesToCollect); - builder.endObject(); - return builder; - } -} diff --git a/src/test/java/org/opensearch/EntityProfileResponse1_0.java b/src/test/java/org/opensearch/EntityProfileResponse1_0.java deleted file mode 100644 index 78fe8e73..00000000 --- a/src/test/java/org/opensearch/EntityProfileResponse1_0.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch; - -import java.io.IOException; -import java.util.Optional; - -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.commons.lang.builder.ToStringBuilder; -import org.opensearch.action.ActionResponse; -import org.opensearch.ad.constant.CommonName; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; - -public class EntityProfileResponse1_0 extends ActionResponse implements ToXContentObject { - public static final String ACTIVE = "active"; - public static final String LAST_ACTIVE_TS = "last_active_timestamp"; - public static final String TOTAL_UPDATES = "total_updates"; - private final Boolean isActive; - private final long lastActiveMs; - private final long totalUpdates; - private final ModelProfile1_0 modelProfile; - - public static class Builder { - private Boolean isActive = null; - private long lastActiveMs = -1L; - private long totalUpdates = -1L; - private ModelProfile1_0 modelProfile = null; - - public Builder() {} - - public Builder setActive(Boolean isActive) { - this.isActive = isActive; - return this; - } - - public Builder setLastActiveMs(long lastActiveMs) { - this.lastActiveMs = lastActiveMs; - return this; - } - - public Builder setTotalUpdates(long totalUpdates) { - this.totalUpdates = totalUpdates; - return this; - } - - public Builder setModelProfile(ModelProfile1_0 modelProfile) { - this.modelProfile = modelProfile; - return this; - } - - public EntityProfileResponse1_0 build() { - return new EntityProfileResponse1_0(isActive, lastActiveMs, totalUpdates, modelProfile); - } - } - - public EntityProfileResponse1_0(Boolean isActive, long lastActiveTimeMs, long totalUpdates, ModelProfile1_0 modelProfile) { - this.isActive = isActive; - this.lastActiveMs = lastActiveTimeMs; - 
this.totalUpdates = totalUpdates; - this.modelProfile = modelProfile; - } - - public EntityProfileResponse1_0(StreamInput in) throws IOException { - super(in); - isActive = in.readOptionalBoolean(); - lastActiveMs = in.readLong(); - totalUpdates = in.readLong(); - if (in.readBoolean()) { - modelProfile = new ModelProfile1_0(in); - } else { - modelProfile = null; - } - } - - public Optional isActive() { - return Optional.ofNullable(isActive); - } - - public long getLastActiveMs() { - return lastActiveMs; - } - - public long getTotalUpdates() { - return totalUpdates; - } - - public ModelProfile1_0 getModelProfile() { - return modelProfile; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalBoolean(isActive); - out.writeLong(lastActiveMs); - out.writeLong(totalUpdates); - if (modelProfile != null) { - out.writeBoolean(true); - modelProfile.writeTo(out); - } else { - out.writeBoolean(false); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (isActive != null) { - builder.field(ACTIVE, isActive); - } - if (lastActiveMs >= 0) { - builder.field(LAST_ACTIVE_TS, lastActiveMs); - } - if (totalUpdates >= 0) { - builder.field(TOTAL_UPDATES, totalUpdates); - } - if (modelProfile != null) { - builder.field(CommonName.MODEL, modelProfile); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - ToStringBuilder builder = new ToStringBuilder(this); - builder.append(ACTIVE, isActive); - builder.append(LAST_ACTIVE_TS, lastActiveMs); - builder.append(TOTAL_UPDATES, totalUpdates); - builder.append(CommonName.MODEL, modelProfile); - - return builder.toString(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - if (obj instanceof EntityProfileResponse1_0) { - 
EntityProfileResponse1_0 other = (EntityProfileResponse1_0) obj; - EqualsBuilder equalsBuilder = new EqualsBuilder(); - equalsBuilder.append(isActive, other.isActive); - equalsBuilder.append(lastActiveMs, other.lastActiveMs); - equalsBuilder.append(totalUpdates, other.totalUpdates); - equalsBuilder.append(modelProfile, other.modelProfile); - - return equalsBuilder.isEquals(); - } - return false; - } - - @Override - public int hashCode() { - return new HashCodeBuilder().append(isActive).append(lastActiveMs).append(totalUpdates).append(modelProfile).toHashCode(); - } -} diff --git a/src/test/java/org/opensearch/EntityResultRequest1_0.java b/src/test/java/org/opensearch/EntityResultRequest1_0.java deleted file mode 100644 index 9abfc3ec..00000000 --- a/src/test/java/org/opensearch/EntityResultRequest1_0.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch; - -import static org.opensearch.action.ValidateActions.addValidationError; - -import java.io.IOException; -import java.util.Locale; -import java.util.Map; - -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.ad.constant.CommonErrorMessages; -import org.opensearch.ad.constant.CommonName; -import org.opensearch.common.Strings; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; - -public class EntityResultRequest1_0 extends ActionRequest implements ToXContentObject { - - private String detectorId; - private Map entities; - private long start; - private long end; - - public EntityResultRequest1_0(StreamInput in) throws IOException { - super(in); - this.detectorId = in.readString(); - this.entities = in.readMap(StreamInput::readString, StreamInput::readDoubleArray); - this.start = in.readLong(); - this.end = in.readLong(); - } - - public EntityResultRequest1_0(String detectorId, Map entities, long start, long end) { - super(); - this.detectorId = detectorId; - this.entities = entities; - this.start = start; - this.end = end; - } - - public String getDetectorId() { - return this.detectorId; - } - - public Map getEntities() { - return this.entities; - } - - public long getStart() { - return this.start; - } - - public long getEnd() { - return this.end; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeString(this.detectorId); - out.writeMap(this.entities, StreamOutput::writeString, StreamOutput::writeDoubleArray); - out.writeLong(this.start); - out.writeLong(this.end); - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if 
(Strings.isEmpty(detectorId)) { - validationException = addValidationError(CommonErrorMessages.AD_ID_MISSING_MSG, validationException); - } - if (start <= 0 || end <= 0 || start > end) { - validationException = addValidationError( - String.format(Locale.ROOT, "%s: start %d, end %d", CommonErrorMessages.INVALID_TIMESTAMP_ERR_MSG, start, end), - validationException - ); - } - return validationException; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CommonName.ID_JSON_KEY, detectorId); - builder.field(CommonName.START_JSON_KEY, start); - builder.field(CommonName.END_JSON_KEY, end); - for (String entity : entities.keySet()) { - builder.field(entity, entities.get(entity)); - } - builder.endObject(); - return builder; - } -} diff --git a/src/test/java/org/opensearch/ModelProfile1_0.java b/src/test/java/org/opensearch/ModelProfile1_0.java deleted file mode 100644 index 31ec4def..00000000 --- a/src/test/java/org/opensearch/ModelProfile1_0.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch; - -import java.io.IOException; - -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.commons.lang.builder.ToStringBuilder; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; - -public class ModelProfile1_0 implements Writeable, ToXContent { - // field name in toXContent - public static final String MODEL_ID = "model_id"; - public static final String MODEL_SIZE_IN_BYTES = "model_size_in_bytes"; - public static final String NODE_ID = "node_id"; - - private final String modelId; - private final long modelSizeInBytes; - private final String nodeId; - - public ModelProfile1_0(String modelId, long modelSize, String nodeId) { - super(); - this.modelId = modelId; - this.modelSizeInBytes = modelSize; - this.nodeId = nodeId; - } - - public ModelProfile1_0(StreamInput in) throws IOException { - modelId = in.readString(); - modelSizeInBytes = in.readLong(); - nodeId = in.readString(); - } - - public String getModelId() { - return modelId; - } - - public long getModelSize() { - return modelSizeInBytes; - } - - public String getNodeId() { - return nodeId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODEL_ID, modelId); - if (modelSizeInBytes > 0) { - builder.field(MODEL_SIZE_IN_BYTES, modelSizeInBytes); - } - builder.field(NODE_ID, nodeId); - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(modelId); - out.writeLong(modelSizeInBytes); - out.writeString(nodeId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == 
null) - return false; - if (getClass() != obj.getClass()) - return false; - if (obj instanceof ModelProfile1_0) { - ModelProfile1_0 other = (ModelProfile1_0) obj; - EqualsBuilder equalsBuilder = new EqualsBuilder(); - equalsBuilder.append(modelId, other.modelId); - equalsBuilder.append(modelSizeInBytes, other.modelSizeInBytes); - equalsBuilder.append(nodeId, other.nodeId); - - return equalsBuilder.isEquals(); - } - return false; - } - - @Override - public int hashCode() { - return new HashCodeBuilder().append(modelId).append(modelSizeInBytes).append(nodeId).toHashCode(); - } - - @Override - public String toString() { - ToStringBuilder builder = new ToStringBuilder(this); - builder.append(MODEL_ID, modelId); - if (modelSizeInBytes > 0) { - builder.append(MODEL_SIZE_IN_BYTES, modelSizeInBytes); - } - builder.append(NODE_ID, nodeId); - return builder.toString(); - } -} diff --git a/src/test/java/org/opensearch/ProfileNodeResponse1_0.java b/src/test/java/org/opensearch/ProfileNodeResponse1_0.java deleted file mode 100644 index 83a7170c..00000000 --- a/src/test/java/org/opensearch/ProfileNodeResponse1_0.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch; - -import java.io.IOException; -import java.util.Map; - -import org.opensearch.action.support.nodes.BaseNodeResponse; -import org.opensearch.ad.constant.CommonName; -import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentFragment; -import org.opensearch.common.xcontent.XContentBuilder; - -/** - * Profile response on a node - */ -public class ProfileNodeResponse1_0 extends BaseNodeResponse implements ToXContentFragment { - // filed name in toXContent - static final String MODEL_SIZE_IN_BYTES = "model_size_in_bytes"; - - private Map modelSize; - private int shingleSize; - private long activeEntities; - private long totalUpdates; - - /** - * Constructor - * - * @param in StreamInput - * @throws IOException throws an IO exception if the StreamInput cannot be read from - */ - public ProfileNodeResponse1_0(StreamInput in) throws IOException { - super(in); - if (in.readBoolean()) { - modelSize = in.readMap(StreamInput::readString, StreamInput::readLong); - } - shingleSize = in.readInt(); - activeEntities = in.readVLong(); - totalUpdates = in.readVLong(); - } - - /** - * Constructor - * - * @param node DiscoveryNode object - * @param modelSize Mapping of model id to its memory consumption in bytes - * @param shingleSize shingle size - * @param activeEntity active entity count - * @param totalUpdates RCF model total updates - */ - public ProfileNodeResponse1_0(DiscoveryNode node, Map modelSize, int shingleSize, long activeEntity, long totalUpdates) { - super(node); - this.modelSize = modelSize; - this.shingleSize = shingleSize; - this.activeEntities = activeEntity; - this.totalUpdates = totalUpdates; - } - - /** - * Creates a new ProfileNodeResponse object and reads in the profile from an input stream - * - * @param in StreamInput to read from - * @return ProfileNodeResponse object 
corresponding to the input stream - * @throws IOException throws an IO exception if the StreamInput cannot be read from - */ - public static ProfileNodeResponse1_0 readProfiles(StreamInput in) throws IOException { - return new ProfileNodeResponse1_0(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - if (modelSize != null) { - out.writeBoolean(true); - out.writeMap(modelSize, StreamOutput::writeString, StreamOutput::writeLong); - } else { - out.writeBoolean(false); - } - - out.writeInt(shingleSize); - out.writeVLong(activeEntities); - out.writeVLong(totalUpdates); - } - - /** - * Converts profile to xContent - * - * @param builder XContentBuilder - * @param params Params - * @return XContentBuilder - * @throws IOException thrown by builder for invalid field - */ - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(MODEL_SIZE_IN_BYTES); - for (Map.Entry entry : modelSize.entrySet()) { - builder.field(entry.getKey(), entry.getValue()); - } - builder.endObject(); - - builder.field(CommonName.SHINGLE_SIZE, shingleSize); - builder.field(CommonName.ACTIVE_ENTITIES, activeEntities); - builder.field(CommonName.TOTAL_UPDATES, totalUpdates); - - return builder; - } - - public Map getModelSize() { - return modelSize; - } - - public int getShingleSize() { - return shingleSize; - } - - public long getActiveEntities() { - return activeEntities; - } - - public long getTotalUpdates() { - return totalUpdates; - } -} diff --git a/src/test/java/org/opensearch/ProfileResponse1_0.java b/src/test/java/org/opensearch/ProfileResponse1_0.java deleted file mode 100644 index 30d0eee2..00000000 --- a/src/test/java/org/opensearch/ProfileResponse1_0.java +++ /dev/null @@ -1,169 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * 
compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import org.opensearch.action.FailedNodeException; -import org.opensearch.action.support.nodes.BaseNodesResponse; -import org.opensearch.ad.constant.CommonName; -import org.opensearch.cluster.ClusterName; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentFragment; -import org.opensearch.common.xcontent.XContentBuilder; - -/** - * This class consists of the aggregated responses from the nodes - */ -public class ProfileResponse1_0 extends BaseNodesResponse implements ToXContentFragment { - // filed name in toXContent - static final String COORDINATING_NODE = CommonName.COORDINATING_NODE; - static final String SHINGLE_SIZE = CommonName.SHINGLE_SIZE; - static final String TOTAL_SIZE = CommonName.TOTAL_SIZE_IN_BYTES; - static final String ACTIVE_ENTITY = CommonName.ACTIVE_ENTITIES; - static final String MODELS = CommonName.MODELS; - static final String TOTAL_UPDATES = CommonName.TOTAL_UPDATES; - - private ModelProfile1_0[] modelProfile; - private int shingleSize; - private String coordinatingNode; - private long totalSizeInBytes; - private long activeEntities; - private long totalUpdates; - - /** - * Constructor - * - * @param in StreamInput - * @throws IOException thrown when unable to read from stream - */ - public ProfileResponse1_0(StreamInput in) throws IOException { - super(in); - int size = in.readVInt(); - modelProfile = new ModelProfile1_0[size]; - for (int i = 0; i < size; i++) { - modelProfile[i] = new ModelProfile1_0(in); - } - shingleSize = in.readInt(); - coordinatingNode = in.readString(); - totalSizeInBytes = in.readVLong(); - activeEntities = in.readVLong(); - totalUpdates = 
in.readVLong(); - } - - /** - * Constructor - * - * @param clusterName name of cluster - * @param nodes List of ProfileNodeResponse from nodes - * @param failures List of failures from nodes - */ - public ProfileResponse1_0(ClusterName clusterName, List nodes, List failures) { - super(clusterName, nodes, failures); - totalSizeInBytes = 0L; - activeEntities = 0L; - totalUpdates = 0L; - shingleSize = -1; - List modelProfileList = new ArrayList<>(); - for (ProfileNodeResponse1_0 response : nodes) { - String curNodeId = response.getNode().getId(); - if (response.getShingleSize() >= 0) { - coordinatingNode = curNodeId; - shingleSize = response.getShingleSize(); - } - if (response.getModelSize() != null) { - for (Map.Entry entry : response.getModelSize().entrySet()) { - totalSizeInBytes += entry.getValue(); - modelProfileList.add(new ModelProfile1_0(entry.getKey(), entry.getValue(), curNodeId)); - } - } - - if (response.getActiveEntities() > 0) { - activeEntities += response.getActiveEntities(); - } - if (response.getTotalUpdates() > totalUpdates) { - totalUpdates = response.getTotalUpdates(); - } - } - if (coordinatingNode == null) { - coordinatingNode = ""; - } - this.modelProfile = modelProfileList.toArray(new ModelProfile1_0[0]); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeVInt(modelProfile.length); - for (ModelProfile1_0 profile : modelProfile) { - profile.writeTo(out); - } - out.writeInt(shingleSize); - out.writeString(coordinatingNode); - out.writeVLong(totalSizeInBytes); - out.writeVLong(activeEntities); - out.writeVLong(totalUpdates); - } - - @Override - public void writeNodesTo(StreamOutput out, List nodes) throws IOException { - out.writeList(nodes); - } - - @Override - public List readNodesFrom(StreamInput in) throws IOException { - return in.readList(ProfileNodeResponse1_0::readProfiles); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.field(COORDINATING_NODE, coordinatingNode); - builder.field(SHINGLE_SIZE, shingleSize); - builder.field(TOTAL_SIZE, totalSizeInBytes); - builder.field(ACTIVE_ENTITY, activeEntities); - builder.field(TOTAL_UPDATES, totalUpdates); - builder.startArray(MODELS); - for (ModelProfile1_0 profile : modelProfile) { - profile.toXContent(builder, params); - } - builder.endArray(); - return builder; - } - - public ModelProfile1_0[] getModelProfile() { - return modelProfile; - } - - public int getShingleSize() { - return shingleSize; - } - - public long getActiveEntities() { - return activeEntities; - } - - public long getTotalUpdates() { - return totalUpdates; - } - - public String getCoordinatingNode() { - return coordinatingNode; - } - - public long getTotalSizeInBytes() { - return totalSizeInBytes; - } -} diff --git a/src/test/java/org/opensearch/RCFResultResponse1_0.java b/src/test/java/org/opensearch/RCFResultResponse1_0.java deleted file mode 100644 index 6e7103ce..00000000 --- a/src/test/java/org/opensearch/RCFResultResponse1_0.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch; - -import java.io.IOException; - -import org.opensearch.action.ActionResponse; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; - -public class RCFResultResponse1_0 extends ActionResponse implements ToXContentObject { - public static final String RCF_SCORE_JSON_KEY = "rcfScore"; - public static final String CONFIDENCE_JSON_KEY = "confidence"; - public static final String FOREST_SIZE_JSON_KEY = "forestSize"; - public static final String ATTRIBUTION_JSON_KEY = "attribution"; - private double rcfScore; - private double confidence; - private int forestSize; - private double[] attribution; - - public RCFResultResponse1_0(double rcfScore, double confidence, int forestSize, double[] attribution) { - this.rcfScore = rcfScore; - this.confidence = confidence; - this.forestSize = forestSize; - this.attribution = attribution; - } - - public RCFResultResponse1_0(StreamInput in) throws IOException { - super(in); - rcfScore = in.readDouble(); - confidence = in.readDouble(); - forestSize = in.readVInt(); - attribution = in.readDoubleArray(); - } - - public double getRCFScore() { - return rcfScore; - } - - public double getConfidence() { - return confidence; - } - - public int getForestSize() { - return forestSize; - } - - /** - * Returns RCF score attribution. - * - * @return RCF score attribution. 
- */ - public double[] getAttribution() { - return attribution; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeDouble(rcfScore); - out.writeDouble(confidence); - out.writeVInt(forestSize); - out.writeDoubleArray(attribution); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(RCF_SCORE_JSON_KEY, rcfScore); - builder.field(CONFIDENCE_JSON_KEY, confidence); - builder.field(FOREST_SIZE_JSON_KEY, forestSize); - builder.field(ATTRIBUTION_JSON_KEY, attribution); - builder.endObject(); - return builder; - } - -} diff --git a/src/test/java/org/opensearch/StreamInputOutputTests.java b/src/test/java/org/opensearch/StreamInputOutputTests.java new file mode 100644 index 00000000..3b9ecdc5 --- /dev/null +++ b/src/test/java/org/opensearch/StreamInputOutputTests.java @@ -0,0 +1,293 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.equalTo; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.opensearch.action.FailedNodeException; +import org.opensearch.ad.AbstractADTest; +import org.opensearch.ad.model.Entity; +import org.opensearch.ad.model.EntityProfileName; +import org.opensearch.ad.model.ModelProfile; +import org.opensearch.ad.model.ModelProfileOnNode; +import org.opensearch.ad.transport.EntityProfileAction; +import org.opensearch.ad.transport.EntityProfileRequest; +import org.opensearch.ad.transport.EntityProfileResponse; +import org.opensearch.ad.transport.EntityResultRequest; +import org.opensearch.ad.transport.ProfileNodeResponse; +import org.opensearch.ad.transport.ProfileResponse; +import org.opensearch.ad.transport.RCFResultResponse; +import org.opensearch.cluster.ClusterName; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.transport.TransportAddress; + +/** + * Put in core package so that we can using Version's package private constructor + * + */ +public class StreamInputOutputTests extends AbstractADTest { + // public static Version V_1_1_0 = new Version(1010099, org.apache.lucene.util.Version.LUCENE_8_8_2); + private EntityResultRequest entityResultRequest; + private String detectorId; + private long start, end; + private Map entities; + private BytesStreamOutput output; + private String categoryField, categoryValue, categoryValue2; + private double[] feature; + private EntityProfileRequest entityProfileRequest; + private Entity entity, entity2; + private Set 
profilesToCollect; + private String nodeId = "abc"; + private String modelId = "123"; + private long modelSize = 712480L; + private long modelSize2 = 112480L; + private EntityProfileResponse entityProfileResponse; + private ProfileResponse profileResponse; + private RCFResultResponse rcfResultResponse; + + private boolean areEqualWithArrayValue(Map first, Map second) { + if (first.size() != second.size()) { + return false; + } + + return first.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), second.get(e.getKey()))); + } + + @Override + public void setUp() throws Exception { + super.setUp(); + + categoryField = "a"; + categoryValue = "b"; + categoryValue2 = "b2"; + + feature = new double[] { 0.3 }; + detectorId = "123"; + + entity = Entity.createSingleAttributeEntity(categoryField, categoryValue); + entity2 = Entity.createSingleAttributeEntity(categoryField, categoryValue2); + + output = new BytesStreamOutput(); + } + + private void setUpEntityResultRequest() { + entities = new HashMap<>(); + entities.put(entity, feature); + start = 10L; + end = 20L; + entityResultRequest = new EntityResultRequest(detectorId, entities, start, end); + } + + /** + * @throws IOException when serialization/deserialization has issues. 
+ */ + public void testDeSerializeEntityResultRequest() throws IOException { + setUpEntityResultRequest(); + + entityResultRequest.writeTo(output); + + StreamInput streamInput = output.bytes().streamInput(); + EntityResultRequest readRequest = new EntityResultRequest(streamInput); + assertThat(readRequest.getDetectorId(), equalTo(detectorId)); + assertThat(readRequest.getStart(), equalTo(start)); + assertThat(readRequest.getEnd(), equalTo(end)); + assertTrue(areEqualWithArrayValue(readRequest.getEntities(), entities)); + } + + private void setUpEntityProfileRequest() { + profilesToCollect = new HashSet(); + profilesToCollect.add(EntityProfileName.STATE); + entityProfileRequest = new EntityProfileRequest(detectorId, entity, profilesToCollect); + } + + /** + * @throws IOException when serialization/deserialization has issues. + */ + public void testDeserializeEntityProfileRequest() throws IOException { + setUpEntityProfileRequest(); + + entityProfileRequest.writeTo(output); + + StreamInput streamInput = output.bytes().streamInput(); + EntityProfileRequest readRequest = new EntityProfileRequest(streamInput); + assertThat(readRequest.getAdID(), equalTo(detectorId)); + assertThat(readRequest.getEntityValue(), equalTo(entity)); + assertThat(readRequest.getProfilesToCollect(), equalTo(profilesToCollect)); + } + + private void setUpEntityProfileResponse() { + long lastActiveTimestamp = 10L; + EntityProfileResponse.Builder builder = new EntityProfileResponse.Builder(); + builder.setLastActiveMs(lastActiveTimestamp).build(); + ModelProfile modelProfile = new ModelProfile(modelId, entity, modelSize); + ModelProfileOnNode model = new ModelProfileOnNode(nodeId, modelProfile); + builder.setModelProfile(model); + entityProfileResponse = builder.build(); + } + + /** + * @throws IOException when serialization/deserialization has issues. 
+ */ + public void testDeserializeEntityProfileResponse() throws IOException { + setUpEntityProfileResponse(); + + entityProfileResponse.writeTo(output); + + StreamInput streamInput = output.bytes().streamInput(); + EntityProfileResponse readResponse = EntityProfileAction.INSTANCE.getResponseReader().read(streamInput); + assertThat(readResponse.getModelProfile(), equalTo(entityProfileResponse.getModelProfile())); + assertThat(readResponse.getLastActiveMs(), equalTo(entityProfileResponse.getLastActiveMs())); + assertThat(readResponse.getTotalUpdates(), equalTo(entityProfileResponse.getTotalUpdates())); + } + + @SuppressWarnings("serial") + private void setUpProfileResponse() { + String node1 = "node1"; + String nodeName1 = "nodename1"; + DiscoveryNode discoveryNode1_1 = new DiscoveryNode( + nodeName1, + node1, + new TransportAddress(TransportAddress.META_ADDRESS, 9300), + emptyMap(), + emptySet(), + Version.V_2_1_0 + ); + + String node2 = "node2"; + String nodeName2 = "nodename2"; + DiscoveryNode discoveryNode2 = new DiscoveryNode( + nodeName2, + node2, + new TransportAddress(TransportAddress.META_ADDRESS, 9301), + emptyMap(), + emptySet(), + Version.V_2_1_0 + ); + + String model1Id = "model1"; + String model2Id = "model2"; + + Map modelSizeMap1 = new HashMap() { + { + put(model1Id, modelSize); + put(model2Id, modelSize2); + } + }; + Map modelSizeMap2 = new HashMap(); + + int shingleSize = 8; + + ModelProfile modelProfile = new ModelProfile(model1Id, entity, modelSize); + ModelProfile modelProfile2 = new ModelProfile(model2Id, entity2, modelSize2); + + ProfileNodeResponse profileNodeResponse1 = new ProfileNodeResponse( + discoveryNode1_1, + modelSizeMap1, + shingleSize, + 0, + 0, + Arrays.asList(modelProfile, modelProfile2), + modelSizeMap1.size() + ); + ProfileNodeResponse profileNodeResponse2 = new ProfileNodeResponse( + discoveryNode2, + modelSizeMap2, + -1, + 0, + 0, + new ArrayList<>(), + modelSizeMap2.size() + ); + ProfileNodeResponse profileNodeResponse3 = 
new ProfileNodeResponse( + discoveryNode2, + null, + -1, + 0, + 0, + // null model size. Test if we can handle this case + null, + modelSizeMap2.size() + ); + List profileNodeResponses = Arrays.asList(profileNodeResponse1, profileNodeResponse2, profileNodeResponse3); + List failures = Collections.emptyList(); + + ClusterName clusterName = new ClusterName("test-cluster-name"); + profileResponse = new ProfileResponse(clusterName, profileNodeResponses, failures); + } + + /** + * @throws IOException when serialization/deserialization has issues. + */ + public void testDeserializeProfileResponse() throws IOException { + setUpProfileResponse(); + + profileResponse.writeTo(output); + + StreamInput streamInput = output.bytes().streamInput(); + ProfileResponse readResponse = new ProfileResponse(streamInput); + assertThat(readResponse.getModelProfile(), equalTo(profileResponse.getModelProfile())); + assertThat(readResponse.getShingleSize(), equalTo(profileResponse.getShingleSize())); + assertThat(readResponse.getActiveEntities(), equalTo(profileResponse.getActiveEntities())); + assertThat(readResponse.getTotalUpdates(), equalTo(profileResponse.getTotalUpdates())); + assertThat(readResponse.getCoordinatingNode(), equalTo(profileResponse.getCoordinatingNode())); + assertThat(readResponse.getTotalSizeInBytes(), equalTo(profileResponse.getTotalSizeInBytes())); + assertThat(readResponse.getModelCount(), equalTo(profileResponse.getModelCount())); + } + + private void setUpRCFResultResponse() { + rcfResultResponse = new RCFResultResponse( + 0.345, + 0.123, + 30, + new double[] { 0.3, 0.7 }, + 134, + 0.4, + Version.CURRENT, + randomIntBetween(-3, 0), + new double[] { randomDoubleBetween(0, 1.0, true), randomDoubleBetween(0, 1.0, true) }, + new double[][] { new double[] { randomDouble(), randomDouble() } }, + new double[] { randomDoubleBetween(0, 1.0, true), randomDoubleBetween(0, 1.0, true) }, + randomDoubleBetween(1.1, 10.0, true) + ); + } + + /** + * @throws IOException when 
serialization/deserialization has issues. + */ + public void testDeserializeRCFResultResponse() throws IOException { + setUpRCFResultResponse(); + + rcfResultResponse.writeTo(output); + + StreamInput streamInput = output.bytes().streamInput(); + RCFResultResponse readResponse = new RCFResultResponse(streamInput); + assertArrayEquals(readResponse.getAttribution(), rcfResultResponse.getAttribution(), 0.001); + assertThat(readResponse.getConfidence(), equalTo(rcfResultResponse.getConfidence())); + assertThat(readResponse.getForestSize(), equalTo(rcfResultResponse.getForestSize())); + assertThat(readResponse.getTotalUpdates(), equalTo(rcfResultResponse.getTotalUpdates())); + assertThat(readResponse.getRCFScore(), equalTo(rcfResultResponse.getRCFScore())); + } +} diff --git a/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java b/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java index dca8ac43..2f928a8d 100644 --- a/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java @@ -50,6 +50,8 @@ import org.opensearch.ad.transport.EntityProfileAction; import org.opensearch.ad.transport.EntityProfileResponse; import org.opensearch.client.Client; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.io.stream.StreamInput; import org.opensearch.index.IndexNotFoundException; import org.opensearch.search.DocValueFormat; import org.opensearch.search.SearchHit; @@ -135,6 +137,7 @@ public void setUp() throws Exception { }).when(client).get(any(), any()); entity = Entity.createSingleAttributeEntity(categoryField, entityValue); + modelId = entity.getModelId(detectorId).get(); } @SuppressWarnings("unchecked") @@ -294,6 +297,7 @@ public void testEmptyProfile() throws InterruptedException { public void testModel() throws InterruptedException { setUpExecuteEntityProfileAction(InittedEverResultStatus.INITTED); EntityProfile.Builder expectedProfile = new 
EntityProfile.Builder(); + ModelProfileOnNode modelProfile = new ModelProfileOnNode(nodeId, new ModelProfile(modelId, entity, modelSize)); expectedProfile.modelProfile(modelProfile); final CountDownLatch inProgressLatch = new CountDownLatch(1); @@ -307,6 +311,18 @@ public void testModel() throws InterruptedException { assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS)); } + public void testEmptyModelProfile() throws IOException { + ModelProfile modelProfile = new ModelProfile(modelId, null, modelSize); + BytesStreamOutput output = new BytesStreamOutput(); + modelProfile.writeTo(output); + StreamInput streamInput = output.bytes().streamInput(); + ModelProfile readResponse = new ModelProfile(streamInput); + assertEquals("serialization has the wrong model id", modelId, readResponse.getModelId()); + assertTrue("serialization has null entity", null == readResponse.getEntity()); + assertEquals("serialization has the wrong model size", modelSize, readResponse.getModelSizeInBytes()); + + } + @SuppressWarnings("unchecked") public void testJobIndexNotFound() throws InterruptedException { setUpExecuteEntityProfileAction(InittedEverResultStatus.INITTED); diff --git a/src/test/java/org/opensearch/ad/cluster/ADVersionUtilTests.java b/src/test/java/org/opensearch/ad/cluster/ADVersionUtilTests.java index 344d9e46..aa5fcc55 100644 --- a/src/test/java/org/opensearch/ad/cluster/ADVersionUtilTests.java +++ b/src/test/java/org/opensearch/ad/cluster/ADVersionUtilTests.java @@ -17,11 +17,11 @@ public class ADVersionUtilTests extends ADUnitTestCase { public void testParseVersionFromString() { - Version version = ADVersionUtil.fromString("1.1.0.0"); - assertEquals(Version.V_1_1_0, version); + Version version = ADVersionUtil.fromString("2.1.0.0"); + assertEquals(Version.V_2_1_0, version); - version = ADVersionUtil.fromString("1.1.0"); - assertEquals(Version.V_1_1_0, version); + version = ADVersionUtil.fromString("2.1.0"); + assertEquals(Version.V_2_1_0, version); } public void 
testParseVersionFromStringWithNull() { @@ -31,9 +31,4 @@ public void testParseVersionFromStringWithNull() { public void testParseVersionFromStringWithWrongFormat() { expectThrows(IllegalArgumentException.class, () -> ADVersionUtil.fromString("1.1")); } - - public void testCompatibleWithVersionOnOrAfter1_1() { - assertTrue(ADVersionUtil.compatibleWithVersionOnOrAfter1_1(Version.V_1_1_0)); - assertFalse(ADVersionUtil.compatibleWithVersionOnOrAfter1_1(Version.V_1_0_0)); - } } diff --git a/src/test/java/org/opensearch/ad/cluster/HashRingTests.java b/src/test/java/org/opensearch/ad/cluster/HashRingTests.java index dbfc0e26..21cde5f8 100644 --- a/src/test/java/org/opensearch/ad/cluster/HashRingTests.java +++ b/src/test/java/org/opensearch/ad/cluster/HashRingTests.java @@ -142,10 +142,10 @@ public void testGetOwningNode() throws UnknownHostException { assertEquals( "Wrong hash ring size for historical analysis", 2, - hashRing.getNodesWithSameAdVersion(Version.V_1_1_0, false).size() + hashRing.getNodesWithSameAdVersion(Version.V_2_1_0, false).size() ); // Circles for realtime AD will change as it's eligible to build for when its empty - assertEquals("Wrong hash ring size for realtime AD", 2, hashRing.getNodesWithSameAdVersion(Version.V_1_1_0, true).size()); + assertEquals("Wrong hash ring size for realtime AD", 2, hashRing.getNodesWithSameAdVersion(Version.V_2_1_0, true).size()); }, e -> { logger.error("building hash ring failed", e); assertFalse("Build hash ring failed", true); @@ -161,10 +161,10 @@ public void testGetOwningNode() throws UnknownHostException { assertEquals( "Wrong hash ring size for historical analysis", 3, - hashRing.getNodesWithSameAdVersion(Version.V_1_1_0, false).size() + hashRing.getNodesWithSameAdVersion(Version.V_2_1_0, false).size() ); // Circles for realtime AD will not change as it's eligible to rebuild - assertEquals("Wrong hash ring size for realtime AD", 2, hashRing.getNodesWithSameAdVersion(Version.V_1_1_0, true).size()); + 
assertEquals("Wrong hash ring size for realtime AD", 2, hashRing.getNodesWithSameAdVersion(Version.V_2_1_0, true).size()); }, e -> { logger.error("building hash ring failed", e); @@ -182,9 +182,9 @@ public void testGetOwningNode() throws UnknownHostException { assertEquals( "Wrong hash ring size for historical analysis", 4, - hashRing.getNodesWithSameAdVersion(Version.V_1_1_0, false).size() + hashRing.getNodesWithSameAdVersion(Version.V_2_1_0, false).size() ); - assertEquals("Wrong hash ring size for realtime AD", 4, hashRing.getNodesWithSameAdVersion(Version.V_1_1_0, true).size()); + assertEquals("Wrong hash ring size for realtime AD", 4, hashRing.getNodesWithSameAdVersion(Version.V_2_1_0, true).size()); }, e -> { logger.error("building hash ring failed", e); assertFalse("Failed to build hash ring", true); @@ -235,7 +235,7 @@ private void setupClusterAdminClient(DiscoveryNode... nodes) { ActionListener listener = invocation.getArgument(1); List nodeInfos = new ArrayList<>(); for (DiscoveryNode node : nodes) { - nodeInfos.add(createNodeInfo(node, "1.1.0.0")); + nodeInfos.add(createNodeInfo(node, "2.1.0.0")); } NodesInfoResponse nodesInfoResponse = new NodesInfoResponse(ClusterName.DEFAULT, nodeInfos, ImmutableList.of()); listener.onResponse(nodesInfoResponse); diff --git a/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java b/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java index ffd9a34a..dd4aeb5d 100644 --- a/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java @@ -165,12 +165,8 @@ public void testSerializeResponse() throws IOException { List adTaskProfileNodeResponses = response.readNodesFrom(input); assertEquals(1, adTaskProfileNodeResponses.size()); ADTaskProfileNodeResponse parsedProfile = adTaskProfileNodeResponses.get(0); - if (Version.CURRENT.onOrBefore(Version.V_1_0_0)) { - assertEquals(profile.getNodeId(), 
parsedProfile.getAdTaskProfile().getNodeId()); - assertNull(parsedProfile.getAdTaskProfile().getTaskId()); - } else { - assertEquals(profile.getTaskId(), parsedProfile.getAdTaskProfile().getTaskId()); - } + + assertEquals(profile.getTaskId(), parsedProfile.getAdTaskProfile().getTaskId()); } public void testADTaskProfileParseFullConstructor() throws IOException { diff --git a/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java index ecc869c1..46df7603 100644 --- a/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java @@ -21,7 +21,6 @@ import org.junit.Assert; import org.junit.Before; -import org.junit.BeforeClass; import org.opensearch.Version; import org.opensearch.action.support.ActionFilters; import org.opensearch.ad.AbstractADTest; @@ -33,7 +32,6 @@ import org.opensearch.ad.ml.EntityColdStarter; import org.opensearch.ad.ml.ModelManager; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.Bwc; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; @@ -54,11 +52,6 @@ public class CronTransportActionTests extends AbstractADTest { private CronTransportAction action; private String localNodeID; - @BeforeClass - public static void setUpBeforeClass() { - Bwc.DISABLE_BWC = false; - } - @Override @Before public void setUp() throws Exception { @@ -100,10 +93,10 @@ public void testNormal() throws IOException, JsonPathNotFoundException { CronNodeRequest nodeRequest = new CronNodeRequest(); BytesStreamOutput nodeRequestOut = new BytesStreamOutput(); - nodeRequestOut.setVersion(Version.V_1_0_0); + nodeRequestOut.setVersion(Version.V_2_0_0); nodeRequest.writeTo(nodeRequestOut); StreamInput siNode = nodeRequestOut.bytes().streamInput(); - siNode.setVersion(Version.V_1_0_0); + 
siNode.setVersion(Version.V_2_0_0); CronNodeRequest nodeResponseRead = new CronNodeRequest(siNode); diff --git a/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java b/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java index 668c8577..c0b70219 100644 --- a/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java +++ b/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java @@ -54,9 +54,9 @@ protected NamedWriteableRegistry writableRegistry() { return getInstanceFromNode(NamedWriteableRegistry.class); } - public void testUnsupportedVersion() throws IOException { + public void testNullVersion() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableList.of()); - expectThrows(ADVersionException.class, () -> new ForwardADTaskRequest(detector, null, null, null, null, Version.V_1_0_0)); + expectThrows(ADVersionException.class, () -> new ForwardADTaskRequest(detector, null, null, null, null, null)); } public void testNullDetectorIdAndTaskAction() throws IOException { @@ -79,7 +79,7 @@ public void testNullDetectorIdAndTaskAction() throws IOException { randomUser(), null ); - ForwardADTaskRequest request = new ForwardADTaskRequest(detector, null, null, null, null, Version.V_1_1_0); + ForwardADTaskRequest request = new ForwardADTaskRequest(detector, null, null, null, null, Version.V_2_1_0); ActionRequestValidationException validate = request.validate(); assertEquals("Validation Failed: 1: AD ID is missing;2: AD task action is missing;", validate.getMessage()); } diff --git a/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java b/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java index aa3c182f..45476187 100644 --- a/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java +++ b/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java @@ -56,7 +56,6 @@ import org.mockito.ArgumentCaptor; import 
org.mockito.ArgumentMatcher; import org.mockito.stubbing.Answer; -import org.opensearch.BwcTests; import org.opensearch.OpenSearchTimeoutException; import org.opensearch.Version; import org.opensearch.action.ActionListener; @@ -100,7 +99,6 @@ import org.opensearch.ad.stats.StatNames; import org.opensearch.ad.stats.suppliers.CounterSupplier; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.Bwc; import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; @@ -176,7 +174,6 @@ public class MultiEntityResultTests extends AbstractADTest { @BeforeClass public static void setUpBeforeClass() { setUpThreadPool(AnomalyResultTests.class.getSimpleName()); - Bwc.DISABLE_BWC = false; } @AfterClass @@ -347,7 +344,6 @@ private TransportResponseHandler entityResultHa return new TransportResponseHandler() { @Override public T read(StreamInput in) throws IOException { - in.setVersion(BwcTests.V_1_1_0); return handler.read(in); } @@ -373,7 +369,6 @@ private TransportResponseHandler unackEntityRes return new TransportResponseHandler() { @Override public T read(StreamInput in) throws IOException { - in.setVersion(BwcTests.V_1_1_0); return handler.read(in); } @@ -677,7 +672,7 @@ public void sendRequest( // we start support multi-category fields since 1.1 // Set version to 1.1 will force the outbound/inbound message to use 1.1 version - setupTestNodes(entityResultInterceptor, 5, settings, BwcTests.V_1_1_0, MAX_ENTITIES_PER_QUERY, PAGE_SIZE); + setupTestNodes(entityResultInterceptor, 5, settings, Version.V_2_0_0, MAX_ENTITIES_PER_QUERY, PAGE_SIZE); TransportService realTransportService = testNodes[0].transportService; ClusterService realClusterService = testNodes[0].clusterService; diff --git a/src/test/java/test/org/opensearch/ad/util/FakeNode.java b/src/test/java/test/org/opensearch/ad/util/FakeNode.java index 1955d0d3..1af160f9 100644 --- 
a/src/test/java/test/org/opensearch/ad/util/FakeNode.java +++ b/src/test/java/test/org/opensearch/ad/util/FakeNode.java @@ -28,7 +28,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Logger; import org.apache.lucene.util.SetOnce; -import org.opensearch.BwcTests; import org.opensearch.Version; import org.opensearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.opensearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; @@ -75,7 +74,7 @@ public FakeNode( Settings.EMPTY, new MockNioTransport( Settings.EMPTY, - BwcTests.V_1_1_0, + Version.V_2_1_0, threadPool, new NetworkService(Collections.emptyList()), PageCacheRecycler.NON_RECYCLING_INSTANCE,