Commit
[Remove] Version.V_1_ constants
Removes all usages of the Version.V_1_ version constants, along with the ancient
API logic that was gated on them.

Signed-off-by: Nicholas Walter Knize <[email protected]>
nknize committed Nov 1, 2022
1 parent f0aed87 commit 17db2de
Showing 33 changed files with 93 additions and 329 deletions.
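
Most of the deletions in this commit follow one pattern: wire-serialization code that branched on a V_1_* constant collapses to an unconditional read or write, because once every wire-compatible peer is on 2.0.0 or later a check like onOrAfter(Version.V_1_2_0) can no longer be false. The following is a rough, self-contained analogy of that guard, using plain java.io and hypothetical class and field names rather than the OpenSearch StreamInput/StreamOutput API; only the numeric version ids are taken from the Version.java constants in this diff.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Analogy of the version-gated optional field this commit removes from NodeStats and friends.
    public class VersionGatedFieldSketch {
        // Numeric ids copied from the Version.java constants shown in this diff.
        static final int V_1_2_0 = 1020099;
        static final int V_2_0_0 = 2000099;

        // Writer side: before this commit, peers older than 1.2.0 never saw the optional field.
        static void write(DataOutputStream out, int peerVersion, Long optionalStat) throws IOException {
            if (peerVersion >= V_1_2_0) {               // the kind of guard being deleted
                out.writeBoolean(optionalStat != null); // presence flag
                if (optionalStat != null) {
                    out.writeLong(optionalStat);
                }
            }
        }

        // Reader side: mirrors the writer's guard; older peers simply leave the field null.
        static Long read(DataInputStream in, int peerVersion) throws IOException {
            if (peerVersion < V_1_2_0) {
                return null;
            }
            return in.readBoolean() ? in.readLong() : null;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            write(new DataOutputStream(buf), V_2_0_0, 42L);
            Long stat = read(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())), V_2_0_0);
            // With the 1.x constants gone, peerVersion >= V_1_2_0 is always true, so the branches
            // become dead code and the field can be read and written unconditionally, which is
            // exactly what the hunks below do.
            System.out.println(stat); // 42
        }
    }
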
@@ -46,21 +46,16 @@
import static org.hamcrest.Matchers.equalTo;

public class MainResponseTests extends AbstractResponseTestCase<org.opensearch.action.main.MainResponse, MainResponse> {
private static String DISTRIBUTION = "opensearch";

@Override
protected org.opensearch.action.main.MainResponse createServerTestInstance(XContentType xContentType) {
String clusterUuid = randomAlphaOfLength(10);
ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
String nodeName = randomAlphaOfLength(10);
final String date = new Date(randomNonNegativeLong()).toString();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Build build = new Build(
Build.Type.UNKNOWN,
randomAlphaOfLength(8),
date,
randomBoolean(),
version.toString(),
version.before(Version.V_1_0_0) ? null : "opensearch"
);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Build build = new Build(Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(), version.toString(), DISTRIBUTION);
return new org.opensearch.action.main.MainResponse(nodeName, version, clusterName, clusterUuid, build);
}

@@ -268,11 +268,18 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> {
deprecationLogger.deprecate(
"edgeNGram_deprecation",
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead."
);
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] tokenizer name was deprecated pre 1.0. "
+ "Please use the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead."
);
} else {
deprecationLogger.deprecate(
"edgeNGram_deprecation",
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead."
);
}
return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings);
});
filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new));
@@ -45,7 +45,6 @@
import org.opensearch.indices.analysis.AnalysisModule;
import org.opensearch.test.OpenSearchTokenStreamTestCase;
import org.opensearch.test.IndexSettingsModule;
import org.opensearch.test.VersionUtils;

import java.io.IOException;
import java.io.StringReader;
@@ -76,21 +75,6 @@ public void testPreConfiguredTokenizer() throws IOException {
}
}

// Check deprecated name as well, needs version before 8.0 because throws IAE after that
{
try (
IndexAnalyzers indexAnalyzers = buildAnalyzers(
VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT),
"edgeNGram"
)
) {
NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
assertNotNull(analyzer);
assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" });

}
}

}

public void testCustomTokenChars() throws IOException {
@@ -230,7 +230,7 @@ public void testChainedSynonymFilters() throws IOException {
public void testShingleFilters() {

Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonyms.type", "synonym")
.putList("index.analysis.filter.synonyms.synonyms", "programmer, developer")
@@ -289,7 +289,7 @@ public void testPreconfiguredTokenFilters() throws IOException {
);

Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
@@ -313,7 +313,7 @@ public void testPreconfiguredTokenFilters() throws IOException {
public void testDisallowedTokenFilters() throws IOException {

Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
@@ -64,7 +64,7 @@ public void testDisallowedWithSynonyms() throws IOException {
AnalysisPhoneticPlugin plugin = new AnalysisPhoneticPlugin();

Settings settings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT))
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT))
.put("path.home", createTempDir().toString())
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
14 changes: 0 additions & 14 deletions server/src/main/java/org/opensearch/Version.java
@@ -75,20 +75,6 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final int V_EMPTY_ID = 0;
public static final Version V_EMPTY = new Version(V_EMPTY_ID, org.apache.lucene.util.Version.LATEST);

public static final Version V_1_0_0 = new Version(1000099, org.apache.lucene.util.Version.LUCENE_8_8_2);
public static final Version V_1_1_0 = new Version(1010099, org.apache.lucene.util.Version.LUCENE_8_9_0);
public static final Version V_1_2_0 = new Version(1020099, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_2_1 = new Version(1020199, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_2_2 = new Version(1020299, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_2_3 = new Version(1020399, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_2_4 = new Version(1020499, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_2_5 = new Version(1020599, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_0 = new Version(1030099, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_1 = new Version(1030199, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_2 = new Version(1030299, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_3 = new Version(1030399, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_4 = new Version(1030499, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_1_3_5 = new Version(1030599, org.apache.lucene.util.Version.LUCENE_8_10_1);
public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_1_0);
public static final Version V_2_0_1 = new Version(2000199, org.apache.lucene.util.Version.LUCENE_9_1_0);
public static final Version V_2_0_2 = new Version(2000299, org.apache.lucene.util.Version.LUCENE_9_1_0);
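
For readers decoding the numeric ids in the constants above: OpenSearch appears to pack a release version into a single integer as major * 1,000,000 + minor * 10,000 + patch * 100, with a trailing 99 marking a release build, so the removed 1030599 reads as 1.3.5 and the surviving 2000099 as 2.0.0. The following stand-alone snippet checks that reading; the decode helper is illustrative only and not an OpenSearch API.

    // Illustrative decoder for the numeric ids used by the Version constants above; not an OpenSearch API.
    public class VersionIdSketch {
        static String decode(int id) {
            int major = id / 1_000_000;
            int minor = (id / 10_000) % 100;
            int patch = (id / 100) % 100;
            return major + "." + minor + "." + patch;
        }

        public static void main(String[] args) {
            System.out.println(decode(1030599)); // 1.3.5 (removed in this commit)
            System.out.println(decode(2000099)); // 2.0.0 (still present)
        }
    }
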
@@ -32,7 +32,6 @@

package org.opensearch.action.admin.cluster.node.stats;

import org.opensearch.Version;
import org.opensearch.action.support.nodes.BaseNodeResponse;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodeRole;
@@ -141,12 +140,7 @@ public NodeStats(StreamInput in) throws IOException {
scriptCacheStats = scriptStats.toScriptCacheStats();
}
indexingPressureStats = in.readOptionalWriteable(IndexingPressureStats::new);
if (in.getVersion().onOrAfter(Version.V_1_2_0)) {
shardIndexingPressureStats = in.readOptionalWriteable(ShardIndexingPressureStats::new);
} else {
shardIndexingPressureStats = null;
}

shardIndexingPressureStats = in.readOptionalWriteable(ShardIndexingPressureStats::new);
}

public NodeStats(
@@ -319,9 +313,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalWriteable(ingestStats);
out.writeOptionalWriteable(adaptiveSelectionStats);
out.writeOptionalWriteable(indexingPressureStats);
if (out.getVersion().onOrAfter(Version.V_1_2_0)) {
out.writeOptionalWriteable(shardIndexingPressureStats);
}
out.writeOptionalWriteable(shardIndexingPressureStats);
}

@Override
@@ -87,10 +87,8 @@ public CommonStatsFlags(StreamInput in) throws IOException {
completionDataFields = in.readStringArray();
includeSegmentFileSizes = in.readBoolean();
includeUnloadedSegments = in.readBoolean();
if (in.getVersion().onOrAfter(Version.V_1_2_0)) {
includeAllShardIndexingPressureTrackers = in.readBoolean();
includeOnlyTopIndexingPressureMetrics = in.readBoolean();
}
includeAllShardIndexingPressureTrackers = in.readBoolean();
includeOnlyTopIndexingPressureMetrics = in.readBoolean();
}

@Override
@@ -109,10 +107,8 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeStringArrayNullable(completionDataFields);
out.writeBoolean(includeSegmentFileSizes);
out.writeBoolean(includeUnloadedSegments);
if (out.getVersion().onOrAfter(Version.V_1_2_0)) {
out.writeBoolean(includeAllShardIndexingPressureTrackers);
out.writeBoolean(includeOnlyTopIndexingPressureMetrics);
}
out.writeBoolean(includeAllShardIndexingPressureTrackers);
out.writeBoolean(includeOnlyTopIndexingPressureMetrics);
}

/**
@@ -247,10 +247,7 @@ public SearchRequest(StreamInput in) throws IOException {
finalReduce = true;
}
ccsMinimizeRoundtrips = in.readBoolean();

if (in.getVersion().onOrAfter(Version.V_1_1_0)) {
cancelAfterTimeInterval = in.readOptionalTimeValue();
}
cancelAfterTimeInterval = in.readOptionalTimeValue();
}

@Override
@@ -278,10 +275,7 @@ public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(finalReduce);
}
out.writeBoolean(ccsMinimizeRoundtrips);

if (out.getVersion().onOrAfter(Version.V_1_1_0)) {
out.writeOptionalTimeValue(cancelAfterTimeInterval);
}
out.writeOptionalTimeValue(cancelAfterTimeInterval);
}

@Override
@@ -312,37 +312,6 @@ private void refreshDiscoveryNodeVersionAfterUpgrade(DiscoveryNodes currentNodes
// this logic is only applicable when OpenSearch node is cluster-manager and is noop for zen discovery node
return;
}
if (currentNodes.getMinNodeVersion().before(Version.V_1_0_0)) {
Map<String, Version> channelVersions = transportService.getChannelVersion(currentNodes);
for (DiscoveryNode node : currentNodes) {
if (channelVersions.containsKey(node.getId())) {
if (channelVersions.get(node.getId()) != node.getVersion()) {
DiscoveryNode tmpNode = nodesBuilder.get(node.getId());
nodesBuilder.remove(node.getId());
nodesBuilder.add(
new DiscoveryNode(
tmpNode.getName(),
tmpNode.getId(),
tmpNode.getEphemeralId(),
tmpNode.getHostName(),
tmpNode.getHostAddress(),
tmpNode.getAddress(),
tmpNode.getAttributes(),
tmpNode.getRoles(),
channelVersions.get(tmpNode.getId())
)
);
logger.info(
"Refreshed the DiscoveryNode version for node {}:{} from {} to {}",
node.getId(),
node.getAddress(),
node.getVersion(),
channelVersions.get(tmpNode.getId())
);
}
}
}
}
}

@Override
@@ -31,7 +31,6 @@

package org.opensearch.cluster.health;

import org.opensearch.Version;
import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.cluster.routing.IndexRoutingTable;
@@ -154,11 +153,7 @@ public ClusterStateHealth(final StreamInput in) throws IOException {
unassignedShards = in.readVInt();
numberOfNodes = in.readVInt();
numberOfDataNodes = in.readVInt();
if (in.getVersion().onOrAfter(Version.V_1_0_0)) {
hasDiscoveredClusterManager = in.readBoolean();
} else {
hasDiscoveredClusterManager = true;
}
hasDiscoveredClusterManager = in.readBoolean();
status = ClusterHealthStatus.fromValue(in.readByte());
int size = in.readVInt();
indices = new HashMap<>(size);
@@ -262,9 +257,7 @@ public void writeTo(final StreamOutput out) throws IOException {
out.writeVInt(unassignedShards);
out.writeVInt(numberOfNodes);
out.writeVInt(numberOfDataNodes);
if (out.getVersion().onOrAfter(Version.V_1_0_0)) {
out.writeBoolean(hasDiscoveredClusterManager);
}
out.writeBoolean(hasDiscoveredClusterManager);
out.writeByte(status.value());
out.writeVInt(indices.size());
for (ClusterIndexHealth indexHealth : this) {
@@ -32,7 +32,6 @@

package org.opensearch.cluster.metadata;

import org.opensearch.Version;
import org.opensearch.cluster.AbstractDiffable;
import org.opensearch.cluster.Diff;
import org.opensearch.cluster.metadata.DataStream.TimestampField;
@@ -312,11 +311,7 @@ public DataStreamTemplate(TimestampField timestampField) {
}

public DataStreamTemplate(StreamInput in) throws IOException {
if (in.getVersion().onOrAfter(Version.V_1_0_0)) {
this.timestampField = in.readOptionalWriteable(TimestampField::new);
} else {
this.timestampField = DataStreamFieldMapper.Defaults.TIMESTAMP_FIELD;
}
this.timestampField = in.readOptionalWriteable(TimestampField::new);
}

public TimestampField getTimestampField() {
@@ -335,9 +330,7 @@ public Map<String, Object> getDataStreamMappingSnippet() {

@Override
public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_1_0_0)) {
out.writeOptionalWriteable(timestampField);
}
out.writeOptionalWriteable(timestampField);
}

@Override
