diff --git a/CHANGELOG.md b/CHANGELOG.md index e7fcf5ee6b35c..d4af6303aec95 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) ([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625)) - [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887)) - [S3 Repository] Add setting to control connection count for sync client ([#12028](https://github.com/opensearch-project/OpenSearch/pull/12028)) +- Add optional section of node analyzers into NodeInfo ([#10296](https://github.com/opensearch-project/OpenSearch/pull/10296)) ### Dependencies - Bump `log4j-core` from 2.18.0 to 2.19.0 @@ -59,6 +60,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/)) - Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894)) - Switched to more reliable OpenSearch Lucene snapshot location([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728)) +- Breaking change: "search_pipelines" metric is not included in NodesInfoRequest by default ([#10296](https://github.com/opensearch-project/OpenSearch/pull/10296)) ### Deprecated diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json index 37279edd3106f..eb70c0327a938 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/nodes.info.json @@ -42,7 +42,8 @@ "transport", "http", "plugins", - "ingest" + "ingest", + "analysis_components" ], "description":"A comma-separated list of metrics you wish returned. Leave empty to return all." } @@ -69,7 +70,8 @@ "transport", "http", "plugins", - "ingest" + "ingest", + "analysis_components" ], "description":"A comma-separated list of metrics you wish returned. Leave empty to return all." } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.info/50_analyzis_components.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.info/50_analyzis_components.yml new file mode 100644 index 0000000000000..8cfff3b27f1b8 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.info/50_analyzis_components.yml @@ -0,0 +1,24 @@ +--- +"node_info test analysis_components": + - skip: + version: " - 2.99.99" + reason: "analysis_components metric was introduced in 3.0.0" + features: [arbitrary_key] + + - do: + nodes.info: + metric: [ analysis_components ] + + # Note: It will only stash the first node_id in the api response. 
+ - set: + nodes._arbitrary_key_: node_id + + - is_true: nodes + - is_true: cluster_name + + - is_true: nodes.$node_id.analysis_components.analyzers + - is_true: nodes.$node_id.analysis_components.tokenizers + - is_true: nodes.$node_id.analysis_components.tokenFilters + - is_true: nodes.$node_id.analysis_components.charFilters + - is_true: nodes.$node_id.analysis_components.normalizers + - is_true: nodes.$node_id.analysis_components.plugins diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeAnalysisComponents.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeAnalysisComponents.java new file mode 100644 index 0000000000000..bee5fa6924db0 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeAnalysisComponents.java @@ -0,0 +1,322 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.admin.cluster.node.info; + +import org.opensearch.common.collect.Tuple; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.service.ReportingService; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.analysis.AnalysisRegistry; +import org.opensearch.plugins.AnalysisPlugin; +import org.opensearch.plugins.PluginInfo; +import org.opensearch.plugins.PluginsService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.stream.Collectors; + +import static java.util.Collections.unmodifiableList; +import static java.util.Collections.unmodifiableSortedSet; + +/** + * Information about node analysis components. + * + * Every node in the cluster contains several analysis components. Some are preconfigured, the rest come from + * {@link AnalysisPlugin}s installed on this node (such as org.opensearch.analysis.common.CommonAnalysisModulePlugin). 
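+ *
+ * These components are reported by the nodes info API under the {@code analysis_components} metric.
+ * Because that metric is not part of the default metric set it has to be requested explicitly, for
+ * example (illustrative): {@code new NodesInfoRequest().clear().addMetric("analysis_components")},
+ * or {@code GET _nodes/analysis_components} over REST.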
+ * + * @see org.opensearch.index.analysis.AnalysisRegistry + * @see org.opensearch.indices.analysis.AnalysisModule + * + * @opensearch.internal + * @opensearch.experimental + */ +public class NodeAnalysisComponents implements ReportingService.Info { + + private final SortedSet analyzersIds; + + private final SortedSet tokenizersIds; + + private final SortedSet tokenFiltersIds; + + private final SortedSet charFiltersIds; + + private final SortedSet normalizersIds; + + private final List nodeAnalysisPlugins; + + public SortedSet getAnalyzersIds() { + return this.analyzersIds; + } + + public SortedSet getTokenizersIds() { + return this.tokenizersIds; + } + + public SortedSet getTokenFiltersIds() { + return this.tokenFiltersIds; + } + + public SortedSet getCharFiltersIds() { + return this.charFiltersIds; + } + + public SortedSet getNormalizersIds() { + return this.normalizersIds; + } + + public List getNodeAnalysisPlugins() { + return nodeAnalysisPlugins; + } + + public NodeAnalysisComponents(AnalysisRegistry analysisRegistry, PluginsService pluginsService) { + List nodeAnalysisPlugins = new ArrayList<>(); + List> analysisPlugins = pluginsService.filterPluginsForPluginInfo(AnalysisPlugin.class); + for (Tuple plugin : analysisPlugins) { + nodeAnalysisPlugins.add( + new NodeAnalysisComponents.AnalysisPluginComponents( + plugin.v1().getName(), + plugin.v1().getClassname(), + plugin.v2().getAnalyzers().keySet(), + plugin.v2().getTokenizers().keySet(), + plugin.v2().getTokenFilters().keySet(), + plugin.v2().getCharFilters().keySet(), + plugin.v2().getHunspellDictionaries().keySet() + ) + ); + } + this.analyzersIds = ensureSorted(analysisRegistry.getNodeAnalyzersKeys()); + this.tokenizersIds = ensureSorted(analysisRegistry.getNodeTokenizersKeys()); + this.tokenFiltersIds = ensureSorted(analysisRegistry.getNodeTokenFiltersKeys()); + this.charFiltersIds = ensureSorted(analysisRegistry.getNodeCharFiltersKeys()); + this.normalizersIds = ensureSorted(analysisRegistry.getNodeNormalizersKeys()); + this.nodeAnalysisPlugins = ensureNonEmpty(nodeAnalysisPlugins); + } + + public NodeAnalysisComponents( + final Set analyzersKeySet, + final Set tokenizersKeySet, + final Set tokenFiltersKeySet, + final Set charFiltersKeySet, + final Set normalizersKeySet, + final List nodeAnalysisPlugins + ) { + this.analyzersIds = ensureSorted(analyzersKeySet); + this.tokenizersIds = ensureSorted(tokenizersKeySet); + this.tokenFiltersIds = ensureSorted(tokenFiltersKeySet); + this.charFiltersIds = ensureSorted(charFiltersKeySet); + this.normalizersIds = ensureSorted(normalizersKeySet); + this.nodeAnalysisPlugins = ensureNonEmpty(nodeAnalysisPlugins); + } + + /** + * This class represents analysis components provided by {@link org.opensearch.plugins.AnalysisPlugin}. + * There can be several plugins (or modules) installed on each cluster node. 
+ */ + public static class AnalysisPluginComponents implements Comparable, Writeable { + + private final String pluginName; + private final String className; + private final SortedSet analyzersIds; + private final SortedSet tokenizersIds; + private final SortedSet tokenFiltersIds; + private final SortedSet charFiltersIds; + private final SortedSet hunspellDictionaries; + + public AnalysisPluginComponents( + final String pluginName, + final String className, + final Set analyzersIds, + final Set tokenizersIds, + final Set tokenFiltersIds, + final Set charFiltersIds, + final Set hunspellDictionaries + ) { + this.pluginName = pluginName; + this.className = className; + this.analyzersIds = unmodifiableSortedSet(new TreeSet<>(analyzersIds)); + this.tokenizersIds = unmodifiableSortedSet(new TreeSet<>(tokenizersIds)); + this.tokenFiltersIds = unmodifiableSortedSet(new TreeSet<>(tokenFiltersIds)); + this.charFiltersIds = unmodifiableSortedSet(new TreeSet<>(charFiltersIds)); + this.hunspellDictionaries = unmodifiableSortedSet(new TreeSet<>(hunspellDictionaries)); + } + + public AnalysisPluginComponents(StreamInput in) throws IOException { + this.pluginName = in.readString(); + this.className = in.readString(); + this.analyzersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.tokenizersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.tokenFiltersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.charFiltersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.hunspellDictionaries = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeString(this.pluginName); + out.writeString(this.className); + out.writeStringCollection(this.analyzersIds); + out.writeStringCollection(this.tokenizersIds); + out.writeStringCollection(this.tokenFiltersIds); + out.writeStringCollection(this.charFiltersIds); + out.writeStringCollection(this.hunspellDictionaries); + } + + private static final Comparator nullSafeStringComparator = Comparator.nullsFirst(String::compareTo); + + private static String concatenateItems(SortedSet items) { + return items.stream().collect(Collectors.joining()); + } + + /** + * This Comparator defines the comparison logic for sorting instances of AnalysisPluginComponents based on + * their attributes in the following order: + * + * 1. Plugin name (as specified in the plugin descriptor) + * 2. Class name + * 3. Analyzers IDs + * 4. Tokenizers IDs + * 5. TokenFilters IDs + * 6. CharFilters IDs + * 7. 
Hunspell dictionary IDs + */ + private static final Comparator pluginComponentsComparator = Comparator.comparing( + AnalysisPluginComponents::getPluginName, + nullSafeStringComparator + ) + .thenComparing(AnalysisPluginComponents::getClassName, nullSafeStringComparator) + .thenComparing(c -> concatenateItems(c.getAnalyzersIds()), nullSafeStringComparator) + .thenComparing(c -> concatenateItems(c.getTokenizersIds()), nullSafeStringComparator) + .thenComparing(c -> concatenateItems(c.getTokenFiltersIds()), nullSafeStringComparator) + .thenComparing(c -> concatenateItems(c.getCharFiltersIds()), nullSafeStringComparator) + .thenComparing(c -> concatenateItems(c.getHunspellDictionaries()), nullSafeStringComparator); + + @Override + public int compareTo(NodeAnalysisComponents.AnalysisPluginComponents o) { + return pluginComponentsComparator.compare(this, o); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalysisPluginComponents that = (AnalysisPluginComponents) o; + return Objects.equals(pluginName, that.pluginName) + && Objects.equals(className, that.className) + && Objects.equals(analyzersIds, that.analyzersIds) + && Objects.equals(tokenizersIds, that.tokenizersIds) + && Objects.equals(tokenFiltersIds, that.tokenFiltersIds) + && Objects.equals(charFiltersIds, that.charFiltersIds) + && Objects.equals(hunspellDictionaries, that.hunspellDictionaries); + } + + @Override + public int hashCode() { + return Objects.hash(pluginName, className, analyzersIds, tokenizersIds, tokenFiltersIds, charFiltersIds, hunspellDictionaries); + } + + public String getPluginName() { + return this.pluginName; + } + + public String getClassName() { + return this.className; + } + + public SortedSet getAnalyzersIds() { + return this.analyzersIds; + } + + public SortedSet getTokenizersIds() { + return this.tokenizersIds; + } + + public SortedSet getTokenFiltersIds() { + return this.tokenFiltersIds; + } + + public SortedSet getCharFiltersIds() { + return this.charFiltersIds; + } + + public SortedSet getHunspellDictionaries() { + return this.hunspellDictionaries; + } + } + + public NodeAnalysisComponents(StreamInput in) throws IOException { + this.analyzersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.tokenizersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.tokenFiltersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.charFiltersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.normalizersIds = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString))); + this.nodeAnalysisPlugins = unmodifiableList(in.readList(NodeAnalysisComponents.AnalysisPluginComponents::new)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringCollection(this.analyzersIds); + out.writeStringCollection(this.tokenizersIds); + out.writeStringCollection(this.tokenFiltersIds); + out.writeStringCollection(this.charFiltersIds); + out.writeStringCollection(this.normalizersIds); + out.writeList(this.nodeAnalysisPlugins); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("analysis_components"); + builder.field("analyzers").value(this.getAnalyzersIds()); + builder.field("tokenizers").value(this.getTokenizersIds()); + 
builder.field("tokenFilters").value(this.getTokenFiltersIds()); + builder.field("charFilters").value(this.getCharFiltersIds()); + builder.field("normalizers").value(this.getNormalizersIds()); + builder.startArray("plugins"); + for (NodeAnalysisComponents.AnalysisPluginComponents pluginComponents : this.getNodeAnalysisPlugins()) { + builder.startObject(); + builder.field("name", pluginComponents.getPluginName()); + builder.field("classname", pluginComponents.getClassName()); + builder.field("analyzers").value(pluginComponents.getAnalyzersIds()); + builder.field("tokenizers").value(pluginComponents.getTokenizersIds()); + builder.field("tokenFilters").value(pluginComponents.getTokenFiltersIds()); + builder.field("charFilters").value(pluginComponents.getCharFiltersIds()); + builder.field("hunspellDictionaries").value(pluginComponents.getHunspellDictionaries()); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + public int hashCode() { + return Objects.hash(analyzersIds, tokenizersIds, tokenFiltersIds, charFiltersIds, normalizersIds, nodeAnalysisPlugins); + } + + /** + * Ensures that a given set of strings is sorted in "natural" order. + * + * See: {@link SortedSet} + */ + private static SortedSet ensureSorted(Set stringSet) { + return stringSet == null ? Collections.emptySortedSet() : unmodifiableSortedSet(new TreeSet<>(stringSet)); + } + + private static List ensureNonEmpty( + List pluginComponents + ) { + return pluginComponents == null ? Collections.emptyList() : unmodifiableList(pluginComponents); + } +} diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java index 544fd1fb6aaf4..9db4cb8d62fe4 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodeInfo.java @@ -103,6 +103,9 @@ public NodeInfo(StreamInput in) throws IOException { if (in.getVersion().onOrAfter(Version.V_2_7_0)) { addInfoIfNonNull(SearchPipelineInfo.class, in.readOptionalWriteable(SearchPipelineInfo::new)); } + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + addInfoIfNonNull(NodeAnalysisComponents.class, in.readOptionalWriteable(NodeAnalysisComponents::new)); + } } public NodeInfo( @@ -120,7 +123,8 @@ public NodeInfo( @Nullable IngestInfo ingest, @Nullable AggregationInfo aggsInfo, @Nullable ByteSizeValue totalIndexingBuffer, - @Nullable SearchPipelineInfo searchPipelineInfo + @Nullable SearchPipelineInfo searchPipelineInfo, + @Nullable NodeAnalysisComponents nodeAnalysisComponents ) { super(node); this.version = version; @@ -136,6 +140,7 @@ public NodeInfo( addInfoIfNonNull(IngestInfo.class, ingest); addInfoIfNonNull(AggregationInfo.class, aggsInfo); addInfoIfNonNull(SearchPipelineInfo.class, searchPipelineInfo); + addInfoIfNonNull(NodeAnalysisComponents.class, nodeAnalysisComponents); this.totalIndexingBuffer = totalIndexingBuffer; } @@ -227,6 +232,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_2_7_0)) { out.writeOptionalWriteable(getInfo(SearchPipelineInfo.class)); } + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + out.writeOptionalWriteable(getInfo(NodeAnalysisComponents.class)); + } } public static NodeInfo.Builder builder(Version version, Build build, DiscoveryNode node) { @@ -259,6 +267,7 @@ private Builder(Version version, Build build, DiscoveryNode node) { private 
AggregationInfo aggsInfo; private ByteSizeValue totalIndexingBuffer; private SearchPipelineInfo searchPipelineInfo; + private NodeAnalysisComponents nodeAnalysisComponents; public Builder setSettings(Settings settings) { this.settings = settings; @@ -320,6 +329,11 @@ public Builder setSearchPipelineInfo(SearchPipelineInfo searchPipelineInfo) { return this; } + public Builder setNodeAnalysisComponents(NodeAnalysisComponents nodeAnalysisComponents) { + this.nodeAnalysisComponents = nodeAnalysisComponents; + return this; + } + public NodeInfo build() { return new NodeInfo( version, @@ -336,7 +350,8 @@ public NodeInfo build() { ingest, aggsInfo, totalIndexingBuffer, - searchPipelineInfo + searchPipelineInfo, + nodeAnalysisComponents ); } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java index e694a5e102e02..c6fbb4b5c09a2 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -53,7 +53,7 @@ @PublicApi(since = "1.0.0") public class NodesInfoRequest extends BaseNodesRequest { - private Set requestedMetrics = Metric.allMetrics(); + private Set requestedMetrics = Metric.defaultMetrics(); /** * Create a new NodeInfoRequest from a {@link StreamInput} object. @@ -73,7 +73,7 @@ public NodesInfoRequest(StreamInput in) throws IOException { */ public NodesInfoRequest(String... nodesIds) { super(nodesIds); - all(); + defaultMetrics(); } /** @@ -92,6 +92,15 @@ public NodesInfoRequest all() { return this; } + /** + * Sets to return data for default metrics. + * See {@link Metric#defaultMetrics()}. + */ + public NodesInfoRequest defaultMetrics() { + requestedMetrics.addAll(Metric.defaultMetrics()); + return this; + } + /** * Get the names of requested metrics */ @@ -156,7 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { /** * An enumeration of the "core" sections of metrics that may be requested - * from the nodes information endpoint. Eventually this list list will be + * from the nodes information endpoint. Eventually this list will be * pluggable. */ public enum Metric { @@ -171,7 +180,8 @@ public enum Metric { INGEST("ingest"), AGGREGATIONS("aggregations"), INDICES("indices"), - SEARCH_PIPELINES("search_pipelines"); + SEARCH_PIPELINES("search_pipelines"), + ANALYSIS_COMPONENTS("analysis_components"); private String metricName; @@ -187,8 +197,27 @@ boolean containedIn(Set metricNames) { return metricNames.contains(this.metricName()); } + /** + * Return all available metrics. + */ public static Set allMetrics() { return Arrays.stream(values()).map(Metric::metricName).collect(Collectors.toSet()); } + + /** + * Return "the default" set of metrics. + * Similar to {@link #allMetrics()} except {@link Metric#SEARCH_PIPELINES} and + * {@link Metric#ANALYSIS_COMPONENTS} metrics are not included. + *
+ * The motivation to define the default set of metrics was to keep the default response + * size at bay. Metrics that are NOT included in the default set were typically introduced later + * and are considered to contain specific type of information that is not usually useful unless you + * know that you really need it. + */ + public static Set defaultMetrics() { + return allMetrics().stream() + .filter(metric -> !(metric.equals(ANALYSIS_COMPONENTS.metricName()) || metric.equals(SEARCH_PIPELINES.metricName()))) + .collect(Collectors.toSet()); + } } } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java index 7ddd70185e8ad..fcb41597732ca 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/NodesInfoResponse.java @@ -152,6 +152,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (nodeInfo.getInfo(SearchPipelineInfo.class) != null) { nodeInfo.getInfo(SearchPipelineInfo.class).toXContent(builder, params); } + if (nodeInfo.getInfo(NodeAnalysisComponents.class) != null) { + nodeInfo.getInfo(NodeAnalysisComponents.class).toXContent(builder, params); + } builder.endObject(); } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java index 2c4f8522a5a5c..23ba1877a7196 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/info/TransportNodesInfoAction.java @@ -118,7 +118,8 @@ protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) { metrics.contains(NodesInfoRequest.Metric.INGEST.metricName()), metrics.contains(NodesInfoRequest.Metric.AGGREGATIONS.metricName()), metrics.contains(NodesInfoRequest.Metric.INDICES.metricName()), - metrics.contains(NodesInfoRequest.Metric.SEARCH_PIPELINES.metricName()) + metrics.contains(NodesInfoRequest.Metric.SEARCH_PIPELINES.metricName()), + metrics.contains(NodesInfoRequest.Metric.ANALYSIS_COMPONENTS.metricName()) ); } diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 9c5dcc9e9de3f..ba90012edcd7a 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -145,7 +145,7 @@ protected ClusterStatsNodeResponse newNodeResponse(StreamInput in) throws IOExce @Override protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) { - NodeInfo nodeInfo = nodeService.info(true, true, false, true, false, true, false, true, false, false, false, false); + NodeInfo nodeInfo = nodeService.info(true, true, false, true, false, true, false, true, false, false, false, false, false); NodeStats nodeStats = nodeService.stats( CommonStatsFlags.NONE, true, diff --git a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java index a92961cdc3fd9..903b7dfce09c0 
100644 --- a/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java +++ b/server/src/main/java/org/opensearch/action/search/PutSearchPipelineTransportAction.java @@ -82,7 +82,7 @@ protected void clusterManagerOperation( ClusterState state, ActionListener listener ) throws Exception { - NodesInfoRequest nodesInfoRequest = new NodesInfoRequest(); + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().clear().addMetric(NodesInfoRequest.Metric.SEARCH_PIPELINES.metricName()); client.admin().cluster().nodesInfo(nodesInfoRequest, ActionListener.wrap(nodeInfos -> { Map searchPipelineInfos = new HashMap<>(); for (NodeInfo nodeInfo : nodeInfos.getNodes()) { diff --git a/server/src/main/java/org/opensearch/index/analysis/AnalysisRegistry.java b/server/src/main/java/org/opensearch/index/analysis/AnalysisRegistry.java index 793cdcd5c5c1a..ff478cde11bca 100644 --- a/server/src/main/java/org/opensearch/index/analysis/AnalysisRegistry.java +++ b/server/src/main/java/org/opensearch/index/analysis/AnalysisRegistry.java @@ -54,9 +54,11 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.function.BiFunction; import java.util.function.Function; @@ -223,6 +225,46 @@ public Analyzer getAnalyzer(String analyzer) throws IOException { return analyzerProvider.get(environment, analyzer).get(); } + /** + * Returns registered keys of {@link Analyzer}s on this node. + */ + public Set getNodeAnalyzersKeys() { + // return defensive copy so that client can do anything to it without affecting the registry content + return new HashSet<>(this.analyzers.keySet()); + } + + /** + * Returns registered keys of {@link org.apache.lucene.analysis.Tokenizer}s on this node. + */ + public Set getNodeTokenizersKeys() { + // return defensive copy so that client can do anything to it without affecting the registry content + return new HashSet<>(this.tokenizers.keySet()); + } + + /** + * Returns registered keys of {@link org.apache.lucene.analysis.TokenFilter}s on this node. + */ + public Set getNodeTokenFiltersKeys() { + // return defensive copy so that client can do anything to it without affecting the registry content + return new HashSet<>(this.tokenFilters.keySet()); + } + + /** + * Returns registered keys of {@link org.apache.lucene.analysis.CharFilter}s on this node. + */ + public Set getNodeCharFiltersKeys() { + // return defensive copy so that client can do anything to it without affecting the registry content + return new HashSet<>(this.charFilters.keySet()); + } + + /** + * Returns registered keys of Token normalizers on this node. 
+ */ + public Set getNodeNormalizersKeys() { + // return defensive copy so that client can do anything to it without affecting the registry content + return new HashSet<>(this.normalizers.keySet()); + } + @Override public void close() throws IOException { try { diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 8510122c39fcb..40b14fe54debe 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -1155,7 +1155,8 @@ protected Node( resourceUsageCollectorService, segmentReplicationStatsTracker, repositoryService, - admissionControlService + admissionControlService, + analysisModule.getAnalysisRegistry() ); final SearchService searchService = newSearchService( diff --git a/server/src/main/java/org/opensearch/node/NodeService.java b/server/src/main/java/org/opensearch/node/NodeService.java index 15cc8f3d20bb3..68920de1d7e18 100644 --- a/server/src/main/java/org/opensearch/node/NodeService.java +++ b/server/src/main/java/org/opensearch/node/NodeService.java @@ -34,6 +34,7 @@ import org.opensearch.Build; import org.opensearch.Version; +import org.opensearch.action.admin.cluster.node.info.NodeAnalysisComponents; import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.stats.NodeStats; import org.opensearch.action.admin.indices.stats.CommonStatsFlags; @@ -49,6 +50,7 @@ import org.opensearch.http.HttpServerTransport; import org.opensearch.index.IndexingPressureService; import org.opensearch.index.SegmentReplicationStatsTracker; +import org.opensearch.index.analysis.AnalysisRegistry; import org.opensearch.index.store.remote.filecache.FileCache; import org.opensearch.indices.IndicesService; import org.opensearch.ingest.IngestService; @@ -125,7 +127,8 @@ public class NodeService implements Closeable { ResourceUsageCollectorService resourceUsageCollectorService, SegmentReplicationStatsTracker segmentReplicationStatsTracker, RepositoriesService repositoriesService, - AdmissionControlService admissionControlService + AdmissionControlService admissionControlService, + AnalysisRegistry analysisRegistry ) { this.settings = settings; this.threadPool = threadPool; @@ -168,7 +171,8 @@ public NodeInfo info( boolean ingest, boolean aggs, boolean indices, - boolean searchPipeline + boolean searchPipeline, + boolean analyzers ) { NodeInfo.Builder builder = NodeInfo.builder(Version.CURRENT, Build.CURRENT, transportService.getLocalNode()); if (settings) { @@ -207,6 +211,9 @@ public NodeInfo info( if (searchPipeline && searchPipelineService != null) { builder.setSearchPipelineInfo(searchPipelineService.info()); } + if (analyzers && indicesService != null && pluginService != null) { + builder.setNodeAnalysisComponents(new NodeAnalysisComponents(indicesService.getAnalysis(), pluginService)); + } return builder.build(); } diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java index 590b70d3bfc53..e90e55788555d 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginsService.java +++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java @@ -807,4 +807,18 @@ private String signatureMessage(final Class clazz) { public List filterPlugins(Class type) { return plugins.stream().filter(x -> type.isAssignableFrom(x.v2().getClass())).map(p -> ((T) p.v2())).collect(Collectors.toList()); } + + /** + * Return {@link PluginInfo}s for 
filtered plugins of given type. + * Unlike {@link #filterPlugins(Class)} which returns only instances of T this method returns also {@link PluginInfo} + * for each matching plugin. The PluginInfo class can provide a lot of details about the plugin that the generic + * class T does not provide. For instance {@link PluginInfo#getClassname()} or {@link PluginInfo#getName()}. + */ + public List> filterPluginsForPluginInfo(Class type) { + List> collect = plugins.stream() + .filter(x -> type.isAssignableFrom(x.v2().getClass())) + .map(tuple -> (Tuple) tuple) + .collect(Collectors.toList()); + return collect; + } } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java index cfd6fcec4bdc6..9209a16ad3564 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodeInfoTests.java @@ -52,7 +52,7 @@ public class NodeInfoTests extends OpenSearchTestCase { /** - * Check that the the {@link NodeInfo#getInfo(Class)} method returns null + * Check that the {@link NodeInfo#getInfo(Class)} method returns null * for absent info objects, and returns the right thing for present info * objects. */ @@ -72,6 +72,7 @@ public void testGetInfo() { null, null, null, + null, null ); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequestTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequestTests.java index 412b546e134b7..aa23bb6eef55a 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequestTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/info/NodesInfoRequestTests.java @@ -86,15 +86,18 @@ public void testRemoveSingleMetric() throws Exception { } /** - * Test that a newly constructed NodesInfoRequestObject requests all of the - * possible metrics defined in {@link NodesInfoRequest.Metric}. + * Test that a newly constructed NodesInfoRequestObject does not request all the + * possible metrics defined in {@link NodesInfoRequest.Metric} but only the default metrics + * according to {@link NodesInfoRequest.Metric#defaultMetrics()}. */ public void testNodesInfoRequestDefaults() { - NodesInfoRequest defaultNodesInfoRequest = new NodesInfoRequest(randomAlphaOfLength(8)); - NodesInfoRequest allMetricsNodesInfoRequest = new NodesInfoRequest(randomAlphaOfLength(8)); - allMetricsNodesInfoRequest.all(); + NodesInfoRequest requestOOTB = new NodesInfoRequest(randomAlphaOfLength(8)); + NodesInfoRequest requestAll = new NodesInfoRequest(randomAlphaOfLength(8)).all(); + NodesInfoRequest requestDefault = new NodesInfoRequest(randomAlphaOfLength(8)).defaultMetrics(); - assertThat(defaultNodesInfoRequest.requestedMetrics(), equalTo(allMetricsNodesInfoRequest.requestedMetrics())); + assertTrue(requestAll.requestedMetrics().size() > requestOOTB.requestedMetrics().size()); + assertTrue(requestDefault.requestedMetrics().size() == requestOOTB.requestedMetrics().size()); + assertThat(requestOOTB.requestedMetrics(), equalTo(requestDefault.requestedMetrics())); } /** @@ -107,6 +110,21 @@ public void testNodesInfoRequestAll() throws Exception { assertThat(request.requestedMetrics(), equalTo(NodesInfoRequest.Metric.allMetrics())); } + /** + * Test that the {@link NodesInfoRequest#defaultMetrics()} method enables default metrics. 
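+ * Also checks that the metrics excluded from the default set ({@code search_pipelines} and
+ * {@code analysis_components}) remain disabled.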
+ */ + public void testNodesInfoRequestDefault() { + NodesInfoRequest request = new NodesInfoRequest("node"); + request.defaultMetrics(); + + assertEquals(11, request.requestedMetrics().size()); + assertThat(request.requestedMetrics(), equalTo(NodesInfoRequest.Metric.defaultMetrics())); + assertTrue(request.requestedMetrics().contains(NodesInfoRequest.Metric.JVM.metricName())); + assertTrue(request.requestedMetrics().contains(NodesInfoRequest.Metric.AGGREGATIONS.metricName())); + assertFalse(request.requestedMetrics().contains(NodesInfoRequest.Metric.SEARCH_PIPELINES.metricName())); + assertFalse(request.requestedMetrics().contains(NodesInfoRequest.Metric.ANALYSIS_COMPONENTS.metricName())); + } + /** * Test that the {@link NodesInfoRequest#clear()} method disables all metrics. */ diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java index 40a30342b86b9..ec348bf4bca2f 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/stats/ClusterStatsNodesTests.java @@ -181,6 +181,7 @@ private static NodeInfo createNodeInfo(String nodeId, String transportType, Stri null, null, null, + null, null ); } diff --git a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java index fba26b0c72e0e..e488377ad3659 100644 --- a/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/opensearch/nodesinfo/NodeInfoStreamingTests.java @@ -33,6 +33,7 @@ package org.opensearch.nodesinfo; import org.opensearch.Build; +import org.opensearch.action.admin.cluster.node.info.NodeAnalysisComponents; import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; import org.opensearch.cluster.node.DiscoveryNode; @@ -64,8 +65,11 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -85,6 +89,34 @@ public void testNodeInfoStreaming() throws IOException { } } + public void testNodeInfoPluginComponentsNaturalOrder() throws IOException { + NodeAnalysisComponents nodeAnalysisComponents = createNodeAnalyzers(); + assertOrdered(nodeAnalysisComponents.getAnalyzersIds()); + assertOrdered(nodeAnalysisComponents.getTokenizersIds()); + assertOrdered(nodeAnalysisComponents.getTokenFiltersIds()); + assertOrdered(nodeAnalysisComponents.getCharFiltersIds()); + assertOrdered(nodeAnalysisComponents.getNormalizersIds()); + + for (NodeAnalysisComponents.AnalysisPluginComponents nodeAnalysisPlugin : nodeAnalysisComponents.getNodeAnalysisPlugins()) { + assertOrdered(nodeAnalysisPlugin.getAnalyzersIds()); + assertOrdered(nodeAnalysisPlugin.getTokenizersIds()); + assertOrdered(nodeAnalysisPlugin.getTokenFiltersIds()); + assertOrdered(nodeAnalysisPlugin.getCharFiltersIds()); + assertOrdered(nodeAnalysisPlugin.getHunspellDictionaries()); + } + } + + private void assertOrdered(Set set) { + Iterator it = set.iterator(); + if (it.hasNext()) { + String prev = it.next(); + while (it.hasNext()) { + String curr = it.next(); + 
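+ // elements are expected in natural (lexicographic) String order; the strict '<' also rejects duplicates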
assertTrue("Elements not naturally ordered", prev.compareTo(curr) < 0); + } + } + } + // checks all properties that are expected to be unchanged. // Once we start changing them between versions this method has to be changed as well private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) throws IOException { @@ -101,6 +133,7 @@ private void assertExpectedUnchanged(NodeInfo nodeInfo, NodeInfo readNodeInfo) t compareJsonOutput(nodeInfo.getInfo(OsInfo.class), readNodeInfo.getInfo(OsInfo.class)); compareJsonOutput(nodeInfo.getInfo(PluginsAndModules.class), readNodeInfo.getInfo(PluginsAndModules.class)); compareJsonOutput(nodeInfo.getInfo(IngestInfo.class), readNodeInfo.getInfo(IngestInfo.class)); + compareJsonOutput(nodeInfo.getInfo(NodeAnalysisComponents.class), readNodeInfo.getInfo(NodeAnalysisComponents.class)); } private void compareJsonOutput(ToXContent param1, ToXContent param2) throws IOException { @@ -253,6 +286,11 @@ private static NodeInfo createNodeInfo() { searchPipelineInfo = new SearchPipelineInfo(Map.of(randomAlphaOfLengthBetween(3, 10), processors)); } + NodeAnalysisComponents nodeAnalysisComponents = null; + if (randomBoolean()) { + nodeAnalysisComponents = createNodeAnalyzers(); + } + return new NodeInfo( VersionUtils.randomVersion(random()), build, @@ -268,7 +306,93 @@ private static NodeInfo createNodeInfo() { ingestInfo, aggregationInfo, indexingBuffer, - searchPipelineInfo + searchPipelineInfo, + nodeAnalysisComponents + ); + } + + private static NodeAnalysisComponents createNodeAnalyzers() { + List nodeAnalysisPlugins = generateAnalysisPlugins(randomInt(5)); + + return new NodeAnalysisComponents( + generateCodes(), + generateCodes(), + generateCodes(), + generateCodes(), + generateCodes(), + nodeAnalysisPlugins + ); + } + + private static List generateAnalysisPlugins(int numberOfPlugins) { + assert numberOfPlugins > -1; + List plugins = new ArrayList<>(); + for (int i = 0; i < numberOfPlugins; i++) { + NodeAnalysisComponents.AnalysisPluginComponents plugin = new NodeAnalysisComponents.AnalysisPluginComponents( + generateRandomStringArray(1, 10, false, false)[0], // plugin name + generateRandomStringArray(1, 10, false, false)[0], // plugin classname + generateCodes(), + generateCodes(), + generateCodes(), + generateCodes(), + generateCodes() + ); + plugins.add(plugin); + } + return plugins; + } + + private static Set generateCodes() { + return randomUnique(CODES_SUPPLIER, NodeInfoStreamingTests.StringSetSupplier.RECOMMENDED_SIZE); + } + + private static NodeInfoStreamingTests.StringSetSupplier CODES_SUPPLIER = new NodeInfoStreamingTests.StringSetSupplier(); + + private static class StringSetSupplier implements Supplier { + + private static List CODES = List.of( + "aaa1", + "bbb1", + "ccc1", + "ddd1", + "eee1", + "fff1", + "ggg1", + "hhh1", + "iii1", + "jjj1", + "aaa2", + "bbb2", + "ccc2", + "ddd2", + "eee2", + "fff2", + "ggg2", + "hhh2", + "iii2", + "jjj2", + "aaa3", + "bbb3", + "ccc3", + "ddd3", + "eee3", + "fff3", + "ggg3", + "hhh3", + "iii3", + "jjj3" ); + + /** + * This supplier is used to generate UNIQUE tokens (see {@link #generateCodes()}) + * thus we return smaller size in order to increase the chance of yielding more unique tokens. + * As a result, this supplier will produce set of 10 or less unique tokens. 
+ */ + private static int RECOMMENDED_SIZE = CODES.size() / 3; + + @Override + public String get() { + return CODES.get(randomInt(CODES.size() - 1)); + } } } diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index db276678ba4dd..e505d8fa87132 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -154,6 +154,23 @@ public void testFilterPlugins() { assertEquals(FilterablePlugin.class, scriptPlugins.get(0).getClass()); } + public void testPluginInfoAccess() { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); + PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class, FilterablePlugin.class); + + List> pluginInfos1 = service.filterPluginsForPluginInfo( + AdditionalSettingsPlugin1.class + ); + assertEquals(1, pluginInfos1.size()); + assertEquals("org.opensearch.plugins.PluginsServiceTests$AdditionalSettingsPlugin1", pluginInfos1.get(0).v1().getName()); + assertEquals("org.opensearch.plugins.PluginsServiceTests$AdditionalSettingsPlugin1", pluginInfos1.get(0).v1().getClassname()); + + List> pluginInfos2 = service.filterPluginsForPluginInfo(FilterablePlugin.class); + assertEquals(1, pluginInfos2.size()); + assertEquals("org.opensearch.plugins.PluginsServiceTests$FilterablePlugin", pluginInfos2.get(0).v1().getName()); + assertEquals("org.opensearch.plugins.PluginsServiceTests$FilterablePlugin", pluginInfos2.get(0).v1().getClassname()); + } + public void testHiddenDirectories() throws IOException { final Path home = createTempDir(); final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build();
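
For context, below is a minimal sketch of how a caller might request and read the new analysis_components section. It relies only on calls that appear in this patch (NodesInfoRequest.clear()/addMetric(), NodesInfoResponse.getNodes(), NodeInfo.getInfo()); the example class name, the logging, and the import locations for Client and ActionListener are illustrative assumptions, not part of the change.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.action.admin.cluster.node.info.NodeAnalysisComponents;
import org.opensearch.action.admin.cluster.node.info.NodeInfo;
import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.opensearch.client.Client;
import org.opensearch.core.action.ActionListener;

public final class AnalysisComponentsExample {

    private static final Logger logger = LogManager.getLogger(AnalysisComponentsExample.class);

    // Request only the analysis_components metric and log the analyzer ids reported by each node.
    static void logNodeAnalyzers(Client client) {
        NodesInfoRequest request = new NodesInfoRequest().clear()
            .addMetric(NodesInfoRequest.Metric.ANALYSIS_COMPONENTS.metricName());
        client.admin().cluster().nodesInfo(request, ActionListener.wrap(response -> {
            for (NodeInfo nodeInfo : response.getNodes()) {
                NodeAnalysisComponents components = nodeInfo.getInfo(NodeAnalysisComponents.class);
                if (components != null) { // null when the metric was not returned for this node
                    logger.info("analyzers on {}: {}", nodeInfo.getNode().getName(), components.getAnalyzersIds());
                }
            }
        }, e -> logger.warn("nodes info request failed", e)));
    }
}

This explicit-metric pattern mirrors what PutSearchPipelineTransportAction now does for search_pipelines, since neither metric is included in the default NodesInfoRequest anymore.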