Node Analyzers REST API
A first implementation proposal for a new REST API called "Nodes Analyzers".

There are some changes/extensions to the original "core" API to avoid the use of Java reflection.

I still need to add a few more tests, especially tests that set up a few artificial cluster nodes with a controlled set of plugins and then verify that the response contains everything that is required (including Hunspell dictionaries).

The original proposal, which was implemented as a plugin, included some interesting REST API tests (see https://github.com/lukas-vlcek/OpenSearch-list-built-in-analyzers/). I think we will need to reimplement these differently.
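As a rough illustration of the kind of test described above, a minimal sketch built on OpenSearchIntegTestCase might look as follows. The class name `NodesAnalyzersIT` is hypothetical, it assumes `NodesAnalyzersResponse` extends `BaseNodesResponse` and exposes `getNodes()`, and the "standard" analyzer check is only an example expectation, not part of this change:

```java
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersAction;
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersRequest;
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersResponse;
import org.opensearch.test.OpenSearchIntegTestCase;

public class NodesAnalyzersIT extends OpenSearchIntegTestCase {

    // A controlled set of analysis plugins could be installed on the test nodes by overriding nodePlugins().

    public void testEveryNodeReportsItsAnalysisComponents() {
        // Execute the new transport action against every node of the internal test cluster.
        NodesAnalyzersResponse response = client().execute(NodesAnalyzersAction.INSTANCE, new NodesAnalyzersRequest()).actionGet();

        // Example expectation: each node reports at least the built-in "standard" analyzer.
        response.getNodes().forEach(node -> assertTrue(node.getAnalyzersKeySet().contains("standard")));
    }
}
```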

Closes #5481

Signed-off-by: Lukáš <[email protected]>
lukas-vlcek committed Oct 2, 2023
1 parent 797def6 commit d11d0d0
Showing 16 changed files with 943 additions and 0 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -13,6 +13,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679))
- Implement Visitor Design pattern in QueryBuilder to enable the capability to traverse through the complex QueryBuilder tree. ([#10110](https://github.com/opensearch-project/OpenSearch/pull/10110))
- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
- Introduce new REST API Node Analyzers ([#10296](https://github.com/opensearch-project/OpenSearch/pull/10296))

### Dependencies
- Bump `log4j-core` from 2.18.0 to 2.19.0
@@ -0,0 +1,41 @@
{
  "nodes.analyzers":{
    "documentation":{
      "url":"https://opensearch.org/docs/latest/api-reference/nodes-apis/nodes-analyzers/",
      "description":"Returns information about analyzer components on the node."
    },
    "stability":"experimental",
    "url":{
      "paths":[
        {
          "path":"/_nodes/analyzers",
          "methods":[
            "GET"
          ]
        },
        {
          "path":"/_nodes/{node_id}/analyzers",
          "methods":[
            "GET"
          ],
          "parts":{
            "node_id":{
              "type":"list",
              "description":"A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes"
            }
          }
        }
      ]
    },
    "params":{
      "flat_settings":{
        "type":"boolean",
        "description":"Return settings in flat format (default: false)"
      },
      "timeout":{
        "type":"time",
        "description":"Explicit operation timeout"
      }
    }
  }
}
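For illustration, calling the two endpoints defined above through the low-level Java REST client could look like this minimal sketch (a local cluster on localhost:9200, the 2.x `opensearch-rest-client` dependency, and the shown import locations are assumptions):

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;

public class NodesAnalyzersApiExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // All nodes.
            Response all = client.performRequest(new Request("GET", "/_nodes/analyzers"));
            System.out.println(EntityUtils.toString(all.getEntity()));

            // Only the node the client is connected to, using the {node_id} path part.
            Response local = client.performRequest(new Request("GET", "/_nodes/_local/analyzers"));
            System.out.println(EntityUtils.toString(local.getEntity()));
        }
    }
}
```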
@@ -0,0 +1,21 @@
setup:
  - skip:
      features: [arbitrary_key]
---
"nodes_analyzers test":
  - do:
      nodes.analyzers: {}

  # Note: It will only stash the first node_id in the api response.
  - set:
      nodes._arbitrary_key_: node_id

  - is_true: nodes
  - is_true: cluster_name

  - is_true: nodes.$node_id.analyzers
  - is_true: nodes.$node_id.tokenizers
  - is_true: nodes.$node_id.tokenFilters
  - is_true: nodes.$node_id.charFilters
  - is_true: nodes.$node_id.normalizers
  - is_true: nodes.$node_id.plugins
5 changes: 5 additions & 0 deletions server/src/main/java/org/opensearch/action/ActionModule.java
@@ -48,6 +48,8 @@
import org.opensearch.action.admin.cluster.decommission.awareness.put.TransportDecommissionAction;
import org.opensearch.action.admin.cluster.health.ClusterHealthAction;
import org.opensearch.action.admin.cluster.health.TransportClusterHealthAction;
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersAction;
import org.opensearch.action.admin.cluster.node.analyzers.TransportNodesAnalyzersAction;
import org.opensearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction;
import org.opensearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction;
import org.opensearch.action.admin.cluster.node.info.NodesInfoAction;
@@ -346,6 +348,7 @@
import org.opensearch.rest.action.admin.cluster.RestGetStoredScriptAction;
import org.opensearch.rest.action.admin.cluster.RestGetTaskAction;
import org.opensearch.rest.action.admin.cluster.RestListTasksAction;
import org.opensearch.rest.action.admin.cluster.RestNodesAnalyzersAction;
import org.opensearch.rest.action.admin.cluster.RestNodesHotThreadsAction;
import org.opensearch.rest.action.admin.cluster.RestNodesInfoAction;
import org.opensearch.rest.action.admin.cluster.RestNodesStatsAction;
@@ -598,6 +601,7 @@ public <Request extends ActionRequest, Response extends ActionResponse> void reg
        actions.register(RemoteStoreStatsAction.INSTANCE, TransportRemoteStoreStatsAction.class);
        actions.register(NodesUsageAction.INSTANCE, TransportNodesUsageAction.class);
        actions.register(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class);
        actions.register(NodesAnalyzersAction.INSTANCE, TransportNodesAnalyzersAction.class);
        actions.register(ListTasksAction.INSTANCE, TransportListTasksAction.class);
        actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class);
        actions.register(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class);
@@ -787,6 +791,7 @@ public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster) {
        registerHandler.accept(new RestNodesStatsAction());
        registerHandler.accept(new RestNodesUsageAction());
        registerHandler.accept(new RestNodesHotThreadsAction());
        registerHandler.accept(new RestNodesAnalyzersAction());
        registerHandler.accept(new RestClusterAllocationExplainAction());
        registerHandler.accept(new RestClusterStatsAction());
        registerHandler.accept(new RestClusterStateAction(settingsFilter));
@@ -0,0 +1,198 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.action.admin.cluster.node.analyzers;

import org.opensearch.action.support.nodes.BaseNodeResponse;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.common.Nullable;
import org.opensearch.core.common.io.stream.StreamInput;
import org.opensearch.core.common.io.stream.StreamOutput;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

import static java.util.Collections.unmodifiableMap;
import static java.util.Collections.unmodifiableSortedSet;

/**
 * Node analyzers per Node.
 *
 * @opensearch.internal
 */
public class NodeAnalyzersInfo extends BaseNodeResponse {

    @Nullable
    private final SortedSet<String> analyzersKeySet;

    @Nullable
    private final SortedSet<String> tokenizersKeySet;

    @Nullable
    private final SortedSet<String> tokenFiltersKeySet;

    @Nullable
    private final SortedSet<String> charFiltersKeySet;

    @Nullable
    private final SortedSet<String> normalizersKeySet;

    @Nullable
    private final Map<String, AnalysisPluginComponents> nodeAnalysisPlugins;

    public static class AnalysisPluginComponents implements Comparable<AnalysisPluginComponents> {

        private final String pluginName;
        private final SortedSet<String> analyzersKeySet;
        private final SortedSet<String> tokenizersKeySet;
        private final SortedSet<String> tokenFiltersKeySet;
        private final SortedSet<String> charFiltersKeySet;
        private final SortedSet<String> hunspellDictionaries;

        public AnalysisPluginComponents(
            final String pluginName,
            final Set<String> analyzersKeySet,
            final Set<String> tokenizersKeySet,
            final Set<String> tokenFiltersKeySet,
            final Set<String> charFiltersKeySet,
            final Set<String> hunspellDictionaries
        ) {
            this.pluginName = pluginName;
            this.analyzersKeySet = unmodifiableSortedSet(new TreeSet<>(analyzersKeySet));
            this.tokenizersKeySet = unmodifiableSortedSet(new TreeSet<>(tokenizersKeySet));
            this.tokenFiltersKeySet = unmodifiableSortedSet(new TreeSet<>(tokenFiltersKeySet));
            this.charFiltersKeySet = unmodifiableSortedSet(new TreeSet<>(charFiltersKeySet));
            this.hunspellDictionaries = unmodifiableSortedSet(new TreeSet<>(hunspellDictionaries));
        }

        public AnalysisPluginComponents(StreamInput in) throws IOException {
            this.pluginName = in.readString();
            this.analyzersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
            this.tokenizersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
            this.tokenFiltersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
            this.charFiltersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
            this.hunspellDictionaries = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
        }

        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(this.pluginName);
            out.writeStringCollection(this.analyzersKeySet);
            out.writeStringCollection(this.tokenizersKeySet);
            out.writeStringCollection(this.tokenFiltersKeySet);
            out.writeStringCollection(this.charFiltersKeySet);
            out.writeStringCollection(this.hunspellDictionaries);
        }

        public String getPluginName() {
            return pluginName;
        }

        public Set<String> getAnalyzersKeySet() {
            return analyzersKeySet;
        }

        public Set<String> getTokenizersKeySet() {
            return tokenizersKeySet;
        }

        public Set<String> getTokenFiltersKeySet() {
            return tokenFiltersKeySet;
        }

        public Set<String> getCharFiltersKeySet() {
            return charFiltersKeySet;
        }

        public Set<String> getHunspellDictionaries() {
            return hunspellDictionaries;
        }

        @Override
        public int compareTo(AnalysisPluginComponents o) {
            // TODO: Do we need better comparison?
            return this.pluginName.compareTo(o.pluginName);
        }
    }

    protected NodeAnalyzersInfo(StreamInput in) throws IOException {
        super(in);
        this.analyzersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
        this.tokenizersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
        this.tokenFiltersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
        this.charFiltersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
        this.normalizersKeySet = unmodifiableSortedSet(new TreeSet<>(in.readSet(StreamInput::readString)));
        this.nodeAnalysisPlugins = unmodifiableMap(in.readMap(StreamInput::readString, AnalysisPluginComponents::new));
    }

    public NodeAnalyzersInfo(
        final DiscoveryNode node,
        final Set<String> analyzersKeySet,
        final Set<String> tokenizersKeySet,
        final Set<String> tokenFiltersKeySet,
        final Set<String> charFiltersKeySet,
        final Set<String> normalizersKeySet,
        final Map<String, AnalysisPluginComponents> nodeAnalysisPlugins
    ) {
        super(node);
        this.analyzersKeySet = analyzersKeySet == null
            ? Collections.emptySortedSet()
            : unmodifiableSortedSet(new TreeSet<>(analyzersKeySet));
        this.tokenizersKeySet = tokenizersKeySet == null
            ? Collections.emptySortedSet()
            : unmodifiableSortedSet(new TreeSet<>(tokenizersKeySet));
        this.tokenFiltersKeySet = tokenFiltersKeySet == null
            ? Collections.emptySortedSet()
            : unmodifiableSortedSet(new TreeSet<>(tokenFiltersKeySet));
        this.charFiltersKeySet = charFiltersKeySet == null
            ? Collections.emptySortedSet()
            : unmodifiableSortedSet(new TreeSet<>(charFiltersKeySet));
        this.normalizersKeySet = normalizersKeySet == null
            ? Collections.emptySortedSet()
            : unmodifiableSortedSet(new TreeSet<>(normalizersKeySet));
        this.nodeAnalysisPlugins = nodeAnalysisPlugins == null ? Collections.emptyMap() : unmodifiableMap(nodeAnalysisPlugins);
    }

    public Set<String> getAnalyzersKeySet() {
        return this.analyzersKeySet;
    }

    public Set<String> getTokenizersKeySet() {
        return this.tokenizersKeySet;
    }

    public Set<String> getTokenFiltersKeySet() {
        return this.tokenFiltersKeySet;
    }

    public Set<String> getCharFiltersKeySet() {
        return this.charFiltersKeySet;
    }

    public Set<String> getNormalizersKeySet() {
        return this.normalizersKeySet;
    }

    public Map<String, AnalysisPluginComponents> getNodeAnalysisPlugins() {
        return nodeAnalysisPlugins;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeStringCollection(this.analyzersKeySet);
        out.writeStringCollection(this.tokenizersKeySet);
        out.writeStringCollection(this.tokenFiltersKeySet);
        out.writeStringCollection(this.charFiltersKeySet);
        out.writeStringCollection(this.normalizersKeySet);
        out.writeMap(this.nodeAnalysisPlugins, StreamOutput::writeString, (o, s) -> s.writeTo(o));
    }
}
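As a quick illustration of the serialization contract implemented by writeTo and the StreamInput constructor above, a round-trip sketch for AnalysisPluginComponents might look like this. The plugin and component names are made-up placeholders, and the BytesStreamOutput usage follows the usual OpenSearch wire-serialization test pattern:

```java
import org.opensearch.action.admin.cluster.node.analyzers.NodeAnalyzersInfo.AnalysisPluginComponents;
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.core.common.io.stream.StreamInput;

import java.util.Set;

public class AnalysisPluginComponentsWireExample {
    public static void main(String[] args) throws Exception {
        AnalysisPluginComponents original = new AnalysisPluginComponents(
            "analysis-icu",          // plugin name (placeholder value)
            Set.of("icu_analyzer"),  // analyzers
            Set.of("icu_tokenizer"), // tokenizers
            Set.of("icu_folding"),   // token filters
            Set.of("icu_normalizer"),// char filters
            Set.of()                 // Hunspell dictionaries
        );

        // Serialize and deserialize with the same stream types used by the transport layer.
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);
            try (StreamInput in = out.bytes().streamInput()) {
                AnalysisPluginComponents copy = new AnalysisPluginComponents(in);
                assert copy.getPluginName().equals(original.getPluginName());
                assert copy.getAnalyzersKeySet().equals(original.getAnalyzersKeySet());
            }
        }
    }
}
```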
@@ -0,0 +1,25 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.action.admin.cluster.node.analyzers;

import org.opensearch.action.ActionType;

/**
 * ActionType for requesting analysis components from cluster nodes.
 *
 * @opensearch.internal
 */
public class NodesAnalyzersAction extends ActionType<NodesAnalyzersResponse> {

    public static final NodesAnalyzersAction INSTANCE = new NodesAnalyzersAction();

    public static final String NAME = "cluster:monitor/node/analyzers";

    private NodesAnalyzersAction() {
        super(NAME, NodesAnalyzersResponse::new);
    }
}
@@ -0,0 +1,28 @@
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * The OpenSearch Contributors require contributions made to
 * this file be licensed under the Apache-2.0 license or a
 * compatible open source license.
 */

package org.opensearch.action.admin.cluster.node.analyzers;

import org.opensearch.action.support.nodes.BaseNodesRequest;
import org.opensearch.core.common.io.stream.StreamInput;

import java.io.IOException;

/**
 * Request to gather analysis components from one or more cluster nodes.
 *
 * @opensearch.internal
 */
public class NodesAnalyzersRequest extends BaseNodesRequest<NodesAnalyzersRequest> {

    public NodesAnalyzersRequest(String... nodesIds) {
        super(nodesIds);
    }

    public NodesAnalyzersRequest(StreamInput in) throws IOException {
        super(in);
    }
}
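Because the request is a thin wrapper around BaseNodesRequest, node-targeted usage from Java code could look roughly like the following sketch. The Client instance is assumed to be available, NodesAnalyzersResponse is not part of this excerpt and is assumed to expose the usual getNodes() accessor, and import locations follow the 2.x core refactor and may differ slightly by version:

```java
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersAction;
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersRequest;
import org.opensearch.action.admin.cluster.node.analyzers.NodesAnalyzersResponse;
import org.opensearch.client.Client;
import org.opensearch.core.action.ActionListener;

public final class NodesAnalyzersClientSketch {

    private NodesAnalyzersClientSketch() {}

    // Query two nodes by id/name; an empty vararg list would target all nodes.
    public static void fetchAnalyzers(Client client) {
        NodesAnalyzersRequest request = new NodesAnalyzersRequest("node-1", "node-2");
        client.execute(NodesAnalyzersAction.INSTANCE, request, ActionListener.wrap(
            (NodesAnalyzersResponse response) -> response.getNodes()
                .forEach(node -> System.out.println(node.getNode().getName() + ": " + node.getAnalyzersKeySet())),
            e -> { throw new RuntimeException(e); }
        ));
    }
}
```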