Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/master' into global-info-refactor
Browse files Browse the repository at this point in the history
# Conflicts:
#	build.gradle
  • Loading branch information
mark-vieira committed May 21, 2019
2 parents 9664bf5 + aac7473 commit 3907284
Show file tree
Hide file tree
Showing 26 changed files with 646 additions and 86 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,22 @@ class BuildPlugin implements Plugin<Project> {
configureTestTasks(project)
configurePrecommit(project)
configureDependenciesInfo(project)

// Common config when running with a FIPS-140 runtime JVM
// Need to do it here to support external plugins
if (project.ext.inFipsJvm) {
project.tasks.withType(Test) {
systemProperty 'javax.net.ssl.trustStorePassword', 'password'
systemProperty 'javax.net.ssl.keyStorePassword', 'password'
}
project.pluginManager.withPlugin("elasticsearch.testclusters") {
project.testClusters.all {
systemProperty 'javax.net.ssl.trustStorePassword', 'password'
systemProperty 'javax.net.ssl.keyStorePassword', 'password'
}
}
}

}

static void requireDocker(final Task task) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,44 +21,35 @@

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;


public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTestCase {

public static final String PROJECT_NAME = "elasticsearch-build-resources";

public void testUpToDateWithSourcesConfigured() {
GradleRunner.create()
.withProjectDir(getProjectDir(PROJECT_NAME))
getGradleRunner(PROJECT_NAME)
.withArguments("clean", "-s")
.withPluginClasspath()
.build();

BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir(PROJECT_NAME))
BuildResult result = getGradleRunner(PROJECT_NAME)
.withArguments("buildResources", "-s", "-i")
.withPluginClasspath()
.build();
assertTaskSuccessful(result, ":buildResources");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");

result = GradleRunner.create()
.withProjectDir(getProjectDir(PROJECT_NAME))
result = getGradleRunner(PROJECT_NAME)
.withArguments("buildResources", "-s", "-i")
.withPluginClasspath()
.build();
assertTaskUpToDate(result, ":buildResources");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
}

public void testImplicitTaskDependencyCopy() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir(PROJECT_NAME))
BuildResult result = getGradleRunner(PROJECT_NAME)
.withArguments("clean", "sampleCopyAll", "-s", "-i")
.withPluginClasspath()
.build();

assertTaskSuccessful(result, ":buildResources");
Expand All @@ -69,10 +60,8 @@ public void testImplicitTaskDependencyCopy() {
}

public void testImplicitTaskDependencyInputFileOfOther() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir(PROJECT_NAME))
BuildResult result = getGradleRunner(PROJECT_NAME)
.withArguments("clean", "sample", "-s", "-i")
.withPluginClasspath()
.build();

assertTaskSuccessful(result, ":sample");
Expand All @@ -81,11 +70,12 @@ public void testImplicitTaskDependencyInputFileOfOther() {
}

public void testIncorrectUsage() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir(PROJECT_NAME))
.withArguments("noConfigAfterExecution", "-s", "-i")
.withPluginClasspath()
.buildAndFail();
assertOutputContains("buildResources can't be configured after the task ran");
assertOutputContains(
getGradleRunner(PROJECT_NAME)
.withArguments("noConfigAfterExecution", "-s", "-i")
.buildAndFail()
.getOutput(),
"buildResources can't be configured after the task ran"
);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;

/*
* Licensed to Elasticsearch under one or more contributor
Expand All @@ -25,10 +24,8 @@
public class JarHellTaskIT extends GradleIntegrationTestCase {

public void testJarHellDetected() {
BuildResult result = GradleRunner.create()
.withProjectDir(getProjectDir("jarHell"))
BuildResult result = getGradleRunner("jarHell")
.withArguments("clean", "precommit", "-s", "-Dlocal.repo.path=" + getLocalTestRepoPath())
.withPluginClasspath()
.buildAndFail();

assertTaskFailed(result, ":jarHell");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,12 @@
import org.gradle.testkit.runner.BuildTask;
import org.gradle.testkit.runner.GradleRunner;
import org.gradle.testkit.runner.TaskOutcome;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
Expand All @@ -16,6 +20,9 @@

public abstract class GradleIntegrationTestCase extends GradleUnitTestCase {

@Rule
public TemporaryFolder testkitTmpDir = new TemporaryFolder();

protected File getProjectDir(String name) {
File root = new File("src/testKit/");
if (root.exists() == false) {
Expand All @@ -26,9 +33,16 @@ protected File getProjectDir(String name) {
}

protected GradleRunner getGradleRunner(String sampleProject) {
File testkit;
try {
testkit = testkitTmpDir.newFolder();
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return GradleRunner.create()
.withProjectDir(getProjectDir(sampleProject))
.withPluginClasspath();
.withPluginClasspath()
.withTestKitDir(testkit);
}

protected File getBuildDir(String name) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,21 @@
import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.Before;
import org.junit.Ignore;


import java.util.Arrays;

public class TestClustersPluginIT extends GradleIntegrationTestCase {

private GradleRunner runner;

@Before
public void setUp() throws Exception {
runner = getGradleRunner("testclusters");
}

public void testListClusters() {
BuildResult result = getTestClustersRunner("listTestClusters").build();

Expand Down Expand Up @@ -190,10 +199,7 @@ private GradleRunner getTestClustersRunner(String... tasks) {
arguments[tasks.length] = "-s";
arguments[tasks.length + 1] = "-i";
arguments[tasks.length + 2] = "-Dlocal.repo.path=" + getLocalTestRepoPath();
return GradleRunner.create()
.withProjectDir(getProjectDir("testclusters"))
.withArguments(arguments)
.withPluginClasspath();
return runner.withArguments(arguments);
}

private void assertStartedAndStoppedOnce(BuildResult result, String nodeName) {
Expand Down
2 changes: 1 addition & 1 deletion docs/reference/cat/thread_pool.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ ml_autodetect (default distro only)
ml_datafeed (default distro only)
ml_utility (default distro only)
refresh
rollup_indexing (default distro only)`
rollup_indexing (default distro only)
search
security-token-key (default distro only)
snapshot
Expand Down
63 changes: 60 additions & 3 deletions docs/reference/commands/node-tool.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,23 @@
The `elasticsearch-node` command enables you to perform certain unsafe
operations on a node that are only possible while it is shut down. This command
allows you to adjust the <<modules-node,role>> of a node and may be able to
recover some data after a disaster.
recover some data after a disaster or start a node even if it is incompatible
with the data on disk.

[float]
=== Synopsis

[source,shell]
--------------------------------------------------
bin/elasticsearch-node repurpose|unsafe-bootstrap|detach-cluster
bin/elasticsearch-node repurpose|unsafe-bootstrap|detach-cluster|override-version
[--ordinal <Integer>] [-E <KeyValuePair>]
[-h, --help] ([-s, --silent] | [-v, --verbose])
--------------------------------------------------

[float]
=== Description

This tool has three modes:
This tool has four modes:

* `elasticsearch-node repurpose` can be used to delete unwanted data from a
node if it used to be a <<data-node,data node>> or a
Expand All @@ -36,6 +37,11 @@ This tool has three modes:
cluster bootstrapping was not possible, it also enables you to move nodes
into a brand-new cluster.

* `elasticsearch-node override-version` enables you to start up a node
even if the data in the data path was written by an incompatible version of
{es}. This may sometimes allow you to downgrade to an earlier version of
{es}.

[[node-tool-repurpose]]
[float]
==== Changing the role of a node
Expand Down Expand Up @@ -109,6 +115,25 @@ way forward that does not risk data loss, but it may be possible to use the
`elasticsearch-node` tool to construct a new cluster that contains some of the
data from the failed cluster.

[[node-tool-override-version]]
[float]
==== Bypassing version checks

The data that {es} writes to disk is designed to be read by the current version
and a limited set of future versions. It cannot generally be read by older
versions, nor by versions that are more than one major version newer. The data
stored on disk includes the version of the node that wrote it, and {es} checks
that it is compatible with this version when starting up.

In rare circumstances it may be desirable to bypass this check and start up an
{es} node using data that was written by an incompatible version. This may not
work if the format of the stored data has changed, and it is a risky process
because it is possible for the format to change in ways that {es} may
misinterpret, silently leading to data loss.

To bypass this check, you can use the `elasticsearch-node override-version`
tool to overwrite the version number stored in the data path with the current
version, causing {es} to believe that it is compatible with the on-disk data.

[[node-tool-unsafe-bootstrap]]
[float]
Expand Down Expand Up @@ -262,6 +287,9 @@ one-node cluster.
`detach-cluster`:: Specifies to unsafely detach this node from its cluster so
it can join a different cluster.

`override-version`:: Overwrites the version number stored in the data path so
that a node can start despite being incompatible with the on-disk data.

`--ordinal <Integer>`:: If there is <<max-local-storage-nodes,more than one
node sharing a data path>> then this specifies which node to target. Defaults
to `0`, meaning to use the first node in the data path.
Expand Down Expand Up @@ -423,3 +451,32 @@ Do you want to proceed?
Confirm [y/N] y
Node was successfully detached from the cluster
----

[float]
==== Bypassing version checks

Run the `elasticsearch-node override-version` command to overwrite the version
stored in the data path so that a node can start despite being incompatible
with the data stored in the data path:

[source, txt]
----
node$ ./bin/elasticsearch-node override-version
WARNING: Elasticsearch MUST be stopped before running this tool.
This data path was last written by Elasticsearch version [x.x.x] and may no
longer be compatible with Elasticsearch version [y.y.y]. This tool will bypass
this compatibility check, allowing a version [y.y.y] node to start on this data
path, but a version [y.y.y] node may not be able to read this data or may read
it incorrectly leading to data loss.
You should not use this tool. Instead, continue to use a version [x.x.x] node
on this data path. If necessary, you can use reindex-from-remote to copy the
data from here into an older cluster.
Do you want to proceed?
Confirm [y/N] y
Successfully overwrote this node's metadata to bypass its version compatibility checks.
----
8 changes: 8 additions & 0 deletions docs/reference/query-dsl/script-score-query.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -78,10 +78,18 @@ to be the most efficient by using the internal mechanisms.

[[vector-functions]]
===== Functions for vector fields

experimental[]

These functions are used for
<<dense-vector,`dense_vector`>> and
<<sparse-vector,`sparse_vector`>> fields.

NOTE: During vector functions' calculation, all matched documents are
linearly scanned. Thus, expect the query time to grow linearly
with the number of matched documents. For this reason, we recommend
limiting the number of matched documents with a `query` parameter.

For dense_vector fields, `cosineSimilarity` calculates the measure of
cosine similarity between a given query vector and document vectors.

Expand Down
2 changes: 1 addition & 1 deletion docs/reference/search/rank-eval.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ GET /my_index/_rank_eval
],
"requests": [
{
"id": "amsterdam_query"
"id": "amsterdam_query",
"ratings": [ ... ],
"template_id": "match_one_field_query", <3>
"params": { <4>
Expand Down
16 changes: 8 additions & 8 deletions server/src/main/java/org/elasticsearch/Version.java
Original file line number Diff line number Diff line change
Expand Up @@ -94,18 +94,18 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_6_7_1 = new Version(V_6_7_1_ID, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final int V_6_7_2_ID = 6070299;
public static final Version V_6_7_2 = new Version(V_6_7_2_ID, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final int V_6_7_3_ID = 6070399;
public static final Version V_6_7_3 = new Version(V_6_7_3_ID, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final int V_6_8_0_ID = 6080099;
public static final Version V_6_8_0 = new Version(V_6_8_0_ID, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final int V_6_8_1_ID = 6080199;
public static final Version V_6_8_1 = new Version(V_6_8_1_ID, org.apache.lucene.util.Version.LUCENE_7_7_0);
public static final int V_7_0_0_ID = 7000099;
public static final Version V_7_0_0 = new Version(V_7_0_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_0_1_ID = 7000199;
public static final Version V_7_0_1 = new Version(V_7_0_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_0_2_ID = 7000299;
public static final Version V_7_0_2 = new Version(V_7_0_2_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_1_0_ID = 7010099;
public static final Version V_7_1_0 = new Version(V_7_1_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_1_1_ID = 7010199;
public static final Version V_7_1_1 = new Version(V_7_1_1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_7_2_0_ID = 7020099;
public static final Version V_7_2_0 = new Version(V_7_2_0_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
public static final int V_8_0_0_ID = 8000099;
Expand All @@ -128,18 +128,18 @@ public static Version fromId(int id) {
return V_8_0_0;
case V_7_2_0_ID:
return V_7_2_0;
case V_7_1_1_ID:
return V_7_1_1;
case V_7_1_0_ID:
return V_7_1_0;
case V_7_0_2_ID:
return V_7_0_2;
case V_7_0_1_ID:
return V_7_0_1;
case V_7_0_0_ID:
return V_7_0_0;
case V_6_8_1_ID:
return V_6_8_1;
case V_6_8_0_ID:
return V_6_8_0;
case V_6_7_3_ID:
return V_6_7_3;
case V_6_7_1_ID:
return V_6_7_1;
case V_6_7_2_ID:
Expand Down
Loading

0 comments on commit 3907284

Please sign in to comment.