diff --git a/.github/workflows/sql-cli-release-workflow.yml b/.github/workflows/sql-cli-release-workflow.yml
index 78b843e1a7..dd7c13d0cd 100644
--- a/.github/workflows/sql-cli-release-workflow.yml
+++ b/.github/workflows/sql-cli-release-workflow.yml
@@ -42,9 +42,11 @@ jobs:
python setup.py sdist bdist_wheel
artifact=`ls ./dist/*.tar.gz`
wheel_artifact=`ls ./dist/*.whl`
+ renamed_wheel_artifact=`echo $wheel_artifact | sed 's/_/-/g'`
+ mv "$wheel_artifact" "$renamed_wheel_artifact"
aws s3 cp $artifact s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-cli/
- aws s3 cp $wheel_artifact s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-cli/
+ aws s3 cp $renamed_wheel_artifact s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-cli/
# aws cloudfront create-invalidation --distribution-id ${{ secrets.DISTRIBUTION_ID }} --paths "/downloads/*"
diff --git a/.github/workflows/sql-odbc-release-workflow.yml b/.github/workflows/sql-odbc-release-workflow.yml
index 9ee8661569..a1ab4f8358 100644
--- a/.github/workflows/sql-odbc-release-workflow.yml
+++ b/.github/workflows/sql-odbc-release-workflow.yml
@@ -11,6 +11,8 @@ env:
ODBC_BIN_PATH: "./build/odbc/bin"
ODBC_BUILD_PATH: "./build/odbc/build"
AWS_SDK_INSTALL_PATH: "./build/aws-sdk/install"
+ PLUGIN_NAME: opendistro-sql-odbc
+ OD_VERSION: 1.12.0.0
jobs:
build-mac:
@@ -80,6 +82,8 @@ jobs:
run: |
cd installer
mac_installer=`ls -1t *.pkg | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1`
+ mv "$mac_installer" "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-macos-x64.pkg"
+ mac_installer=`ls -1t *.pkg | grep "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-macos-x64.pkg" | head -1`
echo $mac_installer
aws s3 cp "$mac_installer" s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-odbc/mac/
build-windows32:
@@ -128,6 +132,8 @@ jobs:
run: |
cd ci-output/installer
windows_installer=`ls -1t *.msi | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1`
+ mv "$windows_installer" "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x86.msi"
+ windows_installer=`ls -1t *.msi | grep "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x86.msi" | head -1`
echo $windows_installer
aws s3 cp "$windows_installer" s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-odbc/windows/
build-windows64:
@@ -176,5 +182,7 @@ jobs:
run: |
cd ci-output/installer
windows_installer=`ls -1t *.msi | grep "Open Distro for Elasticsearch SQL ODBC Driver" | head -1`
+ mv "$windows_installer" "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x64.msi"
+ windows_installer=`ls -1t *.msi | grep "${{ env.PLUGIN_NAME }}-${{ env.OD_VERSION }}-windows-x64.msi" | head -1`
echo $windows_installer
aws s3 cp "$windows_installer" s3://artifacts.opendistroforelasticsearch.amazon.com/downloads/elasticsearch-clients/opendistro-sql-odbc/windows/
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java
index 084af70a8d..aa0a6e7b94 100644
--- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java
+++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpression.java
@@ -71,7 +71,7 @@ public String toString() {
}
/**
- * Resolve the ExprValue from {@link ExprTupleValue} using paths.*
+ * Resolve the ExprValue from {@link ExprTupleValue} using paths.
* Considering the following sample data.
* {
* "name": "bob smith"
@@ -81,7 +81,11 @@ public String toString() {
* }
* "address": {
* "state": "WA",
- * "city": "seattle"
+ * "city": "seattle",
+ * "project.year": 1990
+ * }
+ * "address.local": {
+ * "state": "WA",
* }
* }
* The paths could be
@@ -89,8 +93,11 @@ public String toString() {
* 2. multiple paths, e.g. "name.address.state", which will be resolved as "WA"
* 3. special case, the "." is the path separator, but it is possible that the path include
* ".", for handling this use case, we define the resolve rule as bellow, e.g. "project.year" is
- * resolved as 1990 instead of 2020.
- * Resolved Rule
+ * resolved as 1990 instead of 2020. Note: this logic applies only to top-level non-object fields,
+ * e.g. "address.local.state" is resolved to Missing, but "address.project.year" can be
+ * resolved as 1990.
+ *
+ * Resolve Rule
* 1. Resolve the full name by combine the paths("x"."y"."z") as whole ("x.y.z").
* 2. Resolve the path recursively through ExprValue.
*
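
The resolution rules described in the Javadoc above can be illustrated with a minimal, self-contained sketch over plain `Map`s. This is an illustration of the documented lookup order only; the class and method names are hypothetical and it does not use the project's `ExprValue`/`ExprTupleValue` API.

```java
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * Sketch of the documented resolve rules:
 * 1. try the remaining path joined as one dotted key (e.g. "project.year");
 * 2. otherwise descend by the first single segment and resolve the rest recursively.
 */
public class DottedPathResolver {

  public static Optional<Object> resolve(Map<String, Object> tuple, String path) {
    return resolve(tuple, Arrays.asList(path.split("\\.")));
  }

  @SuppressWarnings("unchecked")
  private static Optional<Object> resolve(Map<String, Object> tuple, List<String> paths) {
    // Rule 1: the whole remaining path may itself be a key ("project.year" -> 1990).
    String whole = String.join(".", paths);
    if (tuple.containsKey(whole) || paths.size() == 1) {
      return Optional.ofNullable(tuple.get(whole));
    }
    // Rule 2: descend by the first segment only, then resolve the rest recursively.
    Object child = tuple.get(paths.get(0));
    if (!(child instanceof Map)) {
      return Optional.empty();   // e.g. "address.local.state" ends up Missing
    }
    return resolve((Map<String, Object>) child, paths.subList(1, paths.size()));
  }

  public static void main(String[] args) {
    Map<String, Object> tuple = Map.of(
        "name", "bob smith",
        "project.year", 1990,
        "project", Map.of("year", 2020),
        "address", Map.of("state", "WA", "city", "seattle", "project.year", 1990),
        "address.local", Map.of("state", "WA"));

    System.out.println(resolve(tuple, "project.year"));         // Optional[1990]
    System.out.println(resolve(tuple, "address.project.year")); // Optional[1990]
    System.out.println(resolve(tuple, "address.state"));        // Optional[WA]
    System.out.println(resolve(tuple, "address.local.state"));  // Optional.empty
  }
}
```

With the sample data from the comment, `project.year` and `address.project.year` resolve to 1990, while `address.local.state` stays Missing because the dotted key `address.local` holds an object and recursion always descends by the first single segment.
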
diff --git a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java
index dad231fe62..8376be4163 100644
--- a/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java
+++ b/core/src/main/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClause.java
@@ -24,6 +24,7 @@
import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.STRING;
import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.define;
import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.impl;
+import static com.amazon.opendistroforelasticsearch.sql.expression.function.FunctionDSL.nullMissingHandling;
import com.amazon.opendistroforelasticsearch.sql.data.model.ExprIntervalValue;
import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue;
@@ -54,8 +55,8 @@ public void register(BuiltinFunctionRepository repository) {
private FunctionResolver interval() {
return define(BuiltinFunctionName.INTERVAL.getName(),
- impl(IntervalClause::interval, INTERVAL, INTEGER, STRING),
- impl(IntervalClause::interval, INTERVAL, LONG, STRING));
+ impl(nullMissingHandling(IntervalClause::interval), INTERVAL, INTEGER, STRING),
+ impl(nullMissingHandling(IntervalClause::interval), INTERVAL, LONG, STRING));
}
private ExprValue interval(ExprValue value, ExprValue unit) {
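
The change above wraps `IntervalClause::interval` with `nullMissingHandling` so the implementation never sees NULL or MISSING inputs. Below is a rough standalone sketch of that decorator pattern; the sentinel enum and signatures are simplified stand-ins, not the engine's `FunctionDSL` or `ExprNullValue`/`ExprMissingValue` types.

```java
import java.util.function.BinaryOperator;

/** Sketch of null/missing propagation around a function implementation (illustrative only). */
public class NullMissingHandlingSketch {

  enum Sentinel { NULL, MISSING }   // stand-ins for ExprNullValue / ExprMissingValue

  static BinaryOperator<Object> nullMissingHandling(BinaryOperator<Object> impl) {
    return (left, right) -> {
      if (left == Sentinel.MISSING || right == Sentinel.MISSING) {
        return Sentinel.MISSING;      // missing short-circuits before the implementation runs
      }
      if (left == Sentinel.NULL || right == Sentinel.NULL) {
        return Sentinel.NULL;         // so does null
      }
      return impl.apply(left, right); // the implementation only sees concrete values
    };
  }

  public static void main(String[] args) {
    // The wrapped lambda plays the role of IntervalClause::interval.
    BinaryOperator<Object> interval = nullMissingHandling((value, unit) -> value + " " + unit);
    System.out.println(interval.apply(1, "day"));                // 1 day
    System.out.println(interval.apply(Sentinel.NULL, "day"));    // NULL
    System.out.println(interval.apply(Sentinel.MISSING, "day")); // MISSING
  }
}
```
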
diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java
index 551aa77894..6faafcb0c7 100644
--- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java
+++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/ReferenceExpressionTest.java
@@ -118,6 +118,23 @@ public void not_exist_path() {
assertTrue(actualValue.isMissing());
}
+ @Test
+ public void object_field_contain_dot() {
+ ReferenceExpression expr = new ReferenceExpression("address.local.state", STRING);
+ ExprValue actualValue = expr.resolve(tuple());
+
+ assertTrue(actualValue.isMissing());
+ }
+
+ @Test
+ public void inner_non_object_field_contain_dot() {
+ ReferenceExpression expr = new ReferenceExpression("address.project.year", INTEGER);
+ ExprValue actualValue = expr.resolve(tuple());
+
+ assertEquals(INTEGER, actualValue.type());
+ assertEquals(1990, actualValue.integerValue());
+ }
+
/**
* {
* "name": "bob smith"
@@ -128,19 +145,28 @@ public void not_exist_path() {
* "address": {
* "state": "WA",
* "city": "seattle"
+ * "project.year": 1990
+ * },
+ * "address.local": {
+ * "state": "WA",
* }
* }
*/
private ExprTupleValue tuple() {
ExprValue address =
- ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA", "city", "seattle"));
+ ExprValueUtils.tupleValue(
+ ImmutableMap.of("state", "WA", "city", "seattle", "project.year", 1990));
ExprValue project =
ExprValueUtils.tupleValue(ImmutableMap.of("year", 2020));
+ ExprValue addressLocal =
+ ExprValueUtils.tupleValue(ImmutableMap.of("state", "WA"));
ExprTupleValue tuple = ExprTupleValue.fromExprValueMap(ImmutableMap.of(
"name", new ExprStringValue("bob smith"),
"project.year", new ExprIntegerValue(1990),
"project", project,
- "address", address));
+ "address", address,
+ "address.local", addressLocal
+ ));
return tuple;
}
}
\ No newline at end of file
diff --git a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java
index ff7a7ea010..043f793b01 100644
--- a/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java
+++ b/core/src/test/java/com/amazon/opendistroforelasticsearch/sql/expression/datetime/IntervalClauseTest.java
@@ -16,9 +16,13 @@
package com.amazon.opendistroforelasticsearch.sql.expression.datetime;
import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.intervalValue;
+import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.missingValue;
+import static com.amazon.opendistroforelasticsearch.sql.data.model.ExprValueUtils.nullValue;
+import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTEGER;
import static com.amazon.opendistroforelasticsearch.sql.data.type.ExprCoreType.INTERVAL;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.mockito.Mockito.when;
import com.amazon.opendistroforelasticsearch.sql.data.model.ExprValue;
import com.amazon.opendistroforelasticsearch.sql.exception.ExpressionEvaluationException;
@@ -39,6 +43,12 @@ public class IntervalClauseTest extends ExpressionTestBase {
@Mock
Environment<Expression, ExprValue> env;
+ @Mock
+ Expression nullRef;
+
+ @Mock
+ Expression missingRef;
+
@Test
public void microsecond() {
FunctionExpression expr = dsl.interval(DSL.literal(1), DSL.literal("microsecond"));
@@ -114,4 +124,22 @@ public void to_string() {
FunctionExpression expr = dsl.interval(DSL.literal(1), DSL.literal("day"));
assertEquals("interval(1, \"day\")", expr.toString());
}
+
+ @Test
+ public void null_value() {
+ when(nullRef.type()).thenReturn(INTEGER);
+ when(nullRef.valueOf(env)).thenReturn(nullValue());
+ FunctionExpression expr = dsl.interval(nullRef, DSL.literal("day"));
+ assertEquals(INTERVAL, expr.type());
+ assertEquals(nullValue(), expr.valueOf(env));
+ }
+
+ @Test
+ public void missing_value() {
+ when(missingRef.type()).thenReturn(INTEGER);
+ when(missingRef.valueOf(env)).thenReturn(missingValue());
+ FunctionExpression expr = dsl.interval(missingRef, DSL.literal("day"));
+ assertEquals(INTERVAL, expr.type());
+ assertEquals(missingValue(), expr.valueOf(env));
+ }
}
diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java
index bec7d3150d..b017839616 100644
--- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java
+++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ElasticsearchExecutionProtector.java
@@ -77,7 +77,7 @@ public PhysicalPlan visitRename(RenameOperator node, Object context) {
*/
@Override
public PhysicalPlan visitTableScan(TableScanOperator node, Object context) {
- return new ResourceMonitorPlan(node, resourceMonitor);
+ return doProtect(node);
}
@Override
@@ -111,10 +111,14 @@ public PhysicalPlan visitHead(HeadOperator node, Object context) {
);
}
+ /**
+ * Decorate the input node with {@link ResourceMonitorPlan} so that an aggregate
+ * window function cannot pre-load too much data into memory in the worst case.
+ */
@Override
public PhysicalPlan visitWindow(WindowOperator node, Object context) {
return new WindowOperator(
- visitInput(node.getInput(), context),
+ doProtect(visitInput(node.getInput(), context)),
node.getWindowFunction(),
node.getWindowDefinition());
}
@@ -124,11 +128,10 @@ public PhysicalPlan visitWindow(WindowOperator node, Object context) {
*/
@Override
public PhysicalPlan visitSort(SortOperator node, Object context) {
- return new ResourceMonitorPlan(
+ return doProtect(
new SortOperator(
visitInput(node.getInput(), context),
- node.getSortList()),
- resourceMonitor);
+ node.getSortList()));
}
/**
@@ -155,4 +158,16 @@ PhysicalPlan visitInput(PhysicalPlan node, Object context) {
return node.accept(this, context);
}
}
+
+ private PhysicalPlan doProtect(PhysicalPlan node) {
+ if (isProtected(node)) {
+ return node;
+ }
+ return new ResourceMonitorPlan(node, resourceMonitor);
+ }
+
+ private boolean isProtected(PhysicalPlan node) {
+ return (node instanceof ResourceMonitorPlan);
+ }
+
}
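
The new `doProtect`/`isProtected` pair is a guard that keeps a plan from being wrapped in `ResourceMonitorPlan` twice. A minimal sketch of the same wrap-only-once pattern follows; the plan types are placeholders, not the project's physical plan classes.

```java
/** Sketch of an idempotent decorator: wrap only if not already wrapped. */
public class ProtectOnceSketch {

  interface Plan { }

  static class ScanPlan implements Plan { }

  static class MonitoredPlan implements Plan {
    final Plan delegate;
    MonitoredPlan(Plan delegate) { this.delegate = delegate; }
  }

  static Plan doProtect(Plan node) {
    if (node instanceof MonitoredPlan) {
      return node;                     // already protected, do not double-wrap
    }
    return new MonitoredPlan(node);
  }

  public static void main(String[] args) {
    Plan once = doProtect(new ScanPlan());
    Plan twice = doProtect(once);      // second call is a no-op
    System.out.println(once == twice); // true
  }
}
```
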
diff --git a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java
index 0225a46974..112e8dd2c3 100644
--- a/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java
+++ b/elasticsearch/src/main/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/protector/ResourceMonitorPlan.java
@@ -33,6 +33,12 @@
@RequiredArgsConstructor
@EqualsAndHashCode
public class ResourceMonitorPlan extends PhysicalPlan {
+
+ /**
+ * Number of calls to the delegate's next() method between two resource checks.
+ */
+ public static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000;
+
/**
* Delegated PhysicalPlan.
*/
@@ -44,6 +50,13 @@ public class ResourceMonitorPlan extends PhysicalPlan {
@ToString.Exclude
private final ResourceMonitor monitor;
+ /**
+ * Number of calls made to the delegate's next() method so far.
+ */
+ @EqualsAndHashCode.Exclude
+ private long nextCallCount = 0L;
+
+
@Override
public <R, C> R accept(PhysicalPlanNodeVisitor<R, C> visitor, C context) {
return delegate.accept(visitor, context);
@@ -74,6 +87,10 @@ public boolean hasNext() {
@Override
public ExprValue next() {
+ boolean shouldCheck = (++nextCallCount % NUMBER_OF_NEXT_CALL_TO_CHECK == 0);
+ if (shouldCheck && !this.monitor.isHealthy()) {
+ throw new IllegalStateException("resource is not enough to load next row, quit.");
+ }
return delegate.next();
}
}
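
The counter added to `next()` amortizes the health check: the monitor is consulted only once per `NUMBER_OF_NEXT_CALL_TO_CHECK` calls instead of on every row. Here is a standalone sketch of the pattern over a plain `Iterator`, with a `BooleanSupplier` standing in for `ResourceMonitor::isHealthy`; the class is illustrative, not the project's implementation.

```java
import java.util.Iterator;
import java.util.List;
import java.util.function.BooleanSupplier;

/** Sketch of the "check every N calls" pattern used by ResourceMonitorPlan.next(). */
public class PeriodicCheckIterator<T> implements Iterator<T> {

  static final long NUMBER_OF_NEXT_CALL_TO_CHECK = 1000;

  private final Iterator<T> delegate;
  private final BooleanSupplier isHealthy;   // plays the role of ResourceMonitor::isHealthy
  private long nextCallCount = 0L;

  public PeriodicCheckIterator(Iterator<T> delegate, BooleanSupplier isHealthy) {
    this.delegate = delegate;
    this.isHealthy = isHealthy;
  }

  @Override
  public boolean hasNext() {
    return delegate.hasNext();
  }

  @Override
  public T next() {
    // Only every NUMBER_OF_NEXT_CALL_TO_CHECK-th call pays for the health check.
    boolean shouldCheck = (++nextCallCount % NUMBER_OF_NEXT_CALL_TO_CHECK == 0);
    if (shouldCheck && !isHealthy.getAsBoolean()) {
      throw new IllegalStateException("resource is not enough to load next row, quit.");
    }
    return delegate.next();
  }

  public static void main(String[] args) {
    Iterator<Integer> rows =
        new PeriodicCheckIterator<>(List.of(1, 2, 3).iterator(), () -> true);
    while (rows.hasNext()) {
      System.out.println(rows.next());
    }
  }
}
```
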
diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java
index 03af6f5641..0a38cc5740 100644
--- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java
+++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ElasticsearchExecutionProtectorTest.java
@@ -50,6 +50,7 @@
import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.AvgAggregator;
import com.amazon.opendistroforelasticsearch.sql.expression.aggregation.NamedAggregator;
import com.amazon.opendistroforelasticsearch.sql.expression.window.WindowDefinition;
+import com.amazon.opendistroforelasticsearch.sql.expression.window.aggregation.AggregateWindowFunction;
import com.amazon.opendistroforelasticsearch.sql.expression.window.ranking.RankFunction;
import com.amazon.opendistroforelasticsearch.sql.monitor.ResourceMonitor;
import com.amazon.opendistroforelasticsearch.sql.planner.physical.PhysicalPlan;
@@ -213,6 +214,50 @@ public void testProtectSortForWindowOperator() {
windowDefinition)));
}
+ @Test
+ public void testProtectWindowOperatorInput() {
+ NamedExpression avg = named(mock(AggregateWindowFunction.class));
+ WindowDefinition windowDefinition = mock(WindowDefinition.class);
+
+ assertEquals(
+ window(
+ resourceMonitor(
+ values()),
+ avg,
+ windowDefinition),
+ executionProtector.protect(
+ window(
+ values(),
+ avg,
+ windowDefinition)));
+ }
+
+ @SuppressWarnings("unchecked")
+ @Test
+ public void testNotProtectWindowOperatorInputIfAlreadyProtected() {
+ NamedExpression avg = named(mock(AggregateWindowFunction.class));
+ Pair sortItem =
+ ImmutablePair.of(DEFAULT_ASC, DSL.ref("age", INTEGER));
+ WindowDefinition windowDefinition =
+ new WindowDefinition(emptyList(), ImmutableList.of(sortItem));
+
+ assertEquals(
+ window(
+ resourceMonitor(
+ sort(
+ values(emptyList()),
+ sortItem)),
+ avg,
+ windowDefinition),
+ executionProtector.protect(
+ window(
+ sort(
+ values(emptyList()),
+ sortItem),
+ avg,
+ windowDefinition)));
+ }
+
@Test
public void testWithoutProtection() {
Expression filterExpr = literal(ExprBooleanValue.of(true));
diff --git a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java
index de2e0d157b..c1fb77fe40 100644
--- a/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java
+++ b/elasticsearch/src/test/java/com/amazon/opendistroforelasticsearch/sql/elasticsearch/executor/ResourceMonitorPlanTest.java
@@ -73,8 +73,26 @@ void openSuccess() {
@Test
void nextSuccess() {
- monitorPlan.next();
- verify(plan, times(1)).next();
+ when(resourceMonitor.isHealthy()).thenReturn(true);
+
+ for (int i = 1; i <= 1000; i++) {
+ monitorPlan.next();
+ }
+ verify(resourceMonitor, times(1)).isHealthy();
+ verify(plan, times(1000)).next();
+ }
+
+ @Test
+ void nextExceedResourceLimit() {
+ when(resourceMonitor.isHealthy()).thenReturn(false);
+
+ for (int i = 1; i < 1000; i++) {
+ monitorPlan.next();
+ }
+
+ IllegalStateException exception =
+ assertThrows(IllegalStateException.class, () -> monitorPlan.next());
+ assertEquals("resource is not enough to load next row, quit.", exception.getMessage());
}
@Test
diff --git a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java
index 78881da080..a21fc286ac 100644
--- a/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java
+++ b/integ-test/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/NullLiteralIT.java
@@ -56,6 +56,14 @@ public void testNullLiteralInFunction() {
rows(null, null));
}
+ @Test
+ public void testNullLiteralInInterval() {
+ verifyDataRows(
+ query("SELECT INTERVAL NULL DAY, INTERVAL 60 * 60 * 24 * (NULL - FLOOR(NULL)) SECOND"),
+ rows(null, null)
+ );
+ }
+
private JSONObject query(String sql) {
return new JSONObject(executeQuery(sql, "jdbc"));
}
diff --git a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java
index b23798274b..04a351edfa 100644
--- a/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java
+++ b/legacy/src/main/java/com/amazon/opendistroforelasticsearch/sql/legacy/utils/QueryDataAnonymizer.java
@@ -47,7 +47,7 @@ public static String anonymizeData(String query) {
.replaceAll("false", "boolean_literal")
.replaceAll("[\\n][\\t]+", " ");
} catch (Exception e) {
- LOG.error("Caught an exception when removing sensitive data", e);
+ LOG.warn("Caught an exception when anonymizing sensitive data");
resultQuery = query;
}
return resultQuery;
diff --git a/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md b/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md
index f7d60cc949..41dbf888ba 100644
--- a/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md
+++ b/sql-odbc/docs/dev/BUILD_INSTRUCTIONS.md
@@ -2,85 +2,104 @@
## Windows
-### Dependencies
+### Requirements
-* [cmake](https://cmake.org/install/)
-* [Visual Studio 2019](https://visualstudio.microsoft.com/vs/) (Other versions may work, but only 2019 has been tested)
-* [ODBC Driver source code](https://github.com/opendistro-for-elasticsearch/sql/tree/master/sql-odbc)
+* Install [cmake](https://cmake.org/install/)
+* Install [Visual Studio 2019](https://visualstudio.microsoft.com/vs/)
+ * Other versions may work, but only 2019 has been tested
-### Build
+**NOTE:** All Windows/`.ps1` scripts must be run from a [Developer Powershell](https://devblogs.microsoft.com/visualstudio/the-powershell-you-know-and-love-now-with-a-side-of-visual-studio/).
-#### with Visual Studio
+> A shortcut is installed on your system with Visual Studio (**"Developer Powershell for VS 2019"**)
-Run `./build_win_.ps1` to generate a VS2019 project for building/testing the driver. (the build may fail, but should still generate a `.sln` file)
+> Programs launched with this prompt (ex: VS Code) also have access to the Developer shell.
-The solution can be found at `\build\odbc\build\global_make_list.sln`.
+> For more information regarding the Developer Powershell: https://docs.microsoft.com/en-us/cpp/build/building-on-the-command-line?view=msvc-160
-#### with Developer Powershell
+### Build
-Use `./build_win_.ps1` to build the driver from a Developer Powershell prompt.
+```
+.\build_win_.ps1
+```
-> A shortcut is installed on your system with Visual Studio (search for **"Developer Powershell for VS 2019"**)
+A Visual Studio 2019 solution will also be generated for building/testing the driver with Visual Studio. (Note: if the build fails, the solution will still be generated.)
-> Programs launched with this prompt (ex: VS Code) also have access to the Developer shell.
+Solution file: `\build\odbc\cmake\global_make_list.sln`.
-### Build Output
+### Output
```
build
-└--
- └-- odbc
- └-- bin
- └--
- └-- build
- └-- lib
- └-- aws-sdk
- └-- build
- └-- install
+└-- odbc
+ └-- bin
+ └--
+ └-- cmake
+ └-- lib
+└-- aws-sdk
+ └-- build
+ └-- install
```
-* Driver DLL: `.\build\\odbc\bin\\odfesqlodbc.dll`
-* Test binaries folder: `.\build\\odbc\bin\`
+* Driver DLL: `.\build\odbc\bin\\odfesqlodbc.dll`
+* Test binaries folder: `.\build\odbc\bin\\`
### Packaging
-From a Developer Powershell, run:
+**Note:** If you make changes to the driver code or CMake project files, re-run the `build_win_.ps1` script before running the following command.
+
```
-msbuild .\build\Release\odbc\PACKAGE.vcxproj -p:Configuration=Release
+msbuild .\build\odbc\PACKAGE.vcxproj -p:Configuration=Release
```
-An installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Windows--bit.msi` will be generated in the build directory.
+`Open Distro for Elasticsearch SQL ODBC Driver--Windows--bit.msi` will be generated in the build directory.
+### Testing
+See [run_tests.md](./run_tests.md)
## Mac
-(TODO: upgrade build scripts & documentation for Mac)
-### Dependencies
+### Requirements
-Homebrew must be installed to manage packages, to install homebrew see the [homebrew homepage](https://brew.sh/).
-Using homebrew, install the following packages using the command provided:
->brew install [package]
->
->* curl
->* cmake
->* libiodbc
+[Homebrew](https://brew.sh/) is required for installing the following necessary packages.
+```
+brew install curl
+brew install cmake
+brew install libiodbc
+```
-### Building the Driver
+### Build
-From a Bash shell:
+```
+./build_mac_.sh
+```
-`./build_mac_.sh`
+### Output
-### Output Location on Mac
+```
+build
+└-- odbc
+ └-- bin
+ └-- lib
+└-- cmake-build64
+ └--
+```
-Compiling on Mac will output the tests to **bin64** and the driver to **lib64**. There are also some additional test infrastructure files which output to the **lib64** directory.
+* Driver library: `./build/odbc/lib/libodfesqlodbc.dylib`
+* Test binaries folder: `./build/odbc/bin/`
### Packaging
-Run below command from the project's build directory.
->cpack .
+**Note:** If you make changes to the driver code or CMake project files, re-run the `build_mac_.sh` script before running the following command.
+
+```
+cd cmake-build64/
+cpack .
+```
+
+`Open Distro for Elasticsearch SQL ODBC Driver--Darwin.pkg` will be generated in the build directory.
-Installer named as `Open Distro for Elasticsearch SQL ODBC Driver--Darwin.pkg` will be generated in the build directory.
+### Testing
+See [run_tests.md](./run_tests.md)
## General Build Info
diff --git a/sql-odbc/docs/dev/run_tests.md b/sql-odbc/docs/dev/run_tests.md
index 903e5f8f0a..8f7fb1b87a 100644
--- a/sql-odbc/docs/dev/run_tests.md
+++ b/sql-odbc/docs/dev/run_tests.md
@@ -1,14 +1,16 @@
# ODFE SQL ODBC Driver Testing
-## Preparation
+## Requirements
* Latest version of [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/)
+* [Required datasets loaded](#set-up-test-datasets)
+* [DSN configured](#set-up-dsn)
-### Loading Test Datasets
+### Set up test datasets
-Loading a dataset requires an [elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) service running with [kibana](https://opendistro.github.io/for-elasticsearch-docs/docs/kibana/). If either of these are missing, please refer to the documentation on how to set them up.
+Loading a dataset requires an [Open Distro for Elasticsearch](https://opendistro.github.io/for-elasticsearch-docs/docs/install/) service running with [Kibana](https://opendistro.github.io/for-elasticsearch-docs/docs/kibana/). If either of these is missing, please refer to the documentation on how to set them up.
-Note, if you wish to work with SSL/TLS, you need to configure Elasticsearch and Kibana to support it. See Working With SSL/TLS below.
+Note, if you wish to work with SSL/TLS, you need to configure ODFE and Kibana to support it. See the [build instructions](./BUILD_INSTRUCTIONS.md) for more info.
First load the sample datasets provided by kibana.
@@ -23,14 +25,9 @@ Select the wrench on the left control panel. Enter the following commands into t
* [kibana_sample_data_types](./datasets/kibana_sample_data_types.md)
-## Running Tests
-
-Tests can be **executed directly**, or by using the **Test Runner**.
-
-**NOTES:**
+### Set up DSN
-* A test DSN named `test_dsn` must be set up in order for certain tests in ITODBCConnection to pass. To configure the DSN, see the instructions, below.
-* Datasets must be loaded into Elasticsearch using [kibana](https://www.elastic.co/guide/en/kibana/current/connect-to-elasticsearch.html). See the section on loading datasets below.
+A test DSN named `test_dsn` must be set up in order for certain tests in ITODBCConnection to pass. To configure the DSN, see the instructions below.
### Windows Test DSN Setup
@@ -50,7 +47,12 @@ Tests can be **executed directly**, or by using the **Test Runner**.
* `export ODBCINSTINI=/src/IntegrationTests/ITODBCConnection/test_odbcinst.ini`
* Manually add the entries to your existing `odbc.ini` and `odbcinst.ini` entries. (normally found at `~/.odbc.ini` and `~/.odbcinst.ini`)
-### Running Tests directly on Windows
+## Running Tests
+Tests can be executed directly, or by using the **Test Runner**.
+
+### Direct
+
+#### Windows
Tests can be executed directly using **Visual Studio** by setting the desired test as a **Start up Project**
@@ -60,23 +62,23 @@ Tests can be executed directly using **Visual Studio** by setting the desired te
For more information, see the [Visual Studio Console Application documentation](https://docs.microsoft.com/en-us/cpp/build/vscpp-step-2-build?view=vs-2019).
-### Running Tests directly on Mac
+#### Mac
Tests can be executed using a command line interface. From the project root directory, enter:
> **bin64/**
-### Running Tests using the Test Runner
+### Test Runner
The **Test Runner** requires [python](https://wiki.python.org/moin/BeginnersGuide/Download) to be installed on the system. Running the **Test Runner** will execute all the tests and compile a report with the results. The report indicates the execution status of all tests along with the execution time. To find error details of any failed test, hover over the test.
-#### Running Tests using the Test Runner on Windows
+#### Windows
Open the project's root directory in a command line interface of your choice. Execute
>**.\run_test_runner.bat**
The **Test Runner** has been tried and tested with [Python3.8](https://www.python.org/downloads/release/python-380/) on **Windows systems**. Other versions of Python may work, but are untested.
-#### Running Tests using the Test Runner on Mac
+#### Mac
Open the project's root directory in a command line interface of your choice. Execute
>**./run_test_runner.sh**
@@ -87,12 +89,15 @@ The **Test Runner** has been tried and tested with [Python3.7.6](https://www.pyt
(using a CMake script provided by George Cave (StableCoder) under the Apache 2.0 license, found [here](https://github.com/StableCoder/cmake-scripts/blob/master/code-coverage.cmake))
-> **NOTE**: Before building with coverage, make sure the following directory is in your PATH environment variable:
-> `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin`
+#### Requirements
+* `llvm-cov` in your PATH environment variable
+ * Possible locations for this binary:
+ * `/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin`
+ * `/Library/Developer/CommandLineTools/usr/bin`
To build the tests with code coverage enabled, set the `CODE_COVERAGE` variable to `ON` when preparing your CMake build.
```bash
-cmake ... -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON
+cmake -DBUILD_WITH_TESTS=ON -DCODE_COVERAGE=ON
```
To get coverage for the driver library, you must use the `ccov-all` target, which runs all test suites and components with coverage.
diff --git a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java
index 39ac8575fa..3a62461d41 100644
--- a/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java
+++ b/sql/src/main/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilder.java
@@ -21,8 +21,7 @@
import com.amazon.opendistroforelasticsearch.sql.ast.Node;
import com.amazon.opendistroforelasticsearch.sql.ast.expression.AggregateFunction;
import com.amazon.opendistroforelasticsearch.sql.ast.expression.Alias;
-import com.amazon.opendistroforelasticsearch.sql.ast.expression.Function;
-import com.amazon.opendistroforelasticsearch.sql.ast.expression.Literal;
+import com.amazon.opendistroforelasticsearch.sql.ast.expression.QualifiedName;
import com.amazon.opendistroforelasticsearch.sql.ast.expression.UnresolvedExpression;
import com.amazon.opendistroforelasticsearch.sql.ast.tree.Aggregation;
import com.amazon.opendistroforelasticsearch.sql.ast.tree.UnresolvedPlan;
@@ -125,8 +124,7 @@ private List replaceGroupByItemIfAliasOrOrdinal() {
*/
private Optional<UnresolvedExpression> findNonAggregatedItemInSelect() {
return querySpec.getSelectItems().stream()
- .filter(this::isNonAggregatedExpression)
- .filter(this::isNonLiteralFunction)
+ .filter(this::isNonAggregateOrLiteralExpression)
.findFirst();
}
@@ -134,27 +132,18 @@ private boolean isAggregatorNotFoundAnywhere() {
return querySpec.getAggregators().isEmpty();
}
- private boolean isNonLiteralFunction(UnresolvedExpression expr) {
- // The base case for recursion
- if (expr instanceof Literal) {
+ private boolean isNonAggregateOrLiteralExpression(UnresolvedExpression expr) {
+ if (expr instanceof AggregateFunction) {
return false;
}
- if (expr instanceof Function) {
- List<? extends Node> children = expr.getChild();
- return children.stream().anyMatch(child ->
- isNonLiteralFunction((UnresolvedExpression) child));
- }
- return true;
- }
- private boolean isNonAggregatedExpression(UnresolvedExpression expr) {
- if (expr instanceof AggregateFunction) {
- return false;
+ if (expr instanceof QualifiedName) {
+ return true;
}
List<? extends Node> children = expr.getChild();
- return children.stream()
- .allMatch(child -> isNonAggregatedExpression((UnresolvedExpression) child));
+ return children.stream().anyMatch(child ->
+ isNonAggregateOrLiteralExpression((UnresolvedExpression) child));
}
}
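
The rewritten check flags a select item as soon as it contains a bare column reference (`QualifiedName`) that is not nested under an aggregate function; literals and literal-only expressions have no such reference and are no longer reported. The sketch below reproduces the same recursion over a toy AST; the record types are illustrative, not the project's `UnresolvedExpression` hierarchy.

```java
import java.util.List;

/** Sketch of the non-aggregated-column check over a minimal expression tree. */
public class NonAggregateCheckSketch {

  interface Expr { List<Expr> children(); }

  record QualifiedName(String name) implements Expr {
    public List<Expr> children() { return List.of(); }
  }

  record Literal(Object value) implements Expr {
    public List<Expr> children() { return List.of(); }
  }

  record AggregateFunction(String name, Expr arg) implements Expr {
    public List<Expr> children() { return List.of(arg); }
  }

  record Function(String name, List<Expr> args) implements Expr {
    public List<Expr> children() { return args; }
  }

  static boolean isNonAggregateOrLiteralExpression(Expr expr) {
    if (expr instanceof AggregateFunction) {
      return false;                    // anything under an aggregator is fine
    }
    if (expr instanceof QualifiedName) {
      return true;                     // a bare column reference
    }
    // Literals have no children, so anyMatch() is false and they are not flagged.
    return expr.children().stream()
        .anyMatch(NonAggregateCheckSketch::isNonAggregateOrLiteralExpression);
  }

  public static void main(String[] args) {
    // SELECT CAST(1 AS LONG) ... -> not flagged (literal only)
    System.out.println(isNonAggregateOrLiteralExpression(
        new Function("CAST", List.of(new Literal(1)))));          // false
    // SELECT ABS(age) ...        -> flagged (bare column reference)
    System.out.println(isNonAggregateOrLiteralExpression(
        new Function("ABS", List.of(new QualifiedName("age")))));  // true
    // SELECT AVG(age) ...        -> not flagged (aggregated)
    System.out.println(isNonAggregateOrLiteralExpression(
        new AggregateFunction("AVG", new QualifiedName("age"))));  // false
  }
}
```
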
diff --git a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java
index 87183e4051..e4e831e5b3 100644
--- a/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java
+++ b/sql/src/test/java/com/amazon/opendistroforelasticsearch/sql/sql/parser/AstAggregationBuilderTest.java
@@ -133,6 +133,27 @@ void can_build_implicit_group_by_for_aggregator_in_having_clause() {
hasGroupByItems(),
hasAggregators(
alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+
+ assertThat(
+ buildAggregation("SELECT INTERVAL 1 DAY FROM test HAVING AVG(age) > 30"),
+ allOf(
+ hasGroupByItems(),
+ hasAggregators(
+ alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+
+ assertThat(
+ buildAggregation("SELECT CAST(1 AS LONG) FROM test HAVING AVG(age) > 30"),
+ allOf(
+ hasGroupByItems(),
+ hasAggregators(
+ alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
+
+ assertThat(
+ buildAggregation("SELECT CASE WHEN true THEN 1 ELSE 2 END FROM test HAVING AVG(age) > 30"),
+ allOf(
+ hasGroupByItems(),
+ hasAggregators(
+ alias("AVG(age)", aggregate("AVG", qualifiedName("age"))))));
}
@Test
diff --git a/workbench/public/components/Main/main.tsx b/workbench/public/components/Main/main.tsx
index c6c1074d83..72713c09de 100644
--- a/workbench/public/components/Main/main.tsx
+++ b/workbench/public/components/Main/main.tsx
@@ -262,10 +262,20 @@ export class Main extends React.Component {
};
}
if (!response.data.ok) {
+ let err = response.data.resp;
+ console.log("Error occurred when processing query response: ", err)
+
+ // Mark fulfilled as true as long as the response body contains data
+ if (response.data.body) {
+ return {
+ fulfilled: true,
+ errorMessage: err,
+ data: response.data.body
+ }
+ }
return {
fulfilled: false,
- errorMessage: response.data.resp,
- data: response.data.body,
+ errorMessage: err,
};
}