diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 9ab3c7680..72741d518 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -16,11 +16,6 @@ variables:
backwardCompatibleRelease: '1.0.0'
forwardCompatibleRelease: '1.0.0'
- backwardCompatibleTestOptions_Windows_2_3: ""
- forwardCompatibleTestOptions_Windows_2_3: ""
- backwardCompatibleTestOptions_Linux_2_3: ""
- forwardCompatibleTestOptions_Linux_2_3: ""
-
backwardCompatibleTestOptions_Windows_2_4: ""
forwardCompatibleTestOptions_Windows_2_4: ""
backwardCompatibleTestOptions_Linux_2_4: ""
@@ -206,53 +201,6 @@ stages:
backwardCompatibleRelease: $(backwardCompatibleRelease)
forwardCompatibleRelease: $(forwardCompatibleRelease)
tests:
- - version: '2.3.0'
- jobOptions:
- # 'Hosted Ubuntu 1604' test is disabled due to https://github.com/dotnet/spark/issues/753
- - pool: 'Hosted VS2017'
- testOptions: ''
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
- - version: '2.3.1'
- jobOptions:
- - pool: 'Hosted VS2017'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
- - pool: 'Hosted Ubuntu 1604'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
- - version: '2.3.2'
- jobOptions:
- - pool: 'Hosted VS2017'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
- - pool: 'Hosted Ubuntu 1604'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
- - version: '2.3.3'
- jobOptions:
- - pool: 'Hosted VS2017'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
- - pool: 'Hosted Ubuntu 1604'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
- - version: '2.3.4'
- jobOptions:
- - pool: 'Hosted VS2017'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Windows_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Windows_2_3)
- - pool: 'Hosted Ubuntu 1604'
- testOptions: ""
- backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_3)
- forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_3)
- version: '2.4.0'
jobOptions:
- pool: 'Hosted VS2017'
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/AssemblyKernelExtension.cs b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/AssemblyKernelExtension.cs
index a99e6ee0b..da2521283 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/AssemblyKernelExtension.cs
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/AssemblyKernelExtension.cs
@@ -145,10 +145,10 @@ private bool IsPathValid(string path)
}
Version version = SparkEnvironment.SparkVersion;
- return (version.Major, version.Minor, version.Build) switch
+ return version.Major switch
{
- (2, _, _) => false,
- (3, 0, _) => true,
+ 2 => false,
+ 3 => true,
_ => throw new NotSupportedException($"Spark {version} not supported.")
};
}
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/StopWordsRemoverTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/StopWordsRemoverTests.cs
index 4bf614a44..832304e43 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/StopWordsRemoverTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/StopWordsRemoverTests.cs
@@ -3,7 +3,6 @@
// See the LICENSE file in the project root for more information.
using System.IO;
-using Microsoft.Spark.E2ETest.Utils;
using Microsoft.Spark.ML.Feature;
using Microsoft.Spark.Sql;
using Microsoft.Spark.Sql.Types;
@@ -23,10 +22,10 @@ public StopWordsRemoverTests(SparkFixture fixture) : base(fixture)
}
/// <summary>
- /// Test signatures for APIs up to Spark 2.3.*.
+ /// Test signatures for APIs up to Spark 2.4.*.
/// </summary>
[Fact]
- public void TestSignaturesV2_3_X()
+ public void TestSignaturesV2_4_X()
{
string expectedUid = "theUidWithOutLocale";
string expectedInputCol = "input_col";
@@ -62,16 +61,9 @@ public void TestSignaturesV2_3_X()
Assert.IsType<DataFrame>(stopWordsRemover.Transform(input));
TestFeatureBase(stopWordsRemover, "inputCol", "input_col");
- }
- /// <summary>
- /// Test signatures for APIs introduced in Spark 2.4.*.
- /// </summary>
- [SkipIfSparkVersionIsLessThan(Versions.V2_4_0)]
- public void TestSignaturesV2_4_X()
- {
string expectedLocale = "en_GB";
- StopWordsRemover stopWordsRemover = new StopWordsRemover().SetLocale(expectedLocale);
+ stopWordsRemover.SetLocale(expectedLocale);
Assert.Equal(expectedLocale, stopWordsRemover.GetLocale());
}
}
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs
index 40e5a774e..e0b9fb6a0 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/SparkContextTests.cs
@@ -17,13 +17,13 @@ namespace Microsoft.Spark.E2ETest.IpcTests
public class SparkContextTests
{
/// <summary>
- /// Test signatures for APIs up to Spark 2.3.*.
+ /// Test signatures for APIs up to Spark 2.4.*.
/// </summary>
/// <remarks>
/// For the RDD related tests, refer to <see cref="RDDTests"/>.
/// </remarks>
[Fact]
- public void TestSignaturesV2_3_X()
+ public void TestSignaturesV2_4_X()
{
SparkContext sc = SparkContext.GetOrCreate(new SparkConf());
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs
index 40d4649c8..f5f37dd91 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/CatalogTests.cs
@@ -24,10 +24,10 @@ public CatalogTests(SparkFixture fixture)
}
/// <summary>
- /// Test signatures for APIs up to Spark 2.3.*.
+ /// Test signatures for APIs up to Spark 2.4.*.
/// </summary>
[Fact]
- public void TestSignaturesV2_3_X()
+ public void TestSignaturesV2_4_X()
{
WithTable(_spark, new string[] { "users", "users2", "users3", "users4", "usersp" }, () =>
{
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/ColumnTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/ColumnTests.cs
index 59fa6df99..6ffa2b3c7 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/ColumnTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/ColumnTests.cs
@@ -14,10 +14,10 @@ namespace Microsoft.Spark.E2ETest.IpcTests
public class ColumnTests
{
/// <summary>
- /// Test signatures for APIs up to Spark 2.3.*.
+ /// Test signatures for APIs up to Spark 2.4.*.
/// </summary>
[Fact]
- public void TestSignaturesV2_3_X()
+ public void TestSignaturesV2_4_X()
{
Column col1 = Column("col1");
Column col2 = Column("col2");
@@ -28,22 +28,22 @@ public void TestSignaturesV2_3_X()
Assert.IsType<Column>(col1 == col2);
Assert.IsType<Column>(col1.EqualTo(col2));
-
+
Assert.IsType<Column>(col1 != col2);
Assert.IsType<Column>(col1.NotEqual(col2));
-
- Assert.IsType<Column>(col1 > col2);
- Assert.IsType<Column>(col1 > "hello");
- Assert.IsType<Column>(col1.Gt(col2));
+
+ Assert.IsType<Column>(col1 > col2);
+ Assert.IsType<Column>(col1 > "hello");
+ Assert.IsType<Column>(col1.Gt(col2));
Assert.IsType<Column>(col1.Gt("hello"));
-
- Assert.IsType<Column>(col1 < col2);
- Assert.IsType<Column>(col1 < "hello");
- Assert.IsType<Column>(col1.Lt(col2));
+
+ Assert.IsType<Column>(col1 < col2);
+ Assert.IsType<Column>(col1 < "hello");
+ Assert.IsType<Column>(col1.Lt(col2));
Assert.IsType<Column>(col1.Lt("hello"));
Assert.IsType<Column>(col1 <= col2);
- Assert.IsType<Column>(col1 <= "hello");
+ Assert.IsType<Column>(col1 <= "hello");
Assert.IsType<Column>(col1.Leq(col2));
Assert.IsType<Column>(col1.Leq("hello"));
@@ -59,7 +59,7 @@ public void TestSignaturesV2_3_X()
Assert.IsType<Column>(When(col1 == col2, 0).Otherwise(col2));
Assert.IsType<Column>(When(col1 == col2, 0).Otherwise("hello"));
-
+
Assert.IsType<Column>(col1.Between(col1, col2));
Assert.IsType<Column>(col1.Between(1, 3));
@@ -69,7 +69,7 @@ public void TestSignaturesV2_3_X()
Assert.IsType<Column>(col1 | col2);
Assert.IsType<Column>(col1.Or(col2));
-
+
Assert.IsType<Column>(col1 & col2);
Assert.IsType<Column>(col1.And(col2));
@@ -139,7 +139,7 @@ public void TestSignaturesV2_3_X()
Assert.IsType<Column>(col1.Over(PartitionBy(col1)));
Assert.IsType<Column>(col1.Over());
-
+
Assert.Equal("col1", col1.ToString());
Assert.Equal("col2", col2.ToString());
}
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameReaderTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameReaderTests.cs
index 8b3ccb648..feb9b33ff 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameReaderTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameReaderTests.cs
@@ -20,15 +20,15 @@ public DataFrameReaderTests(SparkFixture fixture)
}
/// <summary>
- /// Test signatures for APIs up to Spark 2.3.*.
+ /// Test signatures for APIs up to Spark 2.4.*.
/// </summary>
[Fact]
- public void TestSignaturesV2_3_X()
+ public void TestSignaturesV2_4_X()
{
DataFrameReader dfr = _spark.Read();
Assert.IsType<DataFrameReader>(dfr.Format("json"));
-
+
Assert.IsType<DataFrameReader>(
dfr.Schema(
new StructType(new[]
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameTests.cs
index 58403b485..b95d71add 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameTests.cs
@@ -437,10 +437,10 @@ private static FxDataFrame CountCharacters(FxDataFrame dataFrame)
}
/// <summary>
- /// Test signatures for APIs up to Spark 2.3.*.
+ /// Test signatures for APIs up to Spark 2.4.*.
/// </summary>
[Fact]
- public void TestSignaturesV2_3_X()
+ public void TestSignaturesV2_4_X()
{
Assert.IsType<Column>(_df["name"]);
Assert.IsType<Column>(_df["age"]);
@@ -569,6 +569,16 @@ public void TestSignaturesV2_3_X()
Assert.IsType<DataFrame>(df.Sum("age"));
Assert.IsType<DataFrame>(df.Sum("age", "tempAge"));
+
+ var values = new List