diff --git a/.gitignore b/.gitignore
index e78c6ab8d..5e938f1c2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,7 +49,7 @@ dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
-# .NET Core
+# .NET
project.lock.json
project.fragment.lock.json
artifacts/
diff --git a/README.md b/README.md
index f6dff9634..e23e85cd2 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@
.NET for Apache Spark is compliant with .NET Standard - a formal specification of .NET APIs that are common across .NET implementations. This means you can use .NET for Apache Spark anywhere you write .NET code allowing you to reuse all the knowledge, skills, code, and libraries you already have as a .NET developer.
-.NET for Apache Spark runs on Windows, Linux, and macOS using .NET Core, or Windows using .NET Framework. It also runs on all major cloud providers including [Azure HDInsight Spark](deployment/README.md#azure-hdinsight-spark), [Amazon EMR Spark](deployment/README.md#amazon-emr-spark), [AWS](deployment/README.md#databricks) & [Azure](deployment/README.md#databricks) Databricks.
+.NET for Apache Spark runs on Windows, Linux, and macOS using .NET 6, or Windows using .NET Framework. It also runs on all major cloud providers including [Azure HDInsight Spark](deployment/README.md#azure-hdinsight-spark), [Amazon EMR Spark](deployment/README.md#amazon-emr-spark), [AWS](deployment/README.md#databricks) & [Azure](deployment/README.md#databricks) Databricks.
**Note**: We currently have a Spark Project Improvement Proposal JIRA at [SPIP: .NET bindings for Apache Spark](https://issues.apache.org/jira/browse/SPARK-27006) to work with the community towards getting .NET support by default into Apache Spark. We highly encourage you to participate in the discussion.
@@ -61,7 +61,7 @@
.NET for Apache Spark releases are available [here](https://github.com/dotnet/spark/releases) and NuGet packages are available [here](https://www.nuget.org/packages/Microsoft.Spark).
## Get Started
-These instructions will show you how to run a .NET for Apache Spark app using .NET Core.
+These instructions will show you how to run a .NET for Apache Spark app using .NET 6.
- [Windows Instructions](docs/getting-started/windows-instructions.md)
- [Ubuntu Instructions](docs/getting-started/ubuntu-instructions.md)
- [MacOs Instructions](docs/getting-started/macos-instructions.md)
@@ -79,8 +79,8 @@ Building from source is very easy and the whole process (from cloning to being a
| | | Instructions |
| :---: | :--- | :--- |
-| ![Windows icon](docs/img/windows-icon-32.png) | **Windows** | - Local - [.NET Framework 4.6.1](docs/building/windows-instructions.md#using-visual-studio-for-net-framework-461)<br>- Local - [.NET Core 3.1](docs/building/windows-instructions.md#using-net-core-cli-for-net-core)<br>|
-| ![Ubuntu icon](docs/img/ubuntu-icon-32.png) | **Ubuntu** | - Local - [.NET Core 3.1](docs/building/ubuntu-instructions.md)<br>- [Azure HDInsight Spark - .NET Core 3.1](deployment/README.md)<br>|
+| ![Windows icon](docs/img/windows-icon-32.png) | **Windows** | - Local - [.NET Framework 4.6.1](docs/building/windows-instructions.md#using-visual-studio-for-net-framework-461)<br>- Local - [.NET 6](docs/building/windows-instructions.md#using-net-cli-for-net-6)<br>|
+| ![Ubuntu icon](docs/img/ubuntu-icon-32.png) | **Ubuntu** | - Local - [.NET 6](docs/building/ubuntu-instructions.md)<br>- [Azure HDInsight Spark - .NET 6](deployment/README.md)<br>|
## Samples
diff --git a/ROADMAP.md b/ROADMAP.md
index 9a9dfbb20..f801cd16e 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -12,7 +12,7 @@ The goal of the .NET for Apache Spark project is to provide an easy to use, .NET
### Performance Optimizations
* Improvements to C# Pickling Library
* Improvements to Arrow .NET Library
-* Exploiting .NET Core 3.0 Vectorization (*)
+* Exploiting .NET Vectorization (*)
* Micro-benchmarking framework for Interop
### Benchmarks
diff --git a/azure-pipelines-e2e-tests-template.yml b/azure-pipelines-e2e-tests-template.yml
index fd23514c5..c7e304ecd 100644
--- a/azure-pipelines-e2e-tests-template.yml
+++ b/azure-pipelines-e2e-tests-template.yml
@@ -20,7 +20,7 @@ stages:
- job: Run_${{ replace(option.pool, ' ', '_') }}
${{ if eq(lower(option.pool), 'windows') }}:
pool:
- vmImage: 'windows-2019'
+ vmImage: 'windows-2022'
${{ else }}:
pool:
${{ if or(eq(variables['System.TeamProject'], 'public'), in(variables['Build.Reason'], 'PullRequest')) }}:
@@ -58,10 +58,10 @@ stages:
mvn -version
- task: UseDotNet@2
- displayName: 'Use .NET Core sdk'
+ displayName: 'Use .NET 6 SDK'
inputs:
packageType: sdk
- version: 3.1.x
+ version: 6.x
installationPath: $(Agent.ToolsDirectory)/dotnet
- task: DownloadBuildArtifacts@0
@@ -71,7 +71,7 @@ stages:
downloadPath: $(Build.ArtifactStagingDirectory)
- pwsh: |
- $framework = "netcoreapp3.1"
+ $framework = "net6.0"
if ($env:AGENT_OS -eq 'Windows_NT') {
$runtimeIdentifier = "win-x64"
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index e31e7b3a6..384ec0779 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -56,7 +56,7 @@ stages:
jobs:
- job: Build
pool:
- vmImage: 'windows-2019'
+ vmImage: 'windows-2022'
variables:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@@ -171,7 +171,7 @@ stages:
- Sign
displayName: Publish Artifacts
pool:
- vmImage: 'windows-2019'
+ vmImage: 'windows-2022'
variables:
${{ if and(ne(variables['System.TeamProject'], 'public'), notin(variables['Build.Reason'], 'PullRequest')) }}:
@@ -210,8 +210,8 @@ stages:
forwardCompatibleRelease: $(forwardCompatibleRelease)
tests:
- version: '2.4.0'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -222,8 +222,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.1'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -234,8 +234,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.3'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -246,8 +246,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.4'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -258,8 +258,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.5'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -270,8 +270,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.6'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -282,8 +282,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.7'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -294,8 +294,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '2.4.8'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -306,8 +306,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_2_4)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_2_4)
- version: '3.0.0'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -318,8 +318,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_0)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_0)
- version: '3.0.1'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -330,8 +330,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_0)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_0)
- version: '3.0.2'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -342,8 +342,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_0)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_0)
- version: '3.1.1'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -354,8 +354,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_1)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_1)
- version: '3.1.2'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -366,8 +366,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_1)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_1)
- version: '3.2.0'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -378,8 +378,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_2)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_2)
- version: '3.2.1'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -390,8 +390,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_2)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_2)
- version: '3.2.2'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
@@ -402,8 +402,8 @@ stages:
backwardCompatibleTestOptions: $(backwardCompatibleTestOptions_Linux_3_2)
forwardCompatibleTestOptions: $(forwardCompatibleTestOptions_Linux_3_2)
- version: '3.2.3'
- enableForwardCompatibleTests: true
- enableBackwardCompatibleTests: true
+ enableForwardCompatibleTests: false
+ enableBackwardCompatibleTests: false
jobOptions:
- pool: 'Windows'
testOptions: ""
diff --git a/benchmark/README.md b/benchmark/README.md
index 8c67bd56d..fb2c99f12 100644
--- a/benchmark/README.md
+++ b/benchmark/README.md
@@ -60,8 +60,7 @@ TPCH timing results is written to stdout in the following form: `TPCH_Result,
```
- **Note**: Ensure that you build the worker and application with .NET Core 3.0 in order to run hardware acceleration queries.
-
+ **Note**: Ensure that you build the worker and application with .NET 6 in order to run hardware acceleration queries.
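To make the note concrete, the vectorized TPCH queries gate their fast paths on hardware intrinsics, which is why a modern target framework matters. Below is a minimal, hypothetical sketch of that pattern — illustrative only, not the benchmark's actual code:

```csharp
using System;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

double[] data = { 1, 2, 3, 4, 5, 6, 7, 8, 9 };
Console.WriteLine(SumDoubles(data));

static double SumDoubles(ReadOnlySpan<double> values)
{
    double sum = 0;
    int i = 0;
    if (Avx.IsSupported)
    {
        // Process four doubles per iteration on AVX-capable hardware.
        var acc = Vector256<double>.Zero;
        for (; i <= values.Length - Vector256<double>.Count; i += Vector256<double>.Count)
        {
            acc = Avx.Add(acc, Vector256.Create(values[i], values[i + 1], values[i + 2], values[i + 3]));
        }
        for (int j = 0; j < Vector256<double>.Count; j++)
        {
            sum += acc.GetElement(j);
        }
    }
    // Scalar remainder (and non-AVX fallback) path.
    for (; i < values.Length; i++)
    {
        sum += values[i];
    }
    return sum;
}
```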
## Python
1. Upload [run_python_benchmark.sh](run_python_benchmark.sh) and all [python tpch benchmark](python/) files to the cluster.
diff --git a/benchmark/csharp/Tpch/Tpch.csproj b/benchmark/csharp/Tpch/Tpch.csproj
index f2694ca67..53f324200 100644
--- a/benchmark/csharp/Tpch/Tpch.csproj
+++ b/benchmark/csharp/Tpch/Tpch.csproj
@@ -2,8 +2,8 @@
<OutputType>Exe</OutputType>
- <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;netcoreapp3.1</TargetFrameworks>
- <TargetFramework Condition="'$(OS)' != 'Windows_NT'">netcoreapp3.1</TargetFramework>
+ <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;net6.0</TargetFrameworks>
+ <TargetFramework Condition="'$(OS)' != 'Windows_NT'">net6.0</TargetFramework>
<RootNamespace>Tpch</RootNamespace>
<AssemblyName>Tpch</AssemblyName>
@@ -16,7 +16,7 @@
-
+
true
diff --git a/deployment/README.md b/deployment/README.md
index e3b45fbcb..dc87f3ac0 100644
--- a/deployment/README.md
+++ b/deployment/README.md
@@ -63,7 +63,7 @@ Microsoft.Spark.Worker is a backend component that lives on the individual worke
## Azure HDInsight Spark
[Azure HDInsight Spark](https://docs.microsoft.com/en-us/azure/hdinsight/spark/apache-spark-overview) is the Microsoft implementation of Apache Spark in the cloud that allows users to launch and configure Spark clusters in Azure. You can use HDInsight Spark clusters to process your data stored in Azure (e.g., [Azure Storage](https://azure.microsoft.com/en-us/services/storage/) and [Azure Data Lake Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-introduction)).
-> **Note:** Azure HDInsight Spark is Linux-based. Therefore, if you are interested in deploying your app to Azure HDInsight Spark, make sure your app is .NET Standard compatible and that you use [.NET Core compiler](https://dotnet.microsoft.com/download) to compile your app.
+> **Note:** Azure HDInsight Spark is Linux-based. Therefore, if you are interested in deploying your app to Azure HDInsight Spark, make sure your app is .NET Standard compatible and that you use the [.NET 6 SDK](https://dotnet.microsoft.com/download) to compile your app.
### Deploy Microsoft.Spark.Worker
*Note that this step is required only once*
@@ -115,7 +115,7 @@ EOF
## Amazon EMR Spark
[Amazon EMR](https://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-what-is-emr.html) is a managed cluster platform that simplifies running big data frameworks on AWS.
-> **Note:** AWS EMR Spark is Linux-based. Therefore, if you are interested in deploying your app to AWS EMR Spark, make sure your app is .NET Standard compatible and that you use [.NET Core compiler](https://dotnet.microsoft.com/download) to compile your app.
+> **Note:** AWS EMR Spark is Linux-based. Therefore, if you are interested in deploying your app to AWS EMR Spark, make sure your app is .NET Standard compatible and that you use the [.NET 6 SDK](https://dotnet.microsoft.com/download) to compile your app.
### Deploy Microsoft.Spark.Worker
*Note that this step is only required at cluster creation*
@@ -160,7 +160,7 @@ foo@bar:~$ aws emr add-steps \
## Databricks
[Databricks](http://databricks.com) is a platform that provides cloud-based big data processing using Apache Spark.
-> **Note:** [Azure](https://azure.microsoft.com/en-us/services/databricks/) and [AWS](https://databricks.com/aws) Databricks is Linux-based. Therefore, if you are interested in deploying your app to Databricks, make sure your app is .NET Standard compatible and that you use [.NET Core compiler](https://dotnet.microsoft.com/download) to compile your app.
+> **Note:** [Azure](https://azure.microsoft.com/en-us/services/databricks/) and [AWS](https://databricks.com/aws) Databricks are Linux-based. Therefore, if you are interested in deploying your app to Databricks, make sure your app is .NET Standard compatible and that you use the [.NET 6 SDK](https://dotnet.microsoft.com/download) to compile your app.
Databricks allows you to submit Spark .NET apps to an existing active cluster or create a new cluster everytime you launch a job. This requires the **Microsoft.Spark.Worker** to be installed **first** before you submit a Spark .NET app.
diff --git a/docs/building/ubuntu-instructions.md b/docs/building/ubuntu-instructions.md
index 3830614ad..dc72ad9ee 100644
--- a/docs/building/ubuntu-instructions.md
+++ b/docs/building/ubuntu-instructions.md
@@ -16,7 +16,7 @@ Building Spark .NET on Ubuntu 18.04
If you already have all the pre-requisites, skip to the [build](ubuntu-instructions.md#building) steps below.
- 1. Download and install **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** - installing the SDK will add the `dotnet` toolchain to your path.
+ 1. Download and install **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** - installing the SDK will add the `dotnet` toolchain to your path.
2. Install **[OpenJDK 8](https://openjdk.java.net/install/)**
- You can use the following command:
```bash
@@ -117,26 +117,26 @@ You should see JARs created for the supported Spark versions:
* `microsoft-spark-2-4/target/microsoft-spark-2-4_2.11-.jar`
* `microsoft-spark-3-0/target/microsoft-spark-3-0_2.12-.jar`
-## Building .NET Sample Applications using .NET Core CLI
+## Building .NET Sample Applications using .NET 6 CLI
1. Build the Worker
```bash
cd ~/dotnet.spark/src/csharp/Microsoft.Spark.Worker/
- dotnet publish -f netcoreapp3.1 -r linux-x64
+ dotnet publish -f net6.0 -r linux-x64
```
📙 Click to see sample console output
```bash
- user@machine:/home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker$ dotnet publish -f netcoreapp3.1 -r linux-x64
+ user@machine:/home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker$ dotnet publish -f net6.0 -r linux-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 36.03 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj.
Restore completed in 35.94 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj.
Microsoft.Spark -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark/Debug/netstandard2.0/Microsoft.Spark.dll
- Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp3.1/linux-x64/Microsoft.Spark.Worker.dll
- Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp3.1/linux-x64/publish/
+ Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net6.0/linux-x64/Microsoft.Spark.Worker.dll
+ Microsoft.Spark.Worker -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net6.0/linux-x64/publish/
```
@@ -144,31 +144,31 @@ You should see JARs created for the supported Spark versions:
2. Build the Samples
```bash
cd ~/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples/
- dotnet publish -f netcoreapp3.1 -r linux-x64
+ dotnet publish -f net6.0 -r linux-x64
```
📙 Click to see sample console output
```bash
- user@machine:/home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples$ dotnet publish -f netcoreapp3.1 -r linux-x64
+ user@machine:/home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples$ dotnet publish -f net6.0 -r linux-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 37.11 ms for /home/user/dotnet.spark/src/csharp/Microsoft.Spark/Microsoft.Spark.csproj.
Restore completed in 281.63 ms for /home/user/dotnet.spark/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj.
Microsoft.Spark -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark/Debug/netstandard2.0/Microsoft.Spark.dll
- Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp3.1/linux-x64/Microsoft.Spark.CSharp.Examples.dll
- Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp3.1/linux-x64/publish/
+ Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net6.0/linux-x64/Microsoft.Spark.CSharp.Examples.dll
+ Microsoft.Spark.CSharp.Examples -> /home/user/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net6.0/linux-x64/publish/
```
# Run Samples
-Once you build the samples, you can use `spark-submit` to submit your .NET Core apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark.
+Once you build the samples, you can use `spark-submit` to submit your .NET 6 apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark.
- 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/netcoreapp3.1/linux-x64/publish`)
- 2. Open a terminal and go to the directory where your app binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/netcoreapp3.1/linux-x64/publish`)
+ 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.Worker/Debug/net6.0/linux-x64/publish`)
+ 2. Open a terminal and go to the directory where your app binary has been generated (e.g., `~/dotnet.spark/artifacts/bin/Microsoft.Spark.CSharp.Examples/Debug/net6.0/linux-x64/publish`)
3. Running your app follows the basic structure:
```bash
spark-submit \
diff --git a/docs/building/windows-instructions.md b/docs/building/windows-instructions.md
index 754a4bf61..4b55a92e0 100644
--- a/docs/building/windows-instructions.md
+++ b/docs/building/windows-instructions.md
@@ -8,7 +8,7 @@ Building Spark .NET on Windows
- [Building Spark .NET Scala Extensions Layer](#building-spark-net-scala-extensions-layer)
- [Building .NET Samples Application](#building-net-samples-application)
- [Using Visual Studio for .NET Framework](#using-visual-studio-for-net-framework)
- - [Using .NET Core CLI for .NET Core](#using-net-core-cli-for-net-core)
+ - [Using .NET CLI for .NET 6](#using-net-cli-for-net-6)
- [Run Samples](#run-samples)
# Open Issues:
@@ -20,12 +20,12 @@ Building Spark .NET on Windows
If you already have all the pre-requisites, skip to the [build](windows-instructions.md#building) steps below.
- 1. Download and install the **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** - installing the SDK will add the `dotnet` toolchain to your path.
+ 1. Download and install the **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** - installing the SDK will add the `dotnet` toolchain to your path.
2. Install **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** (Version 16.4 or later). The Community version is completely free. When configuring your installation, include these components at minimum:
* .NET desktop development
* All Required Components
* .NET Framework 4.6.1 Development Tools
- * .NET Core cross-platform development
+ * .NET 6 cross-platform development
* All Required Components
3. Install **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)**
- Select the appropriate version for your operating system e.g., jdk-8u201-windows-x64.exe for Win x64 machine.
@@ -90,7 +90,7 @@ git clone https://github.com/dotnet/spark.git c:\github\dotnet-spark
When you submit a .NET application, Spark .NET has the necessary logic written in Scala that inform Apache Spark how to handle your requests (e.g., request to create a new Spark Session, request to transfer data from .NET side to JVM side etc.). This logic can be found in the [Spark .NET Scala Source Code](../../src/scala).
-Regardless of whether you are using .NET Framework or .NET Core, you will need to build the Spark .NET Scala extension layer. This is easy to do:
+Regardless of whether you are using .NET Framework or .NET 6, you will need to build the Spark .NET Scala extension layer. This is easy to do:
```powershell
cd src\scala
@@ -148,59 +148,59 @@ You should see JARs created for the supported Spark versions:
-### Using .NET Core CLI for .NET Core
+### Using .NET CLI for .NET 6
-> Note: We are currently working on automating .NET Core builds for Spark .NET. Until then, we appreciate your patience in performing some of the steps manually.
+> Note: We are currently working on automating .NET 6 builds for Spark .NET. Until then, we appreciate your patience in performing some of the steps manually.
1. Build the Worker
```powershell
cd C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker\
- dotnet publish -f netcoreapp3.1 -r win-x64
+ dotnet publish -f net6.0 -r win-x64
```
📙 Click to see sample console output
```powershell
- PS C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker> dotnet publish -f netcoreapp3.1 -r win-x64
+ PS C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker> dotnet publish -f net6.0 -r win-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 299.95 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark\Microsoft.Spark.csproj.
Restore completed in 306.62 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark.Worker\Microsoft.Spark.Worker.csproj.
Microsoft.Spark -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark\Debug\netstandard2.0\Microsoft.Spark.dll
- Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp3.1\win-x64\Microsoft.Spark.Worker.dll
- Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp3.1\win-x64\publish\
+ Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net6.0\win-x64\Microsoft.Spark.Worker.dll
+ Microsoft.Spark.Worker -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net6.0\win-x64\publish\
```
2. Build the Samples
```powershell
cd C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples\
- dotnet publish -f netcoreapp3.1 -r win-x64
+ dotnet publish -f net6.0 -r win-x64
```
📙 Click to see sample console output
```powershell
- PS C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples> dotnet publish -f netcoreapp3.1 -r win10-x64
+ PS C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples> dotnet publish -f net6.0 -r win-x64
Microsoft (R) Build Engine version 16.0.462+g62fb89029d for .NET Core
Copyright (C) Microsoft Corporation. All rights reserved.
Restore completed in 44.22 ms for C:\github\dotnet-spark\src\csharp\Microsoft.Spark\Microsoft.Spark.csproj.
Restore completed in 336.94 ms for C:\github\dotnet-spark\examples\Microsoft.Spark.CSharp.Examples\Microsoft.Spark.CSharp.Examples.csproj.
Microsoft.Spark -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark\Debug\netstandard2.0\Microsoft.Spark.dll
- Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp3.1\win-x64\Microsoft.Spark.CSharp.Examples.dll
- Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp3.1\win-x64\publish\
+ Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net6.0\win-x64\Microsoft.Spark.CSharp.Examples.dll
+ Microsoft.Spark.CSharp.Examples -> C:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net6.0\win-x64\publish\
```
# Run Samples
-Once you build the samples, running them will be through `spark-submit` regardless of whether you are targeting .NET Framework or .NET Core apps. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark.
+Once you build the samples, you run them through `spark-submit` regardless of whether you are targeting .NET Framework or .NET 6. Make sure you have followed the [pre-requisites](#pre-requisites) section and installed Apache Spark.
- 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\netcoreapp3.1\win-x64\publish` for .NET Core)
- 2. Open Powershell and go to the directory where your app binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\netcoreapp3.1\win1-x64\publish` for .NET Core)
+ 1. Set the `DOTNET_WORKER_DIR` or `PATH` environment variable to include the path where the `Microsoft.Spark.Worker` binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.Worker\Debug\net6.0\win-x64\publish` for .NET 6)
+ 2. Open PowerShell and go to the directory where your app binary has been generated (e.g., `c:\github\dotnet\spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net461` for .NET Framework, `c:\github\dotnet-spark\artifacts\bin\Microsoft.Spark.CSharp.Examples\Debug\net6.0\win-x64\publish` for .NET 6)
3. Running your app follows the basic structure:
```powershell
spark-submit.cmd `
diff --git a/docs/getting-started/macos-instructions.md b/docs/getting-started/macos-instructions.md
index 476577ee0..3a7c55d5e 100644
--- a/docs/getting-started/macos-instructions.md
+++ b/docs/getting-started/macos-instructions.md
@@ -1,10 +1,10 @@
# Getting Started with Spark .NET on MacOS
-These instructions will show you how to run a .NET for Apache Spark app using .NET Core on MacOSX.
+These instructions will show you how to run a .NET for Apache Spark app using .NET 6 on macOS.
## Pre-requisites
-- Download and install **[.NET Core 2.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/2.1)**
+- Download and install **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)**
- Install **[Java 8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)**
- Select the appropriate version for your operating system e.g., `jdk-8u231-macosx-x64.dmg`.
- Install using the installer and verify you are able to run `java` from your command-line
diff --git a/docs/getting-started/ubuntu-instructions.md b/docs/getting-started/ubuntu-instructions.md
index 8f5b2fd6b..36c048177 100644
--- a/docs/getting-started/ubuntu-instructions.md
+++ b/docs/getting-started/ubuntu-instructions.md
@@ -1,10 +1,10 @@
# Getting Started with Spark.NET on Ubuntu
-These instructions will show you how to run a .NET for Apache Spark app using .NET Core on Ubuntu 18.04.
+These instructions will show you how to run a .NET for Apache Spark app using .NET 6 on Ubuntu 18.04.
## Pre-requisites
-- Download and install the following: **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** | **[OpenJDK 8](https://openjdk.java.net/install/)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
+- Download and install the following: **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** | **[OpenJDK 8](https://openjdk.java.net/install/)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
- Download and install **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release:
- Select a **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release from .NET for Apache Spark GitHub Releases page and download into your local machine (e.g., `~/bin/Microsoft.Spark.Worker`).
- **IMPORTANT** Create a [new environment variable](https://help.ubuntu.com/community/EnvironmentVariables) `DOTNET_WORKER_DIR` and set it to the directory where you downloaded and extracted the Microsoft.Spark.Worker (e.g., `~/bin/Microsoft.Spark.Worker`).
diff --git a/docs/getting-started/windows-instructions.md b/docs/getting-started/windows-instructions.md
index 7b45987a9..b5e2c8713 100644
--- a/docs/getting-started/windows-instructions.md
+++ b/docs/getting-started/windows-instructions.md
@@ -1,10 +1,10 @@
# Getting Started with Spark .NET on Windows
-These instructions will show you how to run a .NET for Apache Spark app using .NET Core on Windows.
+These instructions will show you how to run a .NET for Apache Spark app using .NET 6 on Windows.
## Pre-requisites
-- Download and install the following: **[.NET Core 3.1 SDK](https://dotnet.microsoft.com/download/dotnet-core/3.1)** | **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** | **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
+- Download and install the following: **[.NET 6 SDK](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** | **[Visual Studio 2019](https://www.visualstudio.com/downloads/)** | **[Java 1.8](https://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html)** | **[Apache Spark 2.4.1](https://archive.apache.org/dist/spark/spark-2.4.1/spark-2.4.1-bin-hadoop2.7.tgz)**
- Download and install **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release:
- Select a **[Microsoft.Spark.Worker](https://github.com/dotnet/spark/releases)** release from .NET for Apache Spark GitHub Releases page and download into your local machine (e.g., `c:\bin\Microsoft.Spark.Worker\`).
- **IMPORTANT** Create a [new environment variable](https://www.java.com/en/download/help/path.xml) `DOTNET_WORKER_DIR` and set it to the directory where you downloaded and extracted the Microsoft.Spark.Worker (e.g., `c:\bin\Microsoft.Spark.Worker`).
@@ -12,7 +12,7 @@ These instructions will show you how to run a .NET for Apache Spark app using .N
For detailed instructions, you can see [Building .NET for Apache Spark from Source on Windows](../building/windows-instructions.md).
## Authoring a .NET for Apache Spark App
-- Open Visual Studio -> Create New Project -> Console App (.NET Core) -> Name: `HelloSpark`
+- Open Visual Studio -> Create New Project -> Console App -> Name: `HelloSpark`
- Install `Microsoft.Spark` Nuget package into the solution from the [spark nuget.org feed](https://www.nuget.org/profiles/spark) - see [Ways to install Nuget Package](https://docs.microsoft.com/en-us/nuget/consume-packages/ways-to-install-a-package)
- Write the following code into `Program.cs`:
```csharp
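    // Hypothetical sketch — the file's actual snippet is truncated by this hunk.
    // A minimal HelloSpark Program.cs against the Microsoft.Spark API:
    using Microsoft.Spark.Sql;

    namespace HelloSpark
    {
        class Program
        {
            static void Main(string[] args)
            {
                // Start (or reuse) a Spark session, load a JSON file, and print it.
                SparkSession spark = SparkSession
                    .Builder()
                    .AppName("HelloSpark")
                    .GetOrCreate();

                DataFrame df = spark.Read().Json("people.json");
                df.Show();
            }
        }
    }
```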
diff --git a/docs/migration-guide.md b/docs/migration-guide.md
index 85208d31e..daf13571e 100644
--- a/docs/migration-guide.md
+++ b/docs/migration-guide.md
@@ -2,7 +2,7 @@
- [Upgrading from Microsoft.Spark 0.x to 1.0](#upgrading-from-microsoftspark-0x-to-10)
## Upgrading from Microsoft.Spark 0.x to 1.0
-- Limited support for [.NET Framework](https://dotnet.microsoft.com/learn/dotnet/what-is-dotnet-framework). Please migrate to [.NET Core >= 3.1](https://dotnet.microsoft.com/download/dotnet-core) instead.
+- Limited support for [.NET Framework](https://dotnet.microsoft.com/learn/dotnet/what-is-dotnet-framework). Please migrate to **[.NET 6](https://dotnet.microsoft.com/en-us/download/dotnet/6.0)** instead.
- `Microsoft.Spark.Sql.Streaming.DataStreamWriter.Foreach` does not work in .NET Framework ([#576](https://github.com/dotnet/spark/issues/576))
- `Microsoft.Spark.Worker` should be upgraded to 1.0 as `Microsoft.Spark.Worker` 0.x is not forward-compatible with `Microsoft.Spark` 1.0.
- `Microsoft.Spark` should be upgraded to 1.0 as `Microsoft.Spark.Worker` 1.0 is not backward-compatible with `Microsoft.Spark` 0.x.
diff --git a/docs/udf-guide.md b/docs/udf-guide.md
index 6a2905bf4..5eb0a24fc 100644
--- a/docs/udf-guide.md
+++ b/docs/udf-guide.md
@@ -53,7 +53,7 @@ Since UDFs are functions that need to be executed on the workers, they have to b
## Good to know while implementing UDFs
-One behavior to be aware of while implementing UDFs in .NET for Apache Spark is how the target of the UDF gets serialized. .NET for Apache Spark uses .NET Core, which does not support serializing delegates, so it is instead done by using reflection to serialize the target where the delegate is defined. When multiple delegates are defined in a common scope, they have a shared closure that becomes the target of reflection for serialization. Let's take an example to illustrate what that means.
+One behavior to be aware of while implementing UDFs in .NET for Apache Spark is how the target of the UDF gets serialized. .NET for Apache Spark uses .NET 6, which does not support serializing delegates, so serialization is instead performed by using reflection on the target object where the delegate is defined. When multiple delegates are defined in a common scope, they share a closure that becomes the target of reflection for serialization. Let's take an example to illustrate what that means.
The following code snippet defines two string variables that are being referenced in two function delegates that return the respective strings as result:
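That snippet falls outside this hunk, so as a hypothetical sketch, a shared-closure pair of the shape the guide describes looks like this:

```csharp
string str1 = "Hello";
string str2 = "World";

// Both lambdas capture locals from the same scope, so the compiler emits a
// single closure class holding str1 AND str2; serializing the target of
// either delegate therefore pulls in both captured fields.
Func<string, string> udf1 = s => $"{s} {str1}";
Func<string, string> udf2 = s => $"{s} {str2}";
```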
diff --git a/eng/AfterSolutionBuild.targets b/eng/AfterSolutionBuild.targets
index d9dfa692c..961a75000 100644
--- a/eng/AfterSolutionBuild.targets
+++ b/eng/AfterSolutionBuild.targets
@@ -6,9 +6,9 @@
<_PublishTarget Include="FullFramework" Framework="net461" RuntimeId="win-x64" />
- <_PublishTarget Include="WindowsCore" Framework="netcoreapp3.1" RuntimeId="win-x64" />
- <_PublishTarget Include="LinuxCore" Framework="netcoreapp3.1" RuntimeId="linux-x64" />
- <_PublishTarget Include="MacOSCore" Framework="netcoreapp3.1" RuntimeId="osx-x64" />
+ <_PublishTarget Include="WindowsCore" Framework="net6.0" RuntimeId="win-x64" />
+ <_PublishTarget Include="LinuxCore" Framework="net6.0" RuntimeId="linux-x64" />
+ <_PublishTarget Include="MacOSCore" Framework="net6.0" RuntimeId="osx-x64" />
diff --git a/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj b/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj
index 5f54d77a1..93be3acae 100644
--- a/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj
+++ b/examples/Microsoft.Spark.CSharp.Examples/Microsoft.Spark.CSharp.Examples.csproj
@@ -2,8 +2,8 @@
<OutputType>Exe</OutputType>
- <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;netcoreapp3.1</TargetFrameworks>
- <TargetFramework Condition="'$(OS)' != 'Windows_NT'">netcoreapp3.1</TargetFramework>
+ <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;net6.0</TargetFrameworks>
+ <TargetFramework Condition="'$(OS)' != 'Windows_NT'">net6.0</TargetFramework>
<RootNamespace>Microsoft.Spark.Examples</RootNamespace>
<AssemblyName>Microsoft.Spark.CSharp.Examples</AssemblyName>
diff --git a/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj b/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj
index 85ea23e1c..d1d577681 100644
--- a/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj
+++ b/examples/Microsoft.Spark.FSharp.Examples/Microsoft.Spark.FSharp.Examples.fsproj
@@ -2,8 +2,8 @@
<OutputType>Exe</OutputType>
- <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;netcoreapp3.1</TargetFrameworks>
- <TargetFramework Condition="'$(OS)' != 'Windows_NT'">netcoreapp3.1</TargetFramework>
+ <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;net6.0</TargetFrameworks>
+ <TargetFramework Condition="'$(OS)' != 'Windows_NT'">net6.0</TargetFramework>
<RootNamespace>Microsoft.Spark.Examples</RootNamespace>
<AssemblyName>Microsoft.Spark.FSharp.Examples</AssemblyName>
false
diff --git a/global.json b/global.json
index 544ff1aa5..8f9d74633 100644
--- a/global.json
+++ b/global.json
@@ -1,6 +1,6 @@
{
"tools": {
- "dotnet": "3.1.101"
+ "dotnet": "6.0.400"
},
"msbuild-sdks": {
"Microsoft.DotNet.Arcade.Sdk": "1.0.0-beta.20230.5"
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj
index 6b742dacc..393813db4 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/Microsoft.Spark.Extensions.Delta.E2ETest.csproj
@@ -1,9 +1,7 @@
-
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
-
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj
index c2918d07d..bd592374e 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest/Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest.csproj
@@ -1,7 +1,7 @@
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
Microsoft.Spark.Extensions.DotNet.Interactive.UnitTest
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj
index 59b4a9bee..c6aba59c4 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.DotNet.Interactive/Microsoft.Spark.Extensions.DotNet.Interactive.csproj
@@ -2,7 +2,7 @@
<OutputType>Library</OutputType>
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
Microsoft.Spark.Extensions.DotNet.Interactive
true
true
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj b/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj
index 2417226b8..7de956704 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Hyperspace.E2ETest/Microsoft.Spark.Extensions.Hyperspace.E2ETest.csproj
@@ -1,9 +1,7 @@
-
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
-
diff --git a/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj b/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj
index 05997c88c..b9e2bedd8 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj
+++ b/src/csharp/Microsoft.Spark.E2ETest/Microsoft.Spark.E2ETest.csproj
@@ -1,29 +1,23 @@
-
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
-
PreserveNewest
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj b/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj
index c65b6be69..d863334da 100644
--- a/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj
+++ b/src/csharp/Microsoft.Spark.UnitTest/Microsoft.Spark.UnitTest.csproj
@@ -1,7 +1,7 @@
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
Microsoft.Spark.UnitTest
diff --git a/src/csharp/Microsoft.Spark.UnitTest/UdfSerDeTests.cs b/src/csharp/Microsoft.Spark.UnitTest/UdfSerDeTests.cs
index bf4ef29f4..b655fdf1b 100644
--- a/src/csharp/Microsoft.Spark.UnitTest/UdfSerDeTests.cs
+++ b/src/csharp/Microsoft.Spark.UnitTest/UdfSerDeTests.cs
@@ -149,6 +149,8 @@ private Delegate SerDe(Delegate udf)
return Deserialize(Serialize(udf));
}
+#pragma warning disable SYSLIB0011 // Type or member is obsolete
+ // TODO: Replace BinaryFormatter with a new, secure serializer.
private byte[] Serialize(Delegate udf)
{
UdfSerDe.UdfData udfData = UdfSerDe.Serialize(udf);
@@ -170,5 +172,6 @@ private Delegate Deserialize(byte[] serializedUdf)
return UdfSerDe.Deserialize(udfData);
}
}
+#pragma warning restore SYSLIB0011
}
}
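The SYSLIB0011 suppressions introduced here and in the files below all follow one shape; a minimal sketch of the pattern, assuming `BinaryFormatter` stays in place until a secure replacement lands:

```csharp
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;

internal static class LegacySerDe
{
    // TODO (mirroring the diff): replace BinaryFormatter with a secure serializer.
    public static byte[] Serialize(object value)
    {
        using var stream = new MemoryStream();
#pragma warning disable SYSLIB0011 // BinaryFormatter is obsolete as of .NET 5
        new BinaryFormatter().Serialize(stream, value);
#pragma warning restore SYSLIB0011
        return stream.ToArray();
    }
}
```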
diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs b/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs
index d3ef6175d..7e8887975 100644
--- a/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs
+++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs
@@ -1059,7 +1059,10 @@ public void TestRDDCommandExecutor(Version sparkVersion, IpcOptions ipcOptions)
foreach (int input in inputs)
{
memoryStream.Position = 0;
+#pragma warning disable SYSLIB0011 // Type or member is obsolete
+ // TODO: Replace BinaryFormatter with a new, secure serializer.
formatter.Serialize(memoryStream, input);
+#pragma warning restore SYSLIB0011 // Type or member is obsolete
values.Add(memoryStream.ToArray());
}
@@ -1089,9 +1092,12 @@ public void TestRDDCommandExecutor(Version sparkVersion, IpcOptions ipcOptions)
for (int i = 0; i < inputs.Length; ++i)
{
Assert.True(SerDe.ReadInt32(outputStream) > 0);
+#pragma warning disable SYSLIB0011 // Type or member is obsolete
+ // TODO: Replace BinaryFormatter with a new, secure serializer.
Assert.Equal(
mapUdf(i),
formatter.Deserialize(outputStream));
+#pragma warning restore SYSLIB0011 // Type or member is obsolete
}
// Validate all the data on the stream is read.
diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj b/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj
index 1371d5d1b..9e06e5ca8 100644
--- a/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj
+++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/Microsoft.Spark.Worker.UnitTest.csproj
@@ -1,22 +1,17 @@
-
- <TargetFramework>netcoreapp3.1</TargetFramework>
+ <TargetFramework>net6.0</TargetFramework>
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs b/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs
index 3777fd56c..830903ea9 100644
--- a/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs
+++ b/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs
@@ -112,7 +112,10 @@ private void Serialize(
{
case CommandSerDe.SerializedMode.Byte:
BinaryFormatter formatter = s_binaryFormatter ??= new BinaryFormatter();
+#pragma warning disable SYSLIB0011 // Type or member is obsolete
+ // TODO: Replace BinaryFormatter with a new, secure serializer.
formatter.Serialize(stream, message);
+#pragma warning restore SYSLIB0011 // Type or member is obsolete
break;
case CommandSerDe.SerializedMode.None:
case CommandSerDe.SerializedMode.String:
diff --git a/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj b/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj
index edb479b23..cd5e6d0eb 100644
--- a/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj
+++ b/src/csharp/Microsoft.Spark.Worker/Microsoft.Spark.Worker.csproj
@@ -1,28 +1,19 @@
-
<OutputType>Exe</OutputType>
- <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;netcoreapp3.1</TargetFrameworks>
- <TargetFramework Condition="'$(OS)' != 'Windows_NT'">netcoreapp3.1</TargetFramework>
+ <TargetFrameworks Condition="'$(OS)' == 'Windows_NT'">net461;net6.0</TargetFrameworks>
+ <TargetFramework Condition="'$(OS)' != 'Windows_NT'">net6.0</TargetFramework>
<AssemblyName>Microsoft.Spark.Worker</AssemblyName>
true
-
-
-
-
-
-
-
-
-
+
\ No newline at end of file
diff --git a/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs b/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs
index cb1fa5f4a..353358e44 100644
--- a/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs
+++ b/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs
@@ -62,7 +62,10 @@ internal BroadcastVariables Process(Stream stream)
$"server {readBid} is different from the Broadcast Id received " +
$"from the payload {bid}.");
}
+#pragma warning disable SYSLIB0011 // Type or member is obsolete
+ // TODO: Replace BinaryFormatter with a new, secure serializer.
object value = formatter.Deserialize(socket.InputStream);
+#pragma warning restore SYSLIB0011 // Type or member is obsolete
BroadcastRegistry.Add(bid, value);
}
else
@@ -70,7 +73,10 @@ internal BroadcastVariables Process(Stream stream)
string path = SerDe.ReadString(stream);
using FileStream fStream =
File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read);
+#pragma warning disable SYSLIB0011 // Type or member is obsolete
+ // TODO: Replace BinaryFormatter with a new, secure serializer.
object value = formatter.Deserialize(fStream);
+#pragma warning restore SYSLIB0011 // Type or member is obsolete
BroadcastRegistry.Add(bid, value);
}
}
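For context on what this processor rehydrates: on the driver side a broadcast variable is created with `SparkContext.Broadcast`, and UDFs read it back through `Value()`. A hedged usage sketch (illustrative, not code from this change):

```csharp
using System;
using Microsoft.Spark;
using Microsoft.Spark.Sql;
using static Microsoft.Spark.Sql.Functions;

SparkSession spark = SparkSession
    .Builder()
    .AppName("BroadcastExample")
    .GetOrCreate();

// The value is serialized on the driver; BroadcastVariableProcessor
// deserializes it on each worker before the UDF runs.
Broadcast<string> bv = spark.SparkContext.Broadcast("shared-value");

Func<Column, Column> withSuffix = Udf<string, string>(s => $"{s}:{bv.Value()}");
```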