Skip to content

Commit

Permalink
SPARK-569 Generation of SSL certificates for spark UI (apache#535)
Browse files Browse the repository at this point in the history
  • Loading branch information
rsotn-mapr authored and ekrivokonmapr committed Sep 19, 2019
1 parent 396963e commit 37905dc
Show file tree
Hide file tree
Showing 3 changed files with 339 additions and 3 deletions.
189 changes: 189 additions & 0 deletions bin/manageSSLKeys.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
#!/bin/bash
# Copyright (c) 2019 & onwards. MapR Tech, Inc., All rights reserved

#This script creates the default key and trust stores used by MapR spark UI for
#various HTTPS connections. These are self signed.

# Per-user scratch directory for the error log of this run.
CURRENT_USER=$(id -u -n)
LOG_BASE_DIR=/tmp/$CURRENT_USER
mkdir -p $LOG_BASE_DIR

# Redirect stderr to a log file and trace every command into it (set -x).
exec 2> $LOG_BASE_DIR/spark-ui-mngssl.err
set -x


# Local store directory (in the user's home) and its MapR-FS counterpart.
INSTALL_DIR=$HOME/spark/security_keys
MAPRFS_DIR=/apps/spark/$CURRENT_USER/security_keys

# Paths of the generated stores: JKS originals plus PKCS12/PEM conversions.
sslKeyStore=${INSTALL_DIR}/ssl_keystore
sslKeyStoreP12=${INSTALL_DIR}/ssl_keystore.p12
sslKeyStorePEM=${INSTALL_DIR}/ssl_keystore.pem
sslTrustStore=${INSTALL_DIR}/ssl_truststore
sslTrustStoreP12=${INSTALL_DIR}/ssl_truststore.p12
sslTrustStorePEM=${INSTALL_DIR}/ssl_truststore.pem
# NOTE(review): hard-coded well-known store password; matches the value
# expected by SecurityManager.genSslCertsForWebUIifNeeded.
storePass=mapr123
storeFormat=JKS
storeFormatPKCS12=pkcs12
# ~100 years, i.e. effectively non-expiring self-signed certificates.
expireInDays="36500"
convertingKeystore=false
noPem=0
# convertToPem converts from srcType (JKS) to dstType (pkcs12).
srcType=JKS
dstType=pkcs12
#VERBOSE="-v"
clusterConf=${MAPR_HOME:=/opt/mapr}/conf/mapr-clusters.conf

. ${MAPR_HOME:=/opt/mapr}/server/scripts-common.sh
# Prefer the JDK's keytool when JAVA_HOME is set; fall back to the PATH.
if [ "$JAVA_HOME"x = "x" ]; then
KEYTOOL=`which keytool`
else
KEYTOOL=$JAVA_HOME/bin/keytool
fi

# Check if keytool is actually valid and exists
if [ ! -e "${KEYTOOL:-}" ]; then
echo "The keytool in \"${KEYTOOL}\" does not exist."
echo "Keytool not found or JAVA_HOME not set properly. Please install keytool or set JAVA_HOME properly."
exit 1
fi

# The cluster name is the first token of the first line of mapr-clusters.conf;
# it is used as the certificate alias.
CLUSTERCONF_FIRST_LINE=`head -n 1 $clusterConf`
ARR=($CLUSTERCONF_FIRST_LINE)
CLUSTERNAME=${ARR[0]}

#discover DNS domain for host unless provided on command line and use
#in certificate DN
DOMAINNAME=`hostname -d`
if [ "$DOMAINNAME"x = "x" ]; then
CERTNAME=`cat ${MAPR_HOME:=/opt/mapr}/hostname`
else
# Wildcard certificate for every host in the domain.
CERTNAME="*."$DOMAINNAME
fi


# Exits the whole script successfully when a keystore already exists,
# so generation only ever happens once per user.
function confirmNotThere() {
    if test -f "${sslKeyStore}"; then
        exit 0
    fi
}

# Creates the local store directory and its MapR-FS counterpart
# (/apps/spark/<user>/security_keys).
function createDirs() {
mkdir -p ${INSTALL_DIR}
hadoop fs -mkdir -p ${MAPRFS_DIR}
}

# Publishes every generated store file from the local directory to MapR-FS
# so other nodes/processes of this user can reuse them.
function copyToMfs() {
hadoop fs -copyFromLocal ${INSTALL_DIR}/* ${MAPRFS_DIR}
}

# Generates a self-signed key pair in $sslKeyStore, exports its certificate
# into $sslTrustStore, and derives PEM versions of both stores.
# Exits non-zero if any keytool step fails so that broken or missing stores
# are never published to MapR-FS by the caller.
function createCertificates() {
    #create self signed certificate with private key

    echo "Creating 100 year self signed certificate with subjectDN='CN=$CERTNAME'"
    $KEYTOOL -genkeypair -sigalg SHA512withRSA -keyalg RSA -alias $CLUSTERNAME -dname CN=$CERTNAME -validity $expireInDays \
        -storepass $storePass -keypass $storePass \
        -keystore $sslKeyStore -storetype $storeFormat $VERBOSE
    if [ $? -ne 0 ]; then
        echo "Keytool command to generate key store failed"
        # BUGFIX: previously only echoed and kept going, letting a broken
        # store be copied to MapR-FS later on.
        exit 1
    fi

    #extract self signed certificate into trust store
    tfile=/tmp/tmpfile-mapcert.$$
    /bin/rm -f $tfile
    $KEYTOOL -exportcert -keystore $sslKeyStore -file $tfile \
        -alias $CLUSTERNAME -storepass $storePass -storetype $storeFormat $VERBOSE
    if [ $? -ne 0 ]; then
        echo "Keytool command to extract certificate from key store failed"
        exit 1
    fi
    $KEYTOOL -importcert -keystore $sslTrustStore -file $tfile \
        -alias $CLUSTERNAME -storepass $storePass -noprompt $VERBOSE
    if [ $? -ne 0 ]; then
        echo "Keytool command to create trust store failed"
        exit 1
    fi
    # create PEM version (silent mode; keystore PEM is password-protected)
    convertToPem $sslKeyStore $sslKeyStorePEM true true
    convertToPem $sslTrustStore $sslTrustStorePEM true
    /bin/rm -f $tfile
}

# Locks down the generated stores both locally and on MapR-FS:
# the key stores are owner-read-only (400), the trust stores are
# world-readable (444).
function setPermissions() {
    local owner_group="$CURRENT_USER:$CURRENT_USER"

    chown $owner_group ${INSTALL_DIR}/*
    chmod 400 ${INSTALL_DIR}/ssl_keystore*
    chmod 444 ${INSTALL_DIR}/ssl_truststore*

    hadoop fs -chown $owner_group ${MAPRFS_DIR}/*
    hadoop fs -chmod 400 ${MAPRFS_DIR}/ssl_keystore*
    hadoop fs -chmod 444 ${MAPRFS_DIR}/ssl_truststore*
}

# convertToPem FROM TO [SILENT] [USE_KS_PW] [PASSWD]
#
# Converts a JKS store (global $srcType) into a PKCS12 store and, unless
# $noPem is set, additionally into a PEM file via openssl.
#   FROM      - source key/trust store
#   TO        - destination PEM path; the PKCS12 file name is derived from
#               it by swapping the extension
#   SILENT    - non-empty: skip the pre-flight checks and do NOT abort the
#               script when the keytool conversion fails
#   USE_KS_PW - "true": protect the PEM output with the keystore password
#   PASSWD    - explicit PEM password, honored when USE_KS_PW is not "true"
function convertToPem() {
    from=$1
    to=$2
    silent=$3
    useKSPwForPem=$4
    passwd=$5
    base_from=$(basename $from)
    base_to=$(basename $to)
    base_from_without_ext="$(echo $base_from | cut -d'.' -f1)"
    base_to_without_ext="$(echo $base_to | cut -d'.' -f1)"
    dir_from=$(dirname $from)
    dir_to=$(dirname $to)
    passout=""
    fExt=""
    # Extension of the intermediate store follows the conversion target.
    case "$dstType" in
    JKS) fExt="jks";;
    pkcs12) fExt="p12";;
    esac
    if [ "$useKSPwForPem" == "true" ]; then
        passout="-passout pass:$storePass"
    elif [ -n "$passwd" ]; then
        passout="-passout pass:$passwd"
    fi
    destSSLStore="$dir_to/${base_to_without_ext}.$fExt"
    # Pre-flight sanity checks, skipped in silent mode.
    if [ -z "$silent" ]; then
        echo "Converting certificates from $from into $to"
        if [ ! -f "$from" ]; then
            echo "Source trust store not found: $from"
            exit 1
        fi
        if [ -f "$to" ]; then
            echo "Destination trust store already exists: $to"
            exit 1
        fi
        if [ "$to" == "$from" ]; then
            echo "Source trust store cannot be the same as Destination trust store"
            exit 1
        fi
    fi
    $KEYTOOL -importkeystore -srckeystore $from -destkeystore $destSSLStore \
        -srcstorepass $storePass -deststorepass $storePass -srcalias $CLUSTERNAME \
        -srcstoretype $srcType -deststoretype $dstType -noprompt $VERBOSE
    if [ $? -ne 0 ]; then
        echo "Keytool command to create $dstType trust/key store failed"
        # BUGFIX: was "$slient" (typo) - an always-empty variable - so the
        # script exited even when the caller requested silent best-effort mode.
        [ -z "$silent" ] && exit 1
    fi
    if [ "$dstType" = "pkcs12" ] && [ "$noPem" -eq 0 ]; then
        if [ "$to" = "$destSSLStore" ]; then
            # The caller passed a .p12 name as "to" without realizing a PEM
            # file is generated too - redirect the PEM output so the PKCS12
            # store is not overwritten.
            to="${to/.p12/}.pem"
        fi
        openssl $storeFormatPKCS12 -in $destSSLStore -out $to -passin pass:$storePass $passout
        if [ $? -ne 0 ]; then
            echo "openssl command to create PEM trust store failed"
        fi
    fi
}

################
# main #
################
# Create local + MapR-FS directories, then bail out (exit 0) if a keystore
# already exists locally; otherwise generate the stores, publish them to
# MapR-FS, and tighten permissions on both copies.
createDirs
confirmNotThere
createCertificates
copyToMfs
setPermissions

150 changes: 148 additions & 2 deletions core/src/main/scala/org/apache/spark/SecurityManager.scala
Original file line number Diff line number Diff line change
Expand Up @@ -22,14 +22,26 @@ import java.nio.charset.StandardCharsets.UTF_8

import org.apache.hadoop.io.Text
import org.apache.hadoop.security.{Credentials, UserGroupInformation}

import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.{SparkCuratorUtil, SparkHadoopUtil}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.network.sasl.SecretKeyHolder
import org.apache.spark.util.Utils

import sys.process._
import java.io.File
import java.nio.file.{Files, Paths}
import java.util.Comparator

import com.mapr.fs.MapRFileSystem
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.api.CuratorWatcher
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import scala.collection.JavaConverters._
import org.apache.zookeeper.WatchedEvent

/**
* Spark class responsible for security.
*
Expand Down Expand Up @@ -123,6 +135,140 @@ private[spark] class SecurityManager(
opts
}

/**
* Generates ssl ceritificates for Spark Web UI if Ssl is enabled and
* certificates are not specified by the user. Otherwise returns
* sslOptions without any changes.
*/
/**
 * Generates SSL certificates for the Spark Web UI when SSL is enabled and
 * no keystore was configured by the user; otherwise returns `sslOptions`
 * unchanged.  The returned options always point at the per-user keystore
 * under `$HOME/spark/security_keys`.
 */
def genSslCertsForWebUIifNeeded(sslOptions: SSLOptions): SSLOptions = {
  if (!sslOptions.enabled || sslOptions.keyStore.isDefined) {
    sslOptions
  } else {
    val homeDir = System.getProperty("user.home")
    val localKeyDir = s"$homeDir/spark/security_keys"
    val keyStorePath = s"$localKeyDir/ssl_keystore"
    // Fixed password used by bin/manageSSLKeys.sh when creating the stores.
    val keyStorePass = "mapr123"
    val updated = updateSslOptsWithNewKeystore(sslOptions, keyStorePath, keyStorePass)

    // Materialize the keystore locally (copy from MapR-FS or generate).
    if (!Files.exists(Paths.get(keyStorePath))) {
      copyFromMfsOrGenSslCertsForWebUI(localKeyDir)
    }
    updated
  }
}

/**
 * Copies previously generated Web UI SSL stores from MapR-FS into
 * `localBaseDir`, or - when no keystore has been published yet - runs the
 * generation script under a ZooKeeper lock so that only one process per
 * user generates them.
 */
def copyFromMfsOrGenSslCertsForWebUI(localBaseDir: String): Unit = {
  //////////////////// Zookeeper lock utils /////////////////////
  val mfs = FileSystem.get(new Configuration()).asInstanceOf[MapRFileSystem]
  val zkUrl = mfs.getZkConnectString
  val sslZkConfProperty = "tmp.spark.ssl.zookeeper.url"
  val zkPath = "/spark/web-ui-locks"
  val zkLock = s"$zkPath/lock-"

  // SparkCuratorUtil reads the connect string out of SparkConf, so park it
  // under a temporary key and drop it as soon as the client is built.
  sparkConf.set(sslZkConfProperty, zkUrl)
  val zk: CuratorFramework = SparkCuratorUtil.newClient(sparkConf, sslZkConfProperty)
  sparkConf.remove(sslZkConfProperty)

  try {
    if (zk.checkExists().forPath(zkPath) == null) {
      zk.create().creatingParentsIfNeeded().forPath(zkPath)
    }

    // Blocks until this process owns the lowest-numbered ephemeral
    // sequential node under zkPath; returns the path of the created node.
    def acquireLock(): String = {
      val lockPath = zk.create().withProtectedEphemeralSequential().forPath(zkLock)
      val monitor = new Object()
      monitor.synchronized {
        var owned = false
        while (!owned) {
          val children = zk.getChildren().usingWatcher(new CuratorWatcher {
            override def process(watchedEvent: WatchedEvent): Unit = {
              monitor.synchronized {
                monitor.notifyAll()
              }
            }
          }).forPath(zkPath)
          // BUGFIX: ZooKeeper returns children in no defined order; the node
          // with the lowest sequence number owns the lock, so compare against
          // the *sorted* head.  The original sorted the list but then tested
          // the unsorted first child, so two processes could both believe
          // they held the lock.
          if (lockPath.endsWith(children.asScala.sorted.head)) {
            owned = true
          } else {
            monitor.wait()
          }
        }
      }
      lockPath
    }

    def releaseLock(lockPath: String): Unit = {
      zk.delete().forPath(lockPath)
    }
    /////////////////////End of Zookeeper lock utils //////////////////////

    val username = UserGroupInformation.getCurrentUser.getShortUserName
    val mfsBaseDir = s"/apps/spark/$username/security_keys/"
    val mfsKeyStore = s"$mfsBaseDir/ssl_keystore"
    val fs = FileSystem.get(hadoopConf)

    val localDir = new File(localBaseDir)
    if (!localDir.exists()) {
      localDir.mkdirs()
    }

    if (fs.exists(new Path(mfsKeyStore))) {
      // Stores already published: copy every file down.  (BUGFIX: the
      // original consumed and discarded the first listing entry, silently
      // skipping one of the store files.)
      val files = fs.listFiles(new Path(mfsBaseDir), false)
      while (files.hasNext) {
        fs.copyToLocalFile(files.next().getPath, new Path(localBaseDir))
      }
    } else {
      val lockPath = acquireLock()
      try {
        // Re-check under the lock: another process may have published the
        // stores while we were waiting for it.
        if (!fs.exists(new Path(mfsKeyStore))) {
          genSslCertsForWebUI()
        }
      } finally {
        // BUGFIX: release even when generation throws, so waiters don't hang
        // until this process's session expires.
        releaseLock(lockPath)
      }
    }
  } finally {
    zk.close() // BUGFIX: the Curator client was leaked in the original
  }
}

/**
 * Reads the installed Spark version string from `versionFile`, trimmed of
 * surrounding whitespace.  Throws a descriptive exception when the file is
 * missing, since without it the SSL generation script cannot be located.
 */
private def readSparkVersion(versionFile: String): String = {
  try {
    val source = scala.io.Source.fromFile(versionFile)
    // BUGFIX: close the source explicitly - the original leaked the
    // underlying file handle.
    try source.mkString.trim finally source.close()
  } catch {
    case e: java.io.FileNotFoundException =>
      throw new Exception(s"Failed to generate SSL certificates for spark WebUI: "
        + e.getLocalizedMessage())
  }
}
/**
 * Returns a rebuilt copy of `sslOptions` that points at the generated
 * keystore and its password, leaving every other setting untouched.
 */
private def updateSslOptsWithNewKeystore(sslOptions: SSLOptions,
    sslKeyStore: String,
    sslKeyStorePass: String): SSLOptions = {
  val keyStoreFile = Some(new File(sslKeyStore))
  val keyStorePassword = Some(sslKeyStorePass)
  new SSLOptions(
    sslOptions.enabled,
    sslOptions.port,
    keyStoreFile,
    keyStorePassword,
    sslOptions.keyPassword,
    sslOptions.keyStoreType,
    sslOptions.needClientAuth,
    sslOptions.trustStore,
    sslOptions.trustStorePassword,
    sslOptions.trustStoreType,
    sslOptions.protocol,
    sslOptions.enabledAlgorithms)
}

/**
 * Runs bin/manageSSLKeys.sh from the installed Spark distribution (located
 * via $MAPR_HOME and the sparkversion file) to create the self-signed
 * stores.  Throws when the script exits with a non-zero status.
 */
private def genSslCertsForWebUI(): Unit = {
  // Fall back to the default MapR install root when the env var is unset
  // or empty, exactly as the original null/isEmpty checks did.
  val maprHome = Option(System.getenv("MAPR_HOME")).filter(_.nonEmpty).getOrElse("/opt/mapr")
  val sparkBase = s"$maprHome/spark"
  val sparkVersion = readSparkVersion(s"$sparkBase/sparkversion")
  val manageSslKeysScript = s"$sparkBase/spark-$sparkVersion/bin/manageSSLKeys.sh"

  val exitCode = manageSslKeysScript.!
  if (exitCode != 0) {
    throw new Exception(s"Failed to generate SSL certificates for spark WebUI")
  }
}

/**
* Split a comma separated String, filter out any empty items, and return a Set of strings
*/
Expand Down
3 changes: 2 additions & 1 deletion core/src/main/scala/org/apache/spark/ui/WebUI.scala
Original file line number Diff line number Diff line change
Expand Up @@ -43,13 +43,14 @@ import org.apache.spark.util.Utils
*/
private[spark] abstract class WebUI(
val securityManager: SecurityManager,
val sslOptions: SSLOptions,
val sslOpts: SSLOptions,
port: Int,
conf: SparkConf,
basePath: String = "",
name: String = "")
extends Logging {

protected val sslOptions: SSLOptions = securityManager.genSslCertsForWebUIifNeeded(sslOpts)
protected val tabs = ArrayBuffer[WebUITab]()
protected val handlers = ArrayBuffer[ServletContextHandler]()
protected val pageToHandlers = new HashMap[WebUIPage, ArrayBuffer[ServletContextHandler]]
Expand Down

0 comments on commit 37905dc

Please sign in to comment.