Skip to content

Commit

Permalink
MapR [SPARK-182] Spark Project External Kafka Producer v09 unit tests…
Browse files Browse the repository at this point in the history
… fixed (apache#263)
  • Loading branch information
rsotn-mapr authored and ekrivokonmapr committed Sep 19, 2019
1 parent 8caa9da commit a1e5584
Show file tree
Hide file tree
Showing 4 changed files with 310 additions and 0 deletions.
26 changes: 26 additions & 0 deletions external/kafka-producer/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -140,5 +140,31 @@
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
<plugins>
<!-- Surefire runs all Java tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.20.1</version>
<!-- Note config is repeated in scalatest config -->
<configuration>
<systemProperties>
<java.security.auth.login.config>${project.basedir}/src/test/resources/mapr.login.conf</java.security.auth.login.config>
</systemProperties>
</configuration>
</plugin>
<!-- Scalatest runs all Scala tests -->
<plugin>
<groupId>org.scalatest</groupId>
<artifactId>scalatest-maven-plugin</artifactId>
<version>1.0</version>
<!-- Note config is repeated in surefire config -->
<configuration>
<systemProperties>
<java.security.auth.login.config>${project.basedir}/src/test/resources/mapr.login.conf</java.security.auth.login.config>
</systemProperties>
</configuration>
</plugin>
</plugins>
</build>
</project>
281 changes: 281 additions & 0 deletions external/kafka-producer/src/test/resources/mapr.login.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,281 @@
/**
 * The KerberosBugWorkAroundLoginModule works around a Java 6 bug in which
 * the KRB5CCNAME environment variable is not picked up properly. This is
 * not needed with recent patch levels of Java 7.
*
* Used by maprlogin and MapRLogin for client authentication
*/
MAPR_CLIENT_KERBEROS {
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule required
useTicketCache=true
doNotPrompt=true;
};

/**
* Used by CLDB for authenticating users.
* The principal value is used as the Kerberos server identity of the CLDB.
* This is generated by configure.sh by default. You can edit this value
* but ensure that every CLDB node has the same principal name.
*/
MAPR_SERVER_KERBEROS {
com.sun.security.auth.module.Krb5LoginModule required
refreshKrb5Config=true
doNotPrompt=true
useKeyTab=true
storeKey=true
keyTab="/opt/mapr/conf/mapr.keytab"
isInitiator=false
principal="mapr/cyber.mapr.cluster";
};

/**
* Used by web servers for SPNEGO authentication. These include
* MapR Webserver/MCS server in case SPNEGO REST APIs authentication is enabled
 *    JT/TT/HBase/Oozie/etc web UIs configured to use
 *      org.apache.hadoop.security.authentication.server.MultiMechsAuthenticationHandler
* The principal value is the kerberos server identity provided by the
* web server for SPNEGO. Recall that SPNEGO identities are HTTP/hostname
* as perceived by the client. The value is automatically generated by
* configure.sh. However, if the hostname placed here is not the hostname
* used by your clients, you may need to edit it.
* Remember that each web server node will have a different value for
* the principal based upon that node's hostname.
*/
MAPR_WEBSERVER_KERBEROS {
com.sun.security.auth.module.Krb5LoginModule required
refreshKrb5Config=true
doNotPrompt=true
useKeyTab=true
storeKey=true
keyTab="/opt/mapr/conf/mapr.keytab"
isInitiator=false
principal="HTTP/node1";
};

/**
* Used for password authentication with PAM. jpam is a Java wrapper
* for PAM. The serviceName below determines which PAM configurations
* are to be used for validating passwords. The list is used in the order
* shown. A failure is ignored and the system proceeds to the next entry.
* If your PAM configurations (typically in /etc/pam.d) are not the same
* as our provided defaults, you may need to change the serviceName values,
* add stanzas, or remove stanzas.
*
* mapr-admin is there by default as a placeholder should you choose to
* create MapR specific PAM configuration. If you have no mapr-admin
* PAM configuration, you can just remove it.
*/
jpamLogin {
net.sf.jpam.jaas.JpamLoginModule Sufficient
serviceName="sudo"
debug=true;
net.sf.jpam.jaas.JpamLoginModule Sufficient
serviceName="sshd"
debug=true;
net.sf.jpam.jaas.JpamLoginModule Sufficient
serviceName="mapr-admin"
debug=true;
};

/********************************************************************
DO NOT EDIT BELOW THIS LINE WITHOUT CONTACTING MAPR SUPPORT
**********************************************************************/

/**
* Used by Zookeeper
*/
Server {
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
cldbkeylocation="/opt/mapr/conf/cldb.key"
debug=true;
};

Client {
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
authMech="MAPR-SECURITY"
debug=true;
};

/**
* Used by Zookeeper - Security is off
*/
Server_simple {
com.mapr.security.simplesasl.GenericLoginModule required
debug=true;
};

Client_simple {
com.mapr.security.simplesasl.GenericLoginModule required
authMech="SIMPLE-SECURITY";
};

/**
* used to obtain MapR credentials
* TODO: rename from maprsasl to something else? maprauth?
*/
hadoop_maprsasl {
org.apache.hadoop.security.login.GenericOSLoginModule required;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

hadoop_maprsasl_keytab {
org.apache.hadoop.security.login.GenericOSLoginModule required;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
useServerKey=true;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

hadoop_maprsasl_permissive {
org.apache.hadoop.security.login.PermissiveLoginModule required;
org.apache.hadoop.security.login.GenericOSLoginModule required;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

hadoop_maprsasl_permissive_keytab {
org.apache.hadoop.security.login.PermissiveLoginModule required;
org.apache.hadoop.security.login.GenericOSLoginModule required;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
useServerKey=true;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

/**
* intended for use with Kerberos authentication, no MapR
*/
hadoop_kerberos {
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule required
useTicketCache=true
renewTGT=true
doNotPrompt=true;
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required;
};

/**
* TODO:
* left out isInitial
* should I leave out renewTGT?
*/
hadoop_kerberos_keytab {
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule required
refreshKrb5Config=true
doNotPrompt=true
useKeyTab=true
storeKey=true;
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required;
};

/**
 * TODO: KerberosPrincipal is preferred; should "clear" be added to principalPriority?
* authenticate using hybrid of kerberos and MapR
* maprticket must already exist on file system as MapR login module
* cannot get kerberos identity from subject for implicit login.
*/
hadoop_hybrid {
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule optional
useTicketCache=true
renewTGT=true
doNotPrompt=true;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false;
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

hadoop_hybrid_keytab {
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
useServerKey=true;
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule optional
refreshKrb5Config=true
doNotPrompt=true
useKeyTab=true
storeKey=true;
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

hadoop_hybrid_permissive {
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule optional
useTicketCache=true
renewTGT=true
doNotPrompt=true;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false;
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.PermissiveLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

hadoop_hybrid_permissive_keytab {
org.apache.hadoop.security.login.KerberosBugWorkAroundLoginModule optional
refreshKrb5Config=true
doNotPrompt=true
useKeyTab=true
storeKey=true;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
useServerKey=true;
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.PermissiveLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

/**
* simple login, just get OS creds
*/
hadoop_simple {
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required;
};
/* All configurations should have a corresponding "_keytab" section for
 * loginFromKeytab(), even if it duplicates the one without.
 */
hadoop_simple_keytab {
org.apache.hadoop.security.login.GenericOSLoginModule required;
org.apache.hadoop.security.login.HadoopLoginModule required;
};

/**
 * These are used when there is no JVM or config setting for hadoop.login —
 * basically your backup. No MapR components should depend on this,
 * but it is handy when dealing with unfamiliar code.
 * Note the intentional error below, to make sure you don't use it by accident.
 */
hadoop_default {
org.apache.hadoop.security.login.GenericOSLoginModule required;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};

/**
* keytab version of previous
*/
hadoop_default_keytab {
YouShouldntSeeThisErrorUnlessYourJVMhadoop.loginPropertiesAreBad required;
org.apache.hadoop.security.login.GenericOSLoginModule required;
com.mapr.security.maprsasl.MaprSecurityLoginModule required
checkUGI=false
useServerKey=true;
org.apache.hadoop.security.login.HadoopLoginModule required
principalPriority=com.mapr.security.MapRPrincipal;
};
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@ class RDDProducerSuite extends BaseKafkaProducerTest {
private val recordValue = "value"
private val partition = 0
private val testConf = new ProducerConf(bootstrapServers = List("localhost:9092"))
.withKeySerializer("org.apache.kafka.common.serialization.StringSerializer")

private var sparkContext: SparkContext = _

before {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ private[spark] final class EmbeddedKafka(
"broker.id" -> "1",
"zookeeper.connect" -> zkConnect,
"host.name" -> "localhost",
"offsets.topic.replication.factor" -> "1",
"log.dir" -> logDir.toString)

val props = kafkaProps.getOrElse(new Properties())
Expand Down

0 comments on commit a1e5584

Please sign in to comment.