From 0d9b256a9b7aa52c37c6a952ffd68bf4441d46e5 Mon Sep 17 00:00:00 2001
From: Davies Liu
Date: Wed, 12 Nov 2014 15:00:17 -0800
Subject: [PATCH] refactor

---
 .../main/scala/org/apache/spark/api/python/PythonRDD.scala | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index a36f4a1a221c8..6702baa9e12fb 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -24,6 +24,7 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio
 import org.apache.spark.input.PortableDataStream
 
 import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.language.existentials
 
@@ -762,11 +763,7 @@ private[spark] object PythonRDD extends Logging {
    * A helper to convert java.util.List[Double] into Array[Double]
    */
   def listToArrayDouble(list: JList[Double]): Array[Double] = {
-    val r = new Array[Double](list.size)
-    list.zipWithIndex.foreach {
-      case (v, i) => r(i) = v
-    }
-    r
+    list.asScala.toArray
   }
 }
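
For context, a minimal standalone sketch of the conversion style this patch switches to: scala.collection.JavaConverters adds an explicit asScala decorator to java.util.List, and toArray then copies the wrapped buffer into an Array[Double]. The object name and example values below are illustrative only, not part of the patch.

    import java.util.{Arrays => JArrays, List => JList}
    import scala.collection.JavaConverters._

    object ListToArrayDoubleSketch {
      // Same shape as the refactored helper: wrap the Java list with asScala
      // (a lazy view, no copy yet) and materialize once into an Array[Double].
      def listToArrayDouble(list: JList[Double]): Array[Double] =
        list.asScala.toArray

      def main(args: Array[String]): Unit = {
        val javaList: JList[Double] = JArrays.asList(1.0, 2.0, 3.0)
        println(listToArrayDouble(javaList).mkString(", ")) // prints: 1.0, 2.0, 3.0
      }
    }

Unlike the implicit JavaConversions conversions, the JavaConverters decorators are explicit and return lazy wrappers, so the only element copy happens in toArray, matching what the original hand-rolled loop did in a single idiomatic line.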