From 9b487bf63f15a3f03333eace24d6ee30986d0a1e Mon Sep 17 00:00:00 2001
From: Cheng Lian
Date: Wed, 29 Apr 2015 23:35:03 +0800
Subject: [PATCH] Fixes compilation errors introduced while rebasing

---
 .../scala/org/apache/spark/sql/DataFrame.scala     | 18 ++++++++++++++----
 .../spark/sql/sources/DataSourceStrategy.scala     |  2 +-
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
index 32c867f2eeabd..7db5ae105c77b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrame.scala
@@ -37,7 +37,7 @@ import org.apache.spark.sql.catalyst.analysis.{ResolvedStar, UnresolvedAttribute
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.{Filter, _}
 import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
-import org.apache.spark.sql.catalyst.{CatalystTypeConverters, ScalaReflection, SqlParser}
+import org.apache.spark.sql.catalyst.{expressions, CatalystTypeConverters, ScalaReflection, SqlParser}
 import org.apache.spark.sql.execution.{EvaluatePython, ExplainCommand, LogicalRDD}
 import org.apache.spark.sql.jdbc.JDBCWriteDetails
 import org.apache.spark.sql.json.JacksonGenerator
@@ -400,7 +400,9 @@ class DataFrame private[sql](
         joined.left,
         joined.right,
         joinType = Inner,
-        Some(EqualTo(joined.left.resolve(usingColumn), joined.right.resolve(usingColumn))))
+        Some(expressions.EqualTo(
+          joined.left.resolve(usingColumn),
+          joined.right.resolve(usingColumn))))
     )
   }

@@ -1343,7 +1345,7 @@ class DataFrame private[sql](
       mode: SaveMode,
       options: java.util.Map[String, String],
       partitionColumns: java.util.List[String]): Unit = {
-    ???
+    saveAsTable(tableName, source, mode, options.toMap, partitionColumns)
   }

   /**
@@ -1399,7 +1401,15 @@ class DataFrame private[sql](
       mode: SaveMode,
       options: Map[String, String],
       partitionColumns: Seq[String]): Unit = {
-    ???
+    sqlContext.executePlan(
+      CreateTableUsingAsSelect(
+        tableName,
+        source,
+        temporary = false,
+        partitionColumns.toArray,
+        mode,
+        options,
+        logicalPlan)).toRdd
   }

   /**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala
index 1659a7a1989dc..52f9bd4421c53 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/DataSourceStrategy.scala
@@ -97,7 +97,7 @@ private[sql] object DataSourceStrategy extends Strategy {
       .reduceOption(expressions.And)
       .getOrElse(Literal(true))

-    val boundPredicate = InterpretedPredicate(predicate.transform {
+    val boundPredicate = InterpretedPredicate.create(predicate.transform {
       case a: AttributeReference =>
         val index = partitionColumns.indexWhere(a.name == _.name)
         BoundReference(index, partitionColumns(index).dataType, nullable = true)
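
A minimal usage sketch (not part of the patch) of the Scala saveAsTable overload
implemented above, assuming a spark-shell style sc/sqlContext in scope; the
"events" table name, the "parquet" source, and the sample data are illustrative:

    import org.apache.spark.sql.SaveMode
    import sqlContext.implicits._

    // Build a small DataFrame with a column to partition by.
    val df = sc.parallelize(1 to 100)
      .map(i => (i.toLong, i % 10))
      .toDF("id", "bucket")

    // Persist it as a partitioned table through the overload fixed above.
    df.saveAsTable(
      "events",                   // hypothetical table name
      "parquet",                  // data source implementation to use
      SaveMode.ErrorIfExists,     // fail if the table already exists
      Map.empty[String, String],  // no source-specific options
      Seq("bucket"))              // partition the table by this column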