
Commit cb4bb2f

rebase
dilipbiswal committed Jul 29, 2019
1 parent c31a0b2 commit cb4bb2f
Showing 3 changed files with 322 additions and 215 deletions.

AdaptiveSparkPlanExec.scala

@@ -33,6 +33,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ReturnAnswer}
 import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
+import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec._
 import org.apache.spark.sql.execution.adaptive.rule.ReduceNumShufflePartitions
@@ -190,10 +191,25 @@ case class AdaptiveSparkPlanExec(
       verbose: Boolean,
       prefix: String = "",
       addSuffix: Boolean = false,
-      maxFields: Int): Unit = {
-    super.generateTreeString(depth, lastChildren, append, verbose, prefix, addSuffix, maxFields)
+      maxFields: Int,
+      planToOperatorID: mutable.LinkedHashMap[TreeNode[_], Int]): Unit = {
+    super.generateTreeString(depth,
+      lastChildren,
+      append,
+      verbose,
+      prefix,
+      addSuffix,
+      maxFields,
+      planToOperatorID)
     currentPhysicalPlan.generateTreeString(
-      depth + 1, lastChildren :+ true, append, verbose, "", addSuffix = false, maxFields)
+      depth + 1,
+      lastChildren :+ true,
+      append,
+      verbose,
+      "",
+      addSuffix = false,
+      maxFields,
+      planToOperatorID)
   }
 
   /**
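
The change above extends AdaptiveSparkPlanExec.generateTreeString with a planToOperatorID: mutable.LinkedHashMap[TreeNode[_], Int] parameter and forwards it both to super.generateTreeString and to the nested currentPhysicalPlan. For context, below is a minimal sketch (illustration only, not Spark code and not part of this commit) of the pattern such a map supports: visit a tree once, record an insertion-ordered operator ID per node, then reuse that single map wherever the tree is rendered. ToyNode and assignOperatorIds are hypothetical names.

import scala.collection.mutable

// Hypothetical stand-in for a Spark plan node, for illustration only.
case class ToyNode(name: String, children: Seq[ToyNode] = Nil)

// Walk the tree once in pre-order and give every node an insertion-ordered
// ID, mirroring the role a planToOperatorID map can play when nested plans
// are rendered with generateTreeString.
def assignOperatorIds(root: ToyNode): mutable.LinkedHashMap[ToyNode, Int] = {
  val ids = mutable.LinkedHashMap.empty[ToyNode, Int]
  def visit(node: ToyNode): Unit = {
    ids.getOrElseUpdate(node, ids.size + 1)
    node.children.foreach(visit)
  }
  visit(root)
  ids
}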

QueryStageExec.scala

@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.execution.adaptive
 
+import scala.collection.mutable
 import scala.concurrent.Future
 
 import org.apache.spark.{FutureAction, MapOutputStatistics}
@@ -26,6 +27,7 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.Statistics
 import org.apache.spark.sql.catalyst.plans.physical.Partitioning
+import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.execution.exchange._
 
@@ -107,10 +109,18 @@ abstract class QueryStageExec extends LeafExecNode {
       verbose: Boolean,
       prefix: String = "",
       addSuffix: Boolean = false,
-      maxFields: Int): Unit = {
-    super.generateTreeString(depth, lastChildren, append, verbose, prefix, addSuffix, maxFields)
+      maxFields: Int,
+      planToOperatorID: mutable.LinkedHashMap[TreeNode[_], Int]): Unit = {
+    super.generateTreeString(depth,
+      lastChildren,
+      append,
+      verbose,
+      prefix,
+      addSuffix,
+      maxFields,
+      planToOperatorID)
     plan.generateTreeString(
-      depth + 1, lastChildren :+ true, append, verbose, "", false, maxFields)
+      depth + 1, lastChildren :+ true, append, verbose, "", false, maxFields, planToOperatorID)
   }
 }
 
[Diff for the third changed file did not load and is not shown.]
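
Both overrides in this commit thread the same planToOperatorID map from the outer node down into the wrapped plan (currentPhysicalPlan or plan), so nested operators are labeled consistently with the rest of the tree. The sketch below continues the hypothetical ToyNode/assignOperatorIds example above (none of these names are Spark APIs) and shows the same idea: build the ID map once at the root and pass it unchanged through every recursive rendering call.

import scala.collection.mutable

// Thread one shared ID map through a recursive renderer, analogous to
// forwarding planToOperatorID from an outer node to its nested plan.
def renderTree(
    node: ToyNode,
    append: String => Unit,
    depth: Int,
    ids: mutable.LinkedHashMap[ToyNode, Int]): Unit = {
  val id = ids.getOrElse(node, -1)
  append(("   " * depth) + s"($id) ${node.name}\n")
  node.children.foreach(child => renderTree(child, append, depth + 1, ids))
}

// Usage: assign IDs once, then reuse the same map for every subtree so
// nested nodes print the same operator IDs as the outer tree.
val plan = ToyNode("AdaptiveSparkPlan",
  Seq(ToyNode("ShuffleQueryStage", Seq(ToyNode("Exchange")))))
val operatorIds = assignOperatorIds(plan)
val output = new StringBuilder
renderTree(plan, s => output.append(s), depth = 0, ids = operatorIds)
print(output)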
