-
Notifications
You must be signed in to change notification settings - Fork 28.2k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[CARMEL-3157] Index upgrade to 3.0 (#52)
* index syntax * index build * index prune * index metrics * index ut * [CARMEL-3157] index pruning - upgrade to 3.0 * remove ut for index treated as unsupported feature * fix conflict * fix conflict * fix style
- Loading branch information
Showing
56 changed files
with
6,265 additions
and
41 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
126 changes: 126 additions & 0 deletions
126
core/src/main/scala/org/apache/spark/executor/PruneMetrics.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,126 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one or more | ||
* contributor license agreements. See the NOTICE file distributed with | ||
* this work for additional information regarding copyright ownership. | ||
* The ASF licenses this file to You under the Apache License, Version 2.0 | ||
* (the "License"); you may not use this file except in compliance with | ||
* the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.executor | ||
|
||
import java.util.concurrent.ConcurrentHashMap | ||
import java.util.concurrent.atomic.AtomicLong | ||
|
||
import org.apache.spark.internal.Logging | ||
import org.apache.spark.util.AccumulatorV2 | ||
|
||
/**
 * Mutable pruning statistics for a single stat name (e.g. one index/scan).
 *
 * @param name            identifier this stat is keyed by
 * @param prunedFiles     running count of files skipped by index pruning
 * @param prunedRowGroups running count of Parquet row groups skipped
 */
case class PrunedStats(name: String, var prunedFiles: AtomicLong, var prunedRowGroups: AtomicLong) {
  /** True when nothing has been pruned yet. */
  def isZero: Boolean = {
    // Compare the wrapped long values. The original compared the AtomicLong
    // *references* to 0 (`prunedFiles == 0`), which is always false, so
    // isZero could never return true once constructed.
    prunedFiles.get() == 0L && prunedRowGroups.get() == 0L
  }

  /** Adds `v` to the pruned-file counter. */
  def incPrunedFiles(v: Long): Unit = {
    prunedFiles.addAndGet(v)
  }

  /** Adds `v` to the pruned-row-group counter. */
  def incPrunedRowGroups(v: Long): Unit = {
    prunedRowGroups.addAndGet(v)
  }

  override def toString: String = {
    s"[name: $name, prunedFiles: $prunedFiles, prunedRowGroups: $prunedRowGroups]"
  }
}
|
||
/**
 * Accumulator aggregating index-pruning metrics keyed by stat name.
 *
 * Input tuples are (statName, subMetricName, delta), where subMetricName is
 * one of [[PrunedMetricsAccum.PRUNED_FILES]] / [[PrunedMetricsAccum.PRUNED_ROWGROUPS]];
 * the accumulated value is the list of per-name [[PrunedStats]].
 *
 * NOTE(review): backed by a ConcurrentHashMap, but the original contract says
 * it is only used in a single-threaded environment.
 */
class PrunedMetricsAccum() extends
    AccumulatorV2[(String, String, Long), List[PrunedStats]] with Logging {
  // Explicit asScala conversion instead of the deprecated implicit
  // scala.collection.convert.WrapAsScala (removed in Scala 2.13).
  import scala.collection.JavaConverters._

  private val pruneStatsMap: scala.collection.concurrent.Map[String, PrunedStats] =
    new ConcurrentHashMap[String, PrunedStats]().asScala

  /** Registers an empty stat for `statName` if absent (atomic, unlike contains-then-put). */
  private def addPrunedStat(statName: String): Unit = {
    pruneStatsMap.putIfAbsent(
      statName, PrunedStats(statName, new AtomicLong(0), new AtomicLong(0)))
  }

  /** Overwrites entries from `value`, keyed by each stat's name. */
  def setValue(value: List[PrunedStats]): Unit = {
    value.foreach(s => pruneStatsMap.put(s.name, s))
  }

  override def isZero: Boolean =
    pruneStatsMap.isEmpty || pruneStatsMap.forall { case (_, stats) => stats.isZero }

  override def reset(): Unit = {
    pruneStatsMap.clear()
  }

  /**
   * Adds a delta for (statName, subMetricName). Unknown sub-metric names are
   * silently ignored, matching the original behavior.
   */
  override def add(v: (String, String, Long)): Unit = {
    val (statName, subMetricName, value) = v
    addPrunedStat(statName)
    pruneStatsMap.get(statName).foreach { stats =>
      if (subMetricName.equalsIgnoreCase(PrunedMetricsAccum.PRUNED_FILES)) {
        stats.incPrunedFiles(value)
      } else if (subMetricName.equalsIgnoreCase(PrunedMetricsAccum.PRUNED_ROWGROUPS)) {
        stats.incPrunedRowGroups(value)
      }
    }
  }

  override def copy(): AccumulatorV2[(String, String, Long), List[PrunedStats]] = {
    val copiedStats = new PrunedMetricsAccum
    pruneStatsMap.foreach { case (name, stats) =>
      // Deep-copy the counters. The original reused the same AtomicLong
      // instances, so increments on the copy also mutated this accumulator.
      copiedStats.pruneStatsMap.put(name,
        PrunedStats(stats.name,
          new AtomicLong(stats.prunedFiles.get()),
          new AtomicLong(stats.prunedRowGroups.get())))
    }
    copiedStats
  }

  override def merge(other: AccumulatorV2[(String, String, Long), List[PrunedStats]]): Unit = {
    other match {
      case otherStats: PrunedMetricsAccum =>
        otherStats.pruneStatsMap.foreach { case (name, stats) =>
          pruneStatsMap.get(name) match {
            case Some(existing) =>
              existing.incPrunedFiles(stats.prunedFiles.get())
              existing.incPrunedRowGroups(stats.prunedRowGroups.get())
            case None =>
              // The merged-in accumulator is discarded after merge, so
              // adopting its stats object directly is safe here.
              pruneStatsMap.put(name, stats)
          }
        }
      case _ => // ignore accumulators of other types (original behavior)
    }
  }

  override def value: List[PrunedStats] = {
    pruneStatsMap.values.toList
  }

  /** Snapshot copy marked as not driver-side, carrying over this accumulator's metadata. */
  override def toImmutable(): AccumulatorV2[(String, String, Long), List[PrunedStats]] = {
    val copiedStats = new PrunedMetricsAccum
    copiedStats.metadata = this.metadata
    pruneStatsMap.foreach { case (name, stats) =>
      // Deep-copy counters for the same reason as copy().
      copiedStats.pruneStatsMap.put(name,
        PrunedStats(stats.name,
          new AtomicLong(stats.prunedFiles.get()),
          new AtomicLong(stats.prunedRowGroups.get())))
    }
    copiedStats.atDriverSide(false)
    copiedStats
  }
}
/** Sub-metric names accepted by the pruning-metrics accumulator's `add`. */
object PrunedMetricsAccum {
  /** Sub-metric key: number of files skipped by index pruning. */
  val PRUNED_FILES = "PRUNED_FILES"
  /** Sub-metric key: number of Parquet row groups skipped by index pruning. */
  val PRUNED_ROWGROUPS = "PRUNED_ROWGROUPS"
}
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.