Skip to content

Commit

Permalink
Minor changes based on Andrew's comment on PR.
Browse files — browse the repository at this point in the history
  • Loading branch information
tdas committed Apr 8, 2014
1 parent d0b8d65 commit 6b59cfc
Show file tree
Hide file tree
Showing 5 changed files with 27 additions and 27 deletions.
23 changes: 12 additions & 11 deletions project/MimaBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -58,17 +58,18 @@ object MimaBuild {
SparkBuild.SPARK_VERSION match {
case v if v.startsWith("1.0") =>
Seq(
excludePackage("org.apache.spark.api.java"),
excludePackage("org.apache.spark.streaming.api.java"),
excludePackage("org.apache.spark.mllib")
) ++
excludeSparkClass("rdd.ClassTags") ++
excludeSparkClass("util.XORShiftRandom") ++
excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
excludeSparkClass("mllib.optimization.SquaredGradient") ++
excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
excludeSparkClass("mllib.regression.LassoWithSGD") ++
excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
excludePackage("org.apache.spark.api.java"),
excludePackage("org.apache.spark.streaming.api.java"),
excludePackage("org.apache.spark.mllib")
) ++
excludeSparkClass("rdd.ClassTags") ++
excludeSparkClass("util.XORShiftRandom") ++
excludeSparkClass("mllib.recommendation.MFDataGenerator") ++
excludeSparkClass("mllib.optimization.SquaredGradient") ++
excludeSparkClass("mllib.regression.RidgeRegressionWithSGD") ++
excludeSparkClass("mllib.regression.LassoWithSGD") ++
excludeSparkClass("mllib.regression.LinearRegressionWithSGD")
excludeSparkClass("streaming.dstream.NetworkReceiver")
case _ => Seq()
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ class JobGenerator(jobScheduler: JobScheduler) extends Logging {
while(!hasTimedOut && jobScheduler.networkInputTracker.hasMoreReceivedBlockIds) {
Thread.sleep(pollTime)
}
logInfo("Waited for all received blocsk to be consumed for job generation")
logInfo("Waited for all received blocks to be consumed for job generation")

// Stop generating jobs
val stopTime = timer.stop(false)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ class JobScheduler(val ssc: StreamingContext) extends Logging {
// Stop everything else
listenerBus.stop()
ssc.env.actorSystem.stop(eventActor)
eventActor == null
eventActor = null
logInfo("Stopped JobScheduler")
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,14 @@

package org.apache.spark.streaming.scheduler

import scala.collection.mutable.{HashMap, SynchronizedMap, Queue}
import scala.collection.mutable.{HashMap, Queue, SynchronizedMap}

import akka.actor._

import org.apache.spark.{SparkException, Logging, SparkEnv}
import org.apache.spark.{Logging, SparkEnv, SparkException}
import org.apache.spark.SparkContext._
import org.apache.spark.storage.BlockId
import org.apache.spark.streaming.{Time, StreamingContext}
import org.apache.spark.streaming.dstream.NetworkReceiver
import org.apache.spark.streaming.dstream.StopReceiver
import org.apache.spark.streaming.{StreamingContext, Time}
import org.apache.spark.streaming.dstream.{NetworkReceiver, StopReceiver}
import org.apache.spark.util.AkkaUtils

private[streaming] sealed trait NetworkInputTrackerMessage
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,16 @@

package org.apache.spark.streaming

import org.scalatest.{FunSuite, BeforeAndAfter}
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
import org.apache.spark.{Logging, SparkException, SparkConf, SparkContext}
import org.apache.spark.util.{Utils, MetadataCleaner}
import org.apache.spark.streaming.dstream.{NetworkReceiver, DStream}
import java.util.concurrent.atomic.AtomicInteger

import org.apache.spark.{Logging, SparkConf, SparkContext, SparkException}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, NetworkReceiver}
import org.apache.spark.util.{MetadataCleaner, Utils}
import org.scalatest.{BeforeAndAfter, FunSuite}
import org.scalatest.concurrent.Timeouts
import org.scalatest.exceptions.TestFailedDueToTimeoutException
import org.scalatest.time.SpanSugar._

class StreamingContextSuite extends FunSuite with BeforeAndAfter with Timeouts with Logging {

Expand Down Expand Up @@ -296,4 +297,4 @@ class TestReceiver extends NetworkReceiver[Int] {

object TestReceiver {
val counter = new AtomicInteger(1)
}
}

0 comments on commit 6b59cfc

Please sign in to comment.