Resolve scalatest warnings during build
Author: witgo <[email protected]>

Closes apache#1032 from witgo/ShouldMatchers and squashes the following commits:

7ebf34c [witgo] Resolve scalatest warnings during build
witgo authored and pwendell committed Jun 11, 2014
Parent: 4823bf4 · Commit: c48b622
Showing 21 changed files with 41 additions and 41 deletions.
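The change itself is mechanical: the build warnings come from ScalaTest 2.x deprecating the org.scalatest.matchers.ShouldMatchers trait in favor of org.scalatest.Matchers, which provides the same should-style assertion syntax, so each suite swaps the import and the mixed-in trait. A minimal sketch of the pattern (ExampleSuite is a hypothetical name, not one of the 21 files changed here):

import org.scalatest.FunSuite
// Replaces the deprecated import org.scalatest.matchers.ShouldMatchers
import org.scalatest.Matchers

// Swapping ShouldMatchers for Matchers silences the deprecation warning;
// the assertion syntax itself is unchanged.
class ExampleSuite extends FunSuite with Matchers {
  test("should syntax still works after the rename") {
    (1 + 1) should be (2)
    Seq(1, 2, 3) should have size 3
  }
}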

core/src/test/scala/org/apache/spark/AccumulatorSuite.scala (2 additions, 2 deletions)

@@ -20,11 +20,11 @@ package org.apache.spark
 import scala.collection.mutable

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.SparkContext._

-class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
+class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {


   implicit def setAccum[A] = new AccumulableParam[mutable.Set[A], A] {

core/src/test/scala/org/apache/spark/DistributedSuite.scala (2 additions, 2 deletions)

@@ -20,7 +20,7 @@ package org.apache.spark
 import org.scalatest.BeforeAndAfter
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers
 import org.scalatest.time.{Millis, Span}

 import org.apache.spark.SparkContext._
@@ -31,7 +31,7 @@ class NotSerializableClass
 class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {}


-class DistributedSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
+class DistributedSuite extends FunSuite with Matchers with BeforeAndAfter
   with LocalSparkContext {

   val clusterUrl = "local-cluster[2,1,512]"

@@ -25,7 +25,7 @@ import scala.concurrent.duration._
 import scala.concurrent.future

 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.SparkContext._
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
@@ -35,7 +35,7 @@ import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
  * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers
  * in both FIFO and fair scheduling modes.
  */
-class JobCancellationSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
+class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter
   with LocalSparkContext {

   override def afterEach() {

core/src/test/scala/org/apache/spark/ShuffleSuite.scala (2 additions, 2 deletions)

@@ -18,15 +18,15 @@
 package org.apache.spark

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.SparkContext._
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
 import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, ShuffledRDD, SubtractedRDD}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.util.MutablePair

-class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
+class ShuffleSuite extends FunSuite with Matchers with LocalSparkContext {

   val conf = new SparkConf(loadDefaults = false)

core/src/test/scala/org/apache/spark/deploy/ClientSuite.scala (2 additions, 2 deletions)

@@ -18,9 +18,9 @@
 package org.apache.spark.deploy

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

-class ClientSuite extends FunSuite with ShouldMatchers {
+class ClientSuite extends FunSuite with Matchers {
   test("correctly validates driver jar URL's") {
     ClientArguments.isValidJarUrl("http://someHost:8080/foo.jar") should be (true)
     ClientArguments.isValidJarUrl("file://some/path/to/a/jarFile.jar") should be (true)

@@ -25,9 +25,9 @@ import org.apache.spark.{SparkConf, SparkContext, SparkEnv, SparkException, Test
 import org.apache.spark.deploy.SparkSubmit._
 import org.apache.spark.util.Utils
 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

-class SparkSubmitSuite extends FunSuite with ShouldMatchers {
+class SparkSubmitSuite extends FunSuite with Matchers {
   def beforeAll() {
     System.setProperty("spark.testing", "true")
   }

core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala (2 additions, 2 deletions)

@@ -18,12 +18,12 @@
 package org.apache.spark.rdd

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.{Logging, SharedSparkContext}
 import org.apache.spark.SparkContext._

-class SortingSuite extends FunSuite with SharedSparkContext with ShouldMatchers with Logging {
+class SortingSuite extends FunSuite with SharedSparkContext with Matchers with Logging {

   test("sortByKey") {
     val pairs = sc.parallelize(Array((1, 0), (2, 0), (0, 0), (3, 0)), 2)

@@ -22,13 +22,13 @@ import java.util.concurrent.Semaphore
 import scala.collection.mutable

 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.{LocalSparkContext, SparkContext}
 import org.apache.spark.SparkContext._
 import org.apache.spark.executor.TaskMetrics

-class SparkListenerSuite extends FunSuite with LocalSparkContext with ShouldMatchers
+class SparkListenerSuite extends FunSuite with LocalSparkContext with Matchers
   with BeforeAndAfter with BeforeAndAfterAll {

   /** Length of time to wait while draining listener events. */

@@ -28,7 +28,7 @@ import org.mockito.Mockito.{mock, when}
 import org.scalatest.{BeforeAndAfter, FunSuite, PrivateMethodTester}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.concurrent.Timeouts._
-import org.scalatest.matchers.ShouldMatchers._
+import org.scalatest.Matchers
 import org.scalatest.time.SpanSugar._

 import org.apache.spark.{MapOutputTrackerMaster, SecurityManager, SparkConf}
@@ -39,7 +39,8 @@ import org.apache.spark.util.{AkkaUtils, ByteBufferInputStream, SizeEstimator, U
 import scala.language.implicitConversions
 import scala.language.postfixOps

-class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
+class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfter
+  with PrivateMethodTester {
   private val conf = new SparkConf(false)
   var store: BlockManager = null
   var store2: BlockManager = null
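BlockManagerSuite appears to be the one suite in this commit that previously pulled in matcher syntax by wildcard-importing the ShouldMatchers companion object rather than mixing in the trait; the fix standardizes on the mixin style, splitting the class header across two lines (hence the 7-to-8 line hunk). A sketch contrasting the two styles (MixinStyleSuite is a hypothetical name):

import org.scalatest.FunSuite
import org.scalatest.Matchers

// Style the old code used: bring should-syntax into scope by importing the
// members of the trait's companion object. Works, but the deprecated
// ShouldMatchers object is what triggered the build warning:
//   import org.scalatest.matchers.ShouldMatchers._
//   class BlockManagerSuite extends FunSuite with BeforeAndAfter { ... }

// Style this commit standardizes on: mix the trait into the suite, so the
// syntax is in scope for every test body without a separate import.
class MixinStyleSuite extends FunSuite with Matchers {
  test("matchers are available via the mixin") {
    "spark".length should be (5)
  }
}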

@@ -18,14 +18,14 @@
 package org.apache.spark.ui.jobs

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.{LocalSparkContext, SparkConf, Success}
 import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
 import org.apache.spark.scheduler._
 import org.apache.spark.util.Utils

-class JobProgressListenerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
+class JobProgressListenerSuite extends FunSuite with LocalSparkContext with Matchers {
   test("test LRU eviction of stages") {
     val conf = new SparkConf()
     conf.set("spark.ui.retainedStages", 5.toString)

@@ -18,13 +18,13 @@
 package org.apache.spark.util

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 /**
  *
  */

-class DistributionSuite extends FunSuite with ShouldMatchers {
+class DistributionSuite extends FunSuite with Matchers {
   test("summary") {
     val d = new Distribution((1 to 100).toArray.map{_.toDouble})
     val stats = d.statCounter

@@ -22,9 +22,9 @@ import java.util.NoSuchElementException
 import scala.collection.mutable.Buffer

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

-class NextIteratorSuite extends FunSuite with ShouldMatchers {
+class NextIteratorSuite extends FunSuite with Matchers {
   test("one iteration") {
     val i = new StubIterator(Buffer(1))
     i.hasNext should be === true

@@ -20,11 +20,11 @@ package org.apache.spark.util.collection
 import scala.collection.mutable.HashSet

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.util.SizeEstimator

-class OpenHashMapSuite extends FunSuite with ShouldMatchers {
+class OpenHashMapSuite extends FunSuite with Matchers {

   test("size for specialized, primitive value (int)") {
     val capacity = 1024

@@ -18,11 +18,11 @@
 package org.apache.spark.util.collection

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.util.SizeEstimator

-class OpenHashSetSuite extends FunSuite with ShouldMatchers {
+class OpenHashSetSuite extends FunSuite with Matchers {

   test("size for specialized, primitive int") {
     val loadFactor = 0.7

@@ -20,11 +20,11 @@ package org.apache.spark.util.collection
 import scala.collection.mutable.HashSet

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.util.SizeEstimator

-class PrimitiveKeyOpenHashMapSuite extends FunSuite with ShouldMatchers {
+class PrimitiveKeyOpenHashMapSuite extends FunSuite with Matchers {

   test("size for specialized, primitive key, value (int, int)") {
     val capacity = 1024

@@ -18,13 +18,13 @@
 package org.apache.spark.util.random

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.util.Utils.times

 import scala.language.reflectiveCalls

-class XORShiftRandomSuite extends FunSuite with ShouldMatchers {
+class XORShiftRandomSuite extends FunSuite with Matchers {

   def fixture = new {
     val seed = 1L

@@ -21,7 +21,7 @@ import scala.util.Random
 import scala.collection.JavaConversions._

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.mllib.linalg.Vectors
 import org.apache.spark.mllib.regression._
@@ -56,7 +56,7 @@ object LogisticRegressionSuite
   }
 }

-class LogisticRegressionSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
+class LogisticRegressionSuite extends FunSuite with LocalSparkContext with Matchers {
   def validatePrediction(predictions: Seq[Double], input: Seq[LabeledPoint]) {
     val numOffPredictions = predictions.zip(input).count { case (prediction, expected) =>
       prediction != expected.label

@@ -21,7 +21,7 @@ import scala.util.Random
 import scala.collection.JavaConversions._

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.mllib.regression._
 import org.apache.spark.mllib.util.LocalSparkContext
@@ -61,7 +61,7 @@ object GradientDescentSuite
   }
 }

-class GradientDescentSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
+class GradientDescentSuite extends FunSuite with LocalSparkContext with Matchers {

   test("Assert the loss is decreasing.") {
     val nPoints = 10000

@@ -18,13 +18,13 @@
 package org.apache.spark.mllib.optimization

 import org.scalatest.FunSuite
-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers

 import org.apache.spark.mllib.regression.LabeledPoint
 import org.apache.spark.mllib.linalg.Vectors
 import org.apache.spark.mllib.util.LocalSparkContext

-class LBFGSSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
+class LBFGSSuite extends FunSuite with LocalSparkContext with Matchers {

   val nPoints = 10000
   val A = 2.0

@@ -29,7 +29,6 @@ import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
-import scala.language.postfixOps

 /** Testsuite for testing the network receiver behavior */
 class NetworkReceiverSuite extends FunSuite with Timeouts {

@@ -27,12 +27,12 @@ import org.apache.spark.streaming.dstream.DStream
 import org.apache.spark.streaming.receiver.Receiver
 import org.apache.spark.streaming.scheduler._

-import org.scalatest.matchers.ShouldMatchers
+import org.scalatest.Matchers
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
 import org.apache.spark.Logging

-class StreamingListenerSuite extends TestSuiteBase with ShouldMatchers {
+class StreamingListenerSuite extends TestSuiteBase with Matchers {

   val input = (1 to 4).map(Seq(_)).toSeq
   val operation = (d: DStream[Int]) => d.map(x => x)
