Commit ffbcbbb

resolve more
1 parent d509834 commit ffbcbbb

73 files changed: +241 −241 lines changed

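Every hunk shown below is a whitespace-only Scala style fix: the space before the colon of a type annotation or result type is dropped, and the stray space just inside an opening parenthesis is removed. A minimal before/after sketch of the rule being applied (hypothetical names, not taken from the diff):

    // before: space before the result-type colon, space after the opening paren
    def parse(line: String) : Option[Int] = Some( line.trim.length )

    // after: no space before ':', none after '('
    def parse(line: String): Option[Int] = Some(line.trim.length)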

core/src/test/scala/org/apache/spark/AccumulatorSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -47,15 +47,15 @@ class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContex
 
   implicit def setAccum[A]: AccumulableParam[mutable.Set[A], A] =
     new AccumulableParam[mutable.Set[A], A] {
-      def addInPlace(t1: mutable.Set[A], t2: mutable.Set[A]) : mutable.Set[A] = {
+      def addInPlace(t1: mutable.Set[A], t2: mutable.Set[A]): mutable.Set[A] = {
         t1 ++= t2
         t1
       }
-      def addAccumulator(t1: mutable.Set[A], t2: A) : mutable.Set[A] = {
+      def addAccumulator(t1: mutable.Set[A], t2: A): mutable.Set[A] = {
         t1 += t2
         t1
       }
-      def zero(t: mutable.Set[A]) : mutable.Set[A] = {
+      def zero(t: mutable.Set[A]): mutable.Set[A] = {
         new mutable.HashSet[A]()
       }
     }
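For context, the setAccum implicit above supplies an AccumulableParam that merges per-task mutable sets. A minimal usage sketch under the old (pre-3.0, since deprecated) Accumulable API, assuming a live SparkContext sc and hypothetical values:

    // setAccum[String] is resolved implicitly; elements added on executors
    // are merged into one set on the driver
    val acc = sc.accumulable(mutable.Set[String]())
    sc.parallelize(Seq("a", "b", "a")).foreach(acc += _)
    // acc.value == Set("a", "b")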

core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -523,22 +523,22 @@ class CleanerTester(
     toBeCheckpointIds.synchronized { toBeCheckpointIds.isEmpty }
 
   private def getRDDBlocks(rddId: Int): Seq[BlockId] = {
-    blockManager.master.getMatchingBlockIds( _ match {
+    blockManager.master.getMatchingBlockIds(_ match {
       case RDDBlockId(`rddId`, _) => true
       case _ => false
     }, askSlaves = true)
   }
 
   private def getShuffleBlocks(shuffleId: Int): Seq[BlockId] = {
-    blockManager.master.getMatchingBlockIds( _ match {
+    blockManager.master.getMatchingBlockIds(_ match {
       case ShuffleBlockId(`shuffleId`, _, _) => true
       case ShuffleIndexBlockId(`shuffleId`, _, _) => true
       case _ => false
     }, askSlaves = true)
   }
 
   private def getBroadcastBlocks(broadcastId: Long): Seq[BlockId] = {
-    blockManager.master.getMatchingBlockIds( _ match {
+    blockManager.master.getMatchingBlockIds(_ match {
       case BroadcastBlockId(`broadcastId`, _) => true
       case _ => false
     }, askSlaves = true)

core/src/test/scala/org/apache/spark/Smuggle.scala

Lines changed: 2 additions & 2 deletions
@@ -62,7 +62,7 @@ object Smuggle {
   private val lock = new ReentrantReadWriteLock
   private val smuggledObjects = mutable.WeakHashMap.empty[Symbol, Any]
 
-  private def get[T](key: Symbol) : T = {
+  private def get[T](key: Symbol): T = {
     lock.readLock().lock()
     try {
       smuggledObjects(key).asInstanceOf[T]
@@ -78,6 +78,6 @@ object Smuggle {
    * @tparam T
    * @return the smuggled object represented by the wrapper.
    */
-  implicit def unpackSmuggledObject[T](smuggle : Smuggle[T]): T = smuggle.smuggledObject
+  implicit def unpackSmuggledObject[T](smuggle: Smuggle[T]): T = smuggle.smuggledObject
 
 }

core/src/test/scala/org/apache/spark/deploy/DeployTestUtils.scala

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ private[deploy] object DeployTestUtils {
     new ApplicationDescription("name", Some(4), 1234, cmd, "appUiUrl")
   }
 
-  def createAppInfo() : ApplicationInfo = {
+  def createAppInfo(): ApplicationInfo = {
     val appDesc = createAppDesc()
     val appInfo = new ApplicationInfo(JsonConstants.appInfoStartTime,
       "id", appDesc, JsonConstants.submitDate, null, Int.MaxValue)

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ trait TestPrematureExit {
   private[spark] def testPrematureExit(
       input: Array[String],
       searchString: String,
-      mainObject: CommandLineUtils = SparkSubmit) : Unit = {
+      mainObject: CommandLineUtils = SparkSubmit): Unit = {
     val printStream = new BufferPrintStream()
     mainObject.printStream = printStream
 

core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -77,7 +77,7 @@ class ApplicationCacheSuite extends SparkFunSuite with Logging with MockitoSugar
     override def getAppUI(appId: String, attemptId: Option[String]): Option[LoadedAppUI] = {
       logDebug(s"getAppUI($appId, $attemptId)")
       getAppUICount += 1
-      instances.get(CacheKey(appId, attemptId)).map( e =>
+      instances.get(CacheKey(appId, attemptId)).map(e =>
         LoadedAppUI(e.ui, updateProbe(appId, attemptId, e.probeTime)))
     }
 
@@ -393,7 +393,7 @@ class ApplicationCacheSuite extends SparkFunSuite with Logging with MockitoSugar
     val ids = new ListBuffer[String]()
     // build a list of applications
     val count = 100
-    for (i <- 1 to count ) {
+    for (i <- 1 to count) {
       val appId = f"app-$i%04d"
       ids += appId
       clock.advance(10)

core/src/test/scala/org/apache/spark/deploy/worker/WorkerSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ class WorkerSuite extends SparkFunSuite with Matchers with BeforeAndAfter {
   import org.apache.spark.deploy.DeployTestUtils._
 
   def cmd(javaOpts: String*): Command = {
-    Command("", Seq.empty, Map.empty, Seq.empty, Seq.empty, Seq(javaOpts : _*))
+    Command("", Seq.empty, Map.empty, Seq.empty, Seq.empty, Seq(javaOpts: _*))
   }
   def conf(opts: (String, String)*): SparkConf = new SparkConf(loadDefaults = false).setAll(opts)
 

core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -78,7 +78,7 @@ class JdbcRDDSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkCont
       () => { DriverManager.getConnection("jdbc:derby:target/JdbcRDDSuiteDb") },
       "SELECT DATA FROM FOO WHERE ? <= ID AND ID <= ?",
       1, 100, 3,
-      (r: ResultSet) => { r.getInt(1) } ).cache()
+      (r: ResultSet) => { r.getInt(1) }).cache()
 
     assert(rdd.count === 100)
     assert(rdd.reduce(_ + _) === 10100)
@@ -91,7 +91,7 @@ class JdbcRDDSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkCont
       () => { DriverManager.getConnection("jdbc:derby:target/JdbcRDDSuiteDb") },
       "SELECT DATA FROM BIGINT_TEST WHERE ? <= ID AND ID <= ?",
       1131544775L, 567279358897692673L, 20,
-      (r: ResultSet) => { r.getInt(1) } ).cache()
+      (r: ResultSet) => { r.getInt(1) }).cache()
     assert(rdd.count === 100)
     assert(rdd.reduce(_ + _) === 5050)
   }

core/src/test/scala/org/apache/spark/rdd/ZippedPartitionsSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ package org.apache.spark.rdd
 import org.apache.spark.{SharedSparkContext, SparkFunSuite}
 
 object ZippedPartitionsSuite {
-  def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]) : Iterator[Int] = {
+  def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]): Iterator[Int] = {
     Iterator(i.toArray.size, s.toArray.size, d.toArray.size)
   }
 }

core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala

Lines changed: 4 additions & 4 deletions
@@ -745,18 +745,18 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
 
     // Construct RpcTimeout with a single property
    val rt1 = RpcTimeout(conf, testProp)
-    assert( testDurationSeconds === rt1.duration.toSeconds )
+    assert(testDurationSeconds === rt1.duration.toSeconds )
 
     // Construct RpcTimeout with prioritized list of properties
     val rt2 = RpcTimeout(conf, Seq("spark.ask.invalid.timeout", testProp, secondaryProp), "1s")
-    assert( testDurationSeconds === rt2.duration.toSeconds )
+    assert(testDurationSeconds === rt2.duration.toSeconds )
 
     // Construct RpcTimeout with default value,
     val defaultProp = "spark.ask.default.timeout"
     val defaultDurationSeconds = 1
     val rt3 = RpcTimeout(conf, Seq(defaultProp), defaultDurationSeconds.toString + "s")
-    assert( defaultDurationSeconds === rt3.duration.toSeconds )
-    assert( rt3.timeoutProp.contains(defaultProp) )
+    assert(defaultDurationSeconds === rt3.duration.toSeconds )
+    assert(rt3.timeoutProp.contains(defaultProp) )
 
     // Try to construct RpcTimeout with an unconfigured property
     intercept[NoSuchElementException] {
