
Commit 0a8cc30

Move some classes to more appropriate packages:
* RDD, *RDDFunctions -> org.apache.spark.rdd
* Utils, ClosureCleaner, SizeEstimator -> org.apache.spark.util
* JavaSerializer, KryoSerializer -> org.apache.spark.serializer
1 parent 5b4dea2 commit 0a8cc30

193 files changed (+462 -418 lines)
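For code built against the old flat package layout, this commit is a mechanical import-path migration. A minimal Scala sketch of the before and after (the enclosing object is hypothetical; the class and package names are the ones listed in the commit message):

// Hypothetical downstream file; only the imports change.
// Before this commit:
//   import org.apache.spark.RDD
//   import org.apache.spark.Utils
//   import org.apache.spark.KryoSerializer
// After this commit:
import org.apache.spark.rdd.RDD
import org.apache.spark.util.Utils
import org.apache.spark.serializer.KryoSerializer

object ImportMigrationExample {
  // Code that referenced these classes compiles unchanged once the imports are updated.
}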


bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala (+1 -1)

@@ -19,7 +19,7 @@ package org.apache.spark.bagel
 
 import org.apache.spark._
 import org.apache.spark.SparkContext._
-import org.apache.spark.RDD
+import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
 
 object Bagel extends Logging {

core/src/main/scala/org/apache/spark/Accumulators.scala (+1)

@@ -21,6 +21,7 @@ import java.io._
 
 import scala.collection.mutable.Map
 import scala.collection.generic.Growable
+import org.apache.spark.serializer.JavaSerializer
 
 /**
  * A datatype that can be accumulated, i.e. has an commutative and associative "add" operation,

core/src/main/scala/org/apache/spark/CacheManager.scala (+1)

@@ -19,6 +19,7 @@ package org.apache.spark
 
 import scala.collection.mutable.{ArrayBuffer, HashSet}
 import org.apache.spark.storage.{BlockManager, StorageLevel}
+import org.apache.spark.rdd.RDD
 
 
 /** Spark class responsible for passing RDDs split contents to the BlockManager and making

core/src/main/scala/org/apache/spark/Dependency.scala (+2)

@@ -17,6 +17,8 @@
 
 package org.apache.spark
 
+import org.apache.spark.rdd.RDD
+
 /**
  * Base class for dependencies.
  */

core/src/main/scala/org/apache/spark/HttpFileServer.scala (+1)

@@ -19,6 +19,7 @@ package org.apache.spark
 
 import java.io.{File}
 import com.google.common.io.Files
+import org.apache.spark.util.Utils
 
 private[spark] class HttpFileServer extends Logging {

core/src/main/scala/org/apache/spark/HttpServer.scala (+1)

@@ -26,6 +26,7 @@ import org.eclipse.jetty.server.handler.DefaultHandler
 import org.eclipse.jetty.server.handler.HandlerList
 import org.eclipse.jetty.server.handler.ResourceHandler
 import org.eclipse.jetty.util.thread.QueuedThreadPool
+import org.apache.spark.util.Utils
 
 /**
  * Exception type thrown by HttpServer when it is in the wrong state for an operation.

core/src/main/scala/org/apache/spark/MapOutputTracker.scala (+1 -1)

@@ -32,7 +32,7 @@ import akka.util.Duration
 
 import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.storage.BlockManagerId
-import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.util.{Utils, MetadataCleaner, TimeStampedHashMap}
 
 
 private[spark] sealed trait MapOutputTrackerMessage

core/src/main/scala/org/apache/spark/Partitioner.scala (+3)

@@ -17,6 +17,9 @@
 
 package org.apache.spark
 
+import org.apache.spark.util.Utils
+import org.apache.spark.rdd.RDD
+
 /**
  * An object that defines how the elements in a key-value pair RDD are partitioned by key.
  * Maps each key to a partition ID, from 0 to `numPartitions - 1`.

core/src/main/scala/org/apache/spark/SparkContext.scala (+3 -5)

@@ -54,17 +54,15 @@ import org.apache.mesos.MesosNativeLibrary
 
 import org.apache.spark.deploy.LocalSparkCluster
 import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
-import org.apache.spark.rdd.{CheckpointRDD, HadoopRDD, NewHadoopRDD, UnionRDD, ParallelCollectionRDD,
-  OrderedRDDFunctions}
+import org.apache.spark.rdd._
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.cluster.{StandaloneSchedulerBackend, SparkDeploySchedulerBackend,
   ClusterScheduler, Schedulable, SchedulingMode}
 import org.apache.spark.scheduler.local.LocalScheduler
 import org.apache.spark.scheduler.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
-import org.apache.spark.storage.{StorageStatus, StorageUtils, RDDInfo, BlockManagerSource}
+import org.apache.spark.storage.{StorageUtils, BlockManagerSource}
 import org.apache.spark.ui.SparkUI
-import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
-import scala.Some
+import org.apache.spark.util.{ClosureCleaner, Utils, MetadataCleaner, TimeStampedHashMap}
 import org.apache.spark.scheduler.StageInfo
 import org.apache.spark.storage.RDDInfo
 import org.apache.spark.storage.StorageStatus

core/src/main/scala/org/apache/spark/SparkEnv.scala (+3 -3)

@@ -29,7 +29,7 @@ import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
 import org.apache.spark.network.ConnectionManager
 import org.apache.spark.serializer.{Serializer, SerializerManager}
-import org.apache.spark.util.AkkaUtils
+import org.apache.spark.util.{Utils, AkkaUtils}
 import org.apache.spark.api.python.PythonWorkerFactory
 
 
@@ -155,10 +155,10 @@ object SparkEnv extends Logging {
     val serializerManager = new SerializerManager
 
     val serializer = serializerManager.setDefault(
-      System.getProperty("spark.serializer", "org.apache.spark.JavaSerializer"))
+      System.getProperty("spark.serializer", "org.apache.spark.serializer.JavaSerializer"))
 
     val closureSerializer = serializerManager.get(
-      System.getProperty("spark.closure.serializer", "org.apache.spark.JavaSerializer"))
+      System.getProperty("spark.closure.serializer", "org.apache.spark.serializer.JavaSerializer"))
 
     def registerOrLookup(name: String, newActor: => Actor): ActorRef = {
       if (isDriver) {
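The second SparkEnv hunk also changes the default values of the spark.serializer and spark.closure.serializer properties, which are resolved by reflection from fully qualified class names, so jobs that override them must switch to the new package as well. A hedged sketch of such an override (property names are taken from the diff above; at this point in Spark's history these were plain Java system properties and took effect only if set before the SparkContext was created):

// Hypothetical driver setup; must run before `new SparkContext(...)`,
// otherwise the defaults shown in the diff above apply.
System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
System.setProperty("spark.closure.serializer", "org.apache.spark.serializer.JavaSerializer")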

core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala (+1 -1)

@@ -17,7 +17,7 @@
 
 package org.apache.spark.api.java
 
-import org.apache.spark.RDD
+import org.apache.spark.rdd.RDD
 import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
 import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.util.StatCounter

core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala (+1 -1)

@@ -33,12 +33,12 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.spark.HashPartitioner
 import org.apache.spark.Partitioner
 import org.apache.spark.Partitioner._
-import org.apache.spark.RDD
 import org.apache.spark.SparkContext.rddToPairRDDFunctions
 import org.apache.spark.api.java.function.{Function2 => JFunction2}
 import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.partial.BoundedDouble
 import org.apache.spark.partial.PartialResult
+import org.apache.spark.rdd.RDD
 import org.apache.spark.rdd.OrderedRDDFunctions
 import org.apache.spark.storage.StorageLevel

core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala (+1)

@@ -18,6 +18,7 @@
 package org.apache.spark.api.java
 
 import org.apache.spark._
+import org.apache.spark.rdd.RDD
 import org.apache.spark.api.java.function.{Function => JFunction}
 import org.apache.spark.storage.StorageLevel

core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala (+4 -2)

@@ -21,13 +21,15 @@ import java.util.{List => JList, Comparator}
 import scala.Tuple2
 import scala.collection.JavaConversions._
 
+import com.google.common.base.Optional
 import org.apache.hadoop.io.compress.CompressionCodec
-import org.apache.spark.{SparkContext, Partition, RDD, TaskContext}
+
+import org.apache.spark.{SparkContext, Partition, TaskContext}
+import org.apache.spark.rdd.RDD
 import org.apache.spark.api.java.JavaPairRDD._
 import org.apache.spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
 import org.apache.spark.partial.{PartialResult, BoundedDouble}
 import org.apache.spark.storage.StorageLevel
-import com.google.common.base.Optional
 
 
 trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {

core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala (+3 -3)

@@ -26,13 +26,13 @@ import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.mapred.InputFormat
 import org.apache.hadoop.mapred.JobConf
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
+import com.google.common.base.Optional
 
-import org.apache.spark.{Accumulable, AccumulableParam, Accumulator, AccumulatorParam, RDD, SparkContext}
+import org.apache.spark.{Accumulable, AccumulableParam, Accumulator, AccumulatorParam, SparkContext}
 import org.apache.spark.SparkContext.IntAccumulatorParam
 import org.apache.spark.SparkContext.DoubleAccumulatorParam
 import org.apache.spark.broadcast.Broadcast
-
-import com.google.common.base.Optional
+import org.apache.spark.rdd.RDD
 
 /**
  * A Java-friendly version of [[org.apache.spark.SparkContext]] that returns [[org.apache.spark.api.java.JavaRDD]]s and

core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala (+1 -1)

@@ -18,8 +18,8 @@
 package org.apache.spark.api.python
 
 import org.apache.spark.Partitioner
-import org.apache.spark.Utils
 import java.util.Arrays
+import org.apache.spark.util.Utils
 
 /**
  * A [[org.apache.spark.Partitioner]] that performs handling of byte arrays, for use by the Python API.

core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala (+2)

@@ -26,7 +26,9 @@ import scala.collection.JavaConversions._
 import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark._
+import org.apache.spark.rdd.RDD
 import org.apache.spark.rdd.PipedRDD
+import org.apache.spark.util.Utils
 
 
 private[spark] class PythonRDD[T: ClassManifest](

core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala (+1)

@@ -27,6 +27,7 @@ import scala.math
 
 import org.apache.spark._
 import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.Utils
 
 private[spark] class BitTorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
   extends Broadcast[T](id)

core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala (+2 -2)

@@ -23,10 +23,10 @@ import java.net.URL
 import it.unimi.dsi.fastutil.io.FastBufferedInputStream
 import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
 
-import org.apache.spark.{HttpServer, Logging, SparkEnv, Utils}
+import org.apache.spark.{HttpServer, Logging, SparkEnv}
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.{MetadataCleaner, TimeStampedHashSet}
+import org.apache.spark.util.{Utils, MetadataCleaner, TimeStampedHashSet}
 
 
 private[spark] class HttpBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)

core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala (+1)

@@ -24,6 +24,7 @@ import java.util.Random
 import scala.collection.mutable.Map
 
 import org.apache.spark._
+import org.apache.spark.util.Utils
 
 private object MultiTracker
   extends Logging {

core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala (+1)

@@ -26,6 +26,7 @@ import scala.math
 
 import org.apache.spark._
 import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.Utils
 
 private[spark] class TreeBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
   extends Broadcast[T](id) with Logging with Serializable {

core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala (+1 -1)

@@ -19,10 +19,10 @@ package org.apache.spark.deploy
 
 import scala.collection.immutable.List
 
-import org.apache.spark.Utils
 import org.apache.spark.deploy.ExecutorState.ExecutorState
 import org.apache.spark.deploy.master.{WorkerInfo, ApplicationInfo}
 import org.apache.spark.deploy.worker.ExecutorRunner
+import org.apache.spark.util.Utils
 
 
 private[deploy] sealed trait DeployMessage extends Serializable

core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala (+2 -2)

@@ -21,8 +21,8 @@ import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
 
 import org.apache.spark.deploy.worker.Worker
 import org.apache.spark.deploy.master.Master
-import org.apache.spark.util.AkkaUtils
-import org.apache.spark.{Logging, Utils}
+import org.apache.spark.util.{Utils, AkkaUtils}
+import org.apache.spark.{Logging}
 
 import scala.collection.mutable.ArrayBuffer

core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala (+2 -2)

@@ -17,8 +17,8 @@
 
 package org.apache.spark.deploy.client
 
-import org.apache.spark.util.AkkaUtils
-import org.apache.spark.{Logging, Utils}
+import org.apache.spark.util.{Utils, AkkaUtils}
+import org.apache.spark.{Logging}
 import org.apache.spark.deploy.{Command, ApplicationDescription}
 
 private[spark] object TestClient {

core/src/main/scala/org/apache/spark/deploy/master/Master.scala (+2 -2)

@@ -27,12 +27,12 @@ import akka.actor.Terminated
 import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientDisconnected, RemoteClientShutdown}
 import akka.util.duration._
 
-import org.apache.spark.{Logging, SparkException, Utils}
+import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.ui.MasterWebUI
 import org.apache.spark.metrics.MetricsSystem
-import org.apache.spark.util.AkkaUtils
+import org.apache.spark.util.{Utils, AkkaUtils}
 
 
 private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {

core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala (+1 -2)

@@ -17,8 +17,7 @@
 
 package org.apache.spark.deploy.master
 
-import org.apache.spark.util.IntParam
-import org.apache.spark.Utils
+import org.apache.spark.util.{Utils, IntParam}
 
 /**
  * Command-line parser for the master.

core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala (+1 -1)

@@ -19,7 +19,7 @@ package org.apache.spark.deploy.master
 
 import akka.actor.ActorRef
 import scala.collection.mutable
-import org.apache.spark.Utils
+import org.apache.spark.util.Utils
 
 private[spark] class WorkerInfo(
   val id: String,

core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala (+1 -1)

@@ -31,7 +31,7 @@ import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
 import org.apache.spark.deploy.JsonProtocol
 import org.apache.spark.deploy.master.ExecutorInfo
 import org.apache.spark.ui.UIUtils
-import org.apache.spark.Utils
+import org.apache.spark.util.Utils
 
 private[spark] class ApplicationPage(parent: MasterWebUI) {
   val master = parent.masterActorRef

core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala (+1 -1)

@@ -27,12 +27,12 @@ import akka.util.duration._
 
 import net.liftweb.json.JsonAST.JValue
 
-import org.apache.spark.Utils
 import org.apache.spark.deploy.DeployWebUI
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
 import org.apache.spark.deploy.JsonProtocol
 import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo}
 import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 private[spark] class IndexPage(parent: MasterWebUI) {
   val master = parent.masterActorRef

core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala (+2 -1)

@@ -23,10 +23,11 @@ import javax.servlet.http.HttpServletRequest
 
 import org.eclipse.jetty.server.{Handler, Server}
 
-import org.apache.spark.{Logging, Utils}
+import org.apache.spark.{Logging}
 import org.apache.spark.deploy.master.Master
 import org.apache.spark.ui.JettyUtils
 import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.util.Utils
 
 /**
  * Web UI server for the standalone master.

core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala (+2 -1)

@@ -25,9 +25,10 @@ import akka.actor.ActorRef
 import com.google.common.base.Charsets
 import com.google.common.io.Files
 
-import org.apache.spark.{Utils, Logging}
+import org.apache.spark.{Logging}
 import org.apache.spark.deploy.{ExecutorState, ApplicationDescription}
 import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
+import org.apache.spark.util.Utils
 
 /**
  * Manages the execution of one executor process.

core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala (+2 -2)

@@ -27,13 +27,13 @@ import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
 import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientShutdown, RemoteClientDisconnected}
 import akka.util.duration._
 
-import org.apache.spark.{Logging, Utils}
+import org.apache.spark.{Logging}
 import org.apache.spark.deploy.ExecutorState
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.Master
 import org.apache.spark.deploy.worker.ui.WorkerWebUI
 import org.apache.spark.metrics.MetricsSystem
-import org.apache.spark.util.AkkaUtils
+import org.apache.spark.util.{Utils, AkkaUtils}
 
 
 private[spark] class Worker(

core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala (+1 -3)

@@ -17,9 +17,7 @@
 
 package org.apache.spark.deploy.worker
 
-import org.apache.spark.util.IntParam
-import org.apache.spark.util.MemoryParam
-import org.apache.spark.Utils
+import org.apache.spark.util.{Utils, IntParam, MemoryParam}
 import java.lang.management.ManagementFactory
 
 /**

core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala (+1 -1)

@@ -27,11 +27,11 @@ import akka.util.duration._
 
 import net.liftweb.json.JsonAST.JValue
 
-import org.apache.spark.Utils
 import org.apache.spark.deploy.JsonProtocol
 import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
 import org.apache.spark.deploy.worker.ExecutorRunner
 import org.apache.spark.ui.UIUtils
+import org.apache.spark.util.Utils
 
 
 private[spark] class IndexPage(parent: WorkerWebUI) {
