Commit f957c26

Merge pull request #882 from mateiz/package-rename

Rename spark package to org.apache.spark

2 parents: a30fac1 + 0a8cc30

536 files changed: +2160 -2089 lines


README.md (+1 -1)

@@ -27,7 +27,7 @@ Or, for the Python API, the Python shell (`./pyspark`).
 Spark also comes with several sample programs in the `examples` directory.
 To run one of them, use `./run-example <class> <params>`. For example:

-    ./run-example spark.examples.SparkLR local[2]
+    ./run-example org.apache.spark.examples.SparkLR local[2]

 will run the Logistic Regression example locally on 2 CPUs.

assembly/pom.xml (+9 -9)

@@ -19,16 +19,16 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-assembly</artifactId>
   <name>Spark Project Assembly</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>

   <repositories>
     <!-- A repository in the local filesystem for the Py4J JAR, which is not in Maven central -->

@@ -40,27 +40,27 @@

   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-bagel</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-mllib</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-repl</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-streaming</artifactId>
       <version>${project.version}</version>
     </dependency>

@@ -121,7 +121,7 @@
       <id>hadoop2-yarn</id>
       <dependencies>
         <dependency>
-          <groupId>org.spark-project</groupId>
+          <groupId>org.apache.spark</groupId>
           <artifactId>spark-yarn</artifactId>
           <version>${project.version}</version>
         </dependency>
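The Maven coordinates change along with the package: downstream builds must switch the groupId from `org.spark-project` to `org.apache.spark`, while artifact names and the version stay the same. A sketch of the equivalent dependency line in an sbt build definition (the coordinates come from this pom; the sbt usage itself is not part of this commit):

    // sbt (Scala) build definition: only the organization (Maven groupId) changes.
    libraryDependencies += "org.apache.spark" % "spark-core" % "0.8.0-SNAPSHOT"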

assembly/src/main/assembly/assembly.xml (+5 -5)

@@ -30,9 +30,9 @@
     </fileSet>
     <fileSet>
       <directory>
-        ${project.parent.basedir}/core/src/main/resources/spark/ui/static/
+        ${project.parent.basedir}/core/src/main/resources/org/apache/spark/ui/static/
       </directory>
-      <outputDirectory>/ui-resources/spark/ui/static</outputDirectory>
+      <outputDirectory>/ui-resources/org/apache/spark/ui/static</outputDirectory>
       <includes>
         <include>**/*</include>
       </includes>

@@ -63,10 +63,10 @@
   <dependencySets>
     <dependencySet>
       <includes>
-        <include>org.spark-project:*:jar</include>
+        <include>org.apache.spark:*:jar</include>
       </includes>
       <excludes>
-        <exclude>org.spark-project:spark-assembly:jar</exclude>
+        <exclude>org.apache.spark:spark-assembly:jar</exclude>
       </excludes>
     </dependencySet>
     <dependencySet>

@@ -77,7 +77,7 @@
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.hadoop:*:jar</exclude>
-        <exclude>org.spark-project:*:jar</exclude>
+        <exclude>org.apache.spark:*:jar</exclude>
       </excludes>
     </dependencySet>
   </dependencySets>

bagel/pom.xml (+4 -4)

@@ -19,21 +19,21 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-bagel</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Bagel</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>

   <dependencies>
     <dependency>
-      <groupId>org.spark-project</groupId>
+      <groupId>org.apache.spark</groupId>
       <artifactId>spark-core</artifactId>
       <version>${project.version}</version>
     </dependency>

bagel/src/main/scala/spark/bagel/Bagel.scala renamed to bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala (+18 -19)

@@ -15,32 +15,31 @@
  * limitations under the License.
  */

-package spark.bagel
+package org.apache.spark.bagel

-import spark._
-import spark.SparkContext._
-
-import scala.collection.mutable.ArrayBuffer
-import storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.RDD
+import org.apache.spark.storage.StorageLevel

 object Bagel extends Logging {
   val DEFAULT_STORAGE_LEVEL = StorageLevel.MEMORY_AND_DISK

   /**
    * Runs a Bagel program.
-   * @param sc [[spark.SparkContext]] to use for the program.
+   * @param sc [[org.apache.spark.SparkContext]] to use for the program.
    * @param vertices vertices of the graph represented as an RDD of (Key, Vertex) pairs. Often the Key will be
    * the vertex id.
    * @param messages initial set of messages represented as an RDD of (Key, Message) pairs. Often this will be an
    * empty array, i.e. sc.parallelize(Array[K, Message]()).
-   * @param combiner [[spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
+   * @param combiner [[org.apache.spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
    * message before sending (which often involves network I/O).
-   * @param aggregator [[spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
+   * @param aggregator [[org.apache.spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
    * and provides the result to each vertex in the next superstep.
-   * @param partitioner [[spark.Partitioner]] partitions values by key
+   * @param partitioner [[org.apache.spark.Partitioner]] partitions values by key
    * @param numPartitions number of partitions across which to split the graph.
    * Default is the default parallelism of the SparkContext
-   * @param storageLevel [[spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
+   * @param storageLevel [[org.apache.spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
    * Defaults to caching in memory.
    * @param compute function that takes a Vertex, optional set of (possibly combined) messages to the Vertex,
    * optional Aggregator and the current superstep,

@@ -98,7 +97,7 @@ object Bagel extends Logging {
     verts
   }

-  /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default storage level */
+  /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default storage level */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
     sc: SparkContext,
     vertices: RDD[(K, V)],

@@ -110,7 +109,7 @@ object Bagel extends Logging {
     compute: (V, Option[C], Int) => (V, Array[M])
   ): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)

-  /** Runs a Bagel program with no [[spark.bagel.Aggregator]] */
+  /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
     sc: SparkContext,
     vertices: RDD[(K, V)],

@@ -128,7 +127,7 @@ object Bagel extends Logging {
   }

   /**
-   * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]]
+   * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]]
    * and default storage level
    */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](

@@ -141,7 +140,7 @@ object Bagel extends Logging {
     compute: (V, Option[C], Int) => (V, Array[M])
   ): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)

-  /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default [[spark.HashPartitioner]]*/
+  /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default [[org.apache.spark.HashPartitioner]]*/
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
     sc: SparkContext,
     vertices: RDD[(K, V)],

@@ -159,8 +158,8 @@ object Bagel extends Logging {
   }

   /**
-   * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]],
-   * [[spark.bagel.DefaultCombiner]] and the default storage level
+   * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]],
+   * [[org.apache.spark.bagel.DefaultCombiner]] and the default storage level
    */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
     sc: SparkContext,

@@ -172,8 +171,8 @@ object Bagel extends Logging {
   ): RDD[(K, V)] = run(sc, vertices, messages, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)

   /**
-   * Runs a Bagel program with no [[spark.bagel.Aggregator]], the default [[spark.HashPartitioner]]
-   * and [[spark.bagel.DefaultCombiner]]
+   * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], the default [[org.apache.spark.HashPartitioner]]
+   * and [[org.apache.spark.bagel.DefaultCombiner]]
    */
   def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
     sc: SparkContext,
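The scaladoc above describes the `run` variants; here is a minimal usage sketch under the renamed package, not part of this commit. It calls the no-combiner, no-aggregator overload, and `CountVertex`, `PingMessage`, and `BagelSketch` are hypothetical names, modeled on the TestVertex/TestMessage classes in BagelSuite below:

    import org.apache.spark._
    import org.apache.spark.bagel._

    // Hypothetical vertex/message types, patterned after TestVertex/TestMessage.
    class CountVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
    class PingMessage(val targetId: String) extends Message[String] with Serializable

    object BagelSketch {
      def main(args: Array[String]) {
        val sc = new SparkContext("local[2]", "bagel-sketch")
        // The graph: three vertices keyed by id, and an empty initial message RDD.
        val verts = sc.parallelize(Array("a", "b", "c").map(id => (id, new CountVertex(true, 0))))
        val msgs = sc.parallelize(Array[(String, PingMessage)]())

        // Each superstep increments a per-vertex counter; vertices go
        // inactive after five supersteps, which terminates the run.
        val result = Bagel.run(sc, verts, msgs, numPartitions = 2) {
          (self: CountVertex, msgs: Option[Array[PingMessage]], superstep: Int) =>
            (new CountVertex(superstep < 4, self.age + 1), Array[PingMessage]())
        }
        result.collect().foreach { case (id, v) => println(id + " -> " + v.age) }
        sc.stop()
      }
    }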

bagel/src/test/scala/bagel/BagelSuite.scala renamed to bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala (+4 -6)

@@ -15,16 +15,14 @@
  * limitations under the License.
  */

-package spark.bagel
+package org.apache.spark.bagel

-import org.scalatest.{FunSuite, Assertions, BeforeAndAfter}
+import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._

-import scala.collection.mutable.ArrayBuffer
-
-import spark._
-import storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel

 class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
 class TestMessage(val targetId: String) extends Message[String] with Serializable

bin/start-master.sh (+1 -1)

@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
   fi
 fi

-"$bin"/spark-daemon.sh start spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
+"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT

bin/start-slave.sh (+1 -1)

@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
   fi
 fi

-"$bin"/spark-daemon.sh start spark.deploy.worker.Worker "$@"
+"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"

bin/stop-master.sh (+1 -1)

@@ -24,4 +24,4 @@ bin=`cd "$bin"; pwd`

 . "$bin/spark-config.sh"

-"$bin"/spark-daemon.sh stop spark.deploy.master.Master 1
+"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1

bin/stop-slaves.sh (+2 -2)

@@ -29,9 +29,9 @@ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
 fi

 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
-  "$bin"/spark-daemons.sh stop spark.deploy.worker.Worker 1
+  "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
 else
   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
-    "$bin"/spark-daemons.sh stop spark.deploy.worker.Worker $(( $i + 1 ))
+    "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
   done
 fi

core/pom.xml (+3 -3)

@@ -19,17 +19,17 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <groupId>org.spark-project</groupId>
+    <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent</artifactId>
     <version>0.8.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>

-  <groupId>org.spark-project</groupId>
+  <groupId>org.apache.spark</groupId>
   <artifactId>spark-core</artifactId>
   <packaging>jar</packaging>
   <name>Spark Project Core</name>
-  <url>http://spark-project.org/</url>
+  <url>http://spark.incubator.apache.org/</url>

   <dependencies>
     <dependency>

core/src/main/java/spark/network/netty/FileClient.java renamed to core/src/main/java/org/apache/spark/network/netty/FileClient.java (+1 -1)

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package spark.network.netty;
+package org.apache.spark.network.netty;

 import io.netty.bootstrap.Bootstrap;
 import io.netty.channel.Channel;

core/src/main/java/spark/network/netty/FileClientChannelInitializer.java renamed to core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java (+1 -1)

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package spark.network.netty;
+package org.apache.spark.network.netty;

 import io.netty.buffer.BufType;
 import io.netty.channel.ChannelInitializer;

core/src/main/java/spark/network/netty/FileClientHandler.java renamed to core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java (+1 -1)

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package spark.network.netty;
+package org.apache.spark.network.netty;

 import io.netty.buffer.ByteBuf;
 import io.netty.channel.ChannelHandlerContext;

core/src/main/java/spark/network/netty/FileServer.java renamed to core/src/main/java/org/apache/spark/network/netty/FileServer.java (+1 -1)

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package spark.network.netty;
+package org.apache.spark.network.netty;

 import java.net.InetSocketAddress;

core/src/main/java/spark/network/netty/FileServerChannelInitializer.java renamed to core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
* limitations under the License.
1616
*/
1717

18-
package spark.network.netty;
18+
package org.apache.spark.network.netty;
1919

2020
import io.netty.channel.ChannelInitializer;
2121
import io.netty.channel.socket.SocketChannel;

core/src/main/java/spark/network/netty/FileServerHandler.java renamed to core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java (+1 -1)

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package spark.network.netty;
+package org.apache.spark.network.netty;

 import java.io.File;
 import java.io.FileInputStream;

core/src/main/java/spark/network/netty/PathResolver.java renamed to core/src/main/java/org/apache/spark/network/netty/PathResolver.java (+1 -1)

@@ -15,7 +15,7 @@
  * limitations under the License.
  */

-package spark.network.netty;
+package org.apache.spark.network.netty;


 public interface PathResolver {
