Skip to content

Commit ec992e1

Browse files
committed
Revert "[SPARK-1150] fix repo location in create script"
This reverts commit 9aa0957.
1 parent 9aa0957 commit ec992e1

File tree

3 files changed

+5
-11
lines changed

3 files changed

+5
-11
lines changed

conf/spark-env.sh.template

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
# - SPARK_MASTER_IP, to bind the master to a different IP address or hostname
1616
# - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports
1717
# - SPARK_WORKER_CORES, to set the number of cores to use on this machine
18-
# - SPARK_WORKER_MEM, to set how much memory to use (e.g. 1000m, 2g)
18+
# - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g)
1919
# - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
2020
# - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
2121
# - SPARK_WORKER_DIR, to set the working directory of worker processes

core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala

+2-8
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,13 @@
1818
package org.apache.spark.deploy.worker
1919

2020
import java.lang.management.ManagementFactory
21-
import org.apache.spark.Logging
2221

2322
import org.apache.spark.util.{IntParam, MemoryParam, Utils}
2423

2524
/**
2625
 * Command-line parser for the worker.
2726
*/
28-
private[spark] class WorkerArguments(args: Array[String]) extends Logging {
29-
initLogging()
27+
private[spark] class WorkerArguments(args: Array[String]) {
3028
var host = Utils.localHostName()
3129
var port = 0
3230
var webUiPort = 8081
@@ -42,13 +40,9 @@ private[spark] class WorkerArguments(args: Array[String]) extends Logging {
4240
if (System.getenv("SPARK_WORKER_CORES") != null) {
4341
cores = System.getenv("SPARK_WORKER_CORES").toInt
4442
}
45-
if (System.getenv("SPARK_WORKER_MEM") != null) {
46-
memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEM"))
47-
} else if (System.getenv("SPARK_WORKER_MEMORY") != null) {
48-
logWarning("SPARK_WORKER_MEMORY is deprecated. Please use SPARK_WORKER_MEM instead")
43+
if (System.getenv("SPARK_WORKER_MEMORY") != null) {
4944
memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
5045
}
51-
5246
if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
5347
webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
5448
}

docs/spark-standalone.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -104,8 +104,8 @@ You can optionally configure the cluster further by setting environment variable
104104
<td>Total number of cores to allow Spark applications to use on the machine (default: all available cores).</td>
105105
</tr>
106106
<tr>
107-
<td><code>SPARK_WORKER_MEM</code></td>
108-
<td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property. The old variable <code>SPARK_WORKER_MEMORY</code> has been deprecated.</td>
107+
<td><code>SPARK_WORKER_MEMORY</code></td>
108+
<td>Total amount of memory to allow Spark applications to use on the machine, e.g. <code>1000m</code>, <code>2g</code> (default: total memory minus 1 GB); note that each application's <i>individual</i> memory is configured using its <code>spark.executor.memory</code> property.</td>
109109
</tr>
110110
<tr>
111111
<td><code>SPARK_WORKER_WEBUI_PORT</code></td>

0 commit comments

Comments
 (0)