@@ -30,7 +30,7 @@ if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
rem Test that an argument was given
if not "x%1"=="x" goto arg_given
- echo Usage: run ^<spark-class^> [^<args^>]
+ echo Usage: spark-class ^<class^> [^<args^>]
goto exit
:arg_given
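For context: the "x%1"=="x" comparison above is the usual cmd idiom for testing whether an argument was supplied; prefixing both sides with x keeps the if well-formed when %1 expands to nothing. A minimal standalone sketch of the idiom (file name and messages are hypothetical, not part of this change):

@echo off
rem check-arg.cmd -- illustrates the empty-argument test used in this hunk
if not "x%1"=="x" goto arg_given
echo No argument supplied
goto :eof
:arg_given
echo First argument: %1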
@@ -44,12 +44,6 @@ rem Do not overwrite SPARK_JAVA_OPTS environment variable in this script
if "%RUNNING_DAEMON%"=="0" set OUR_JAVA_OPTS=%SPARK_JAVA_OPTS%
if "%RUNNING_DAEMON%"=="1" set OUR_JAVA_OPTS=%SPARK_DAEMON_JAVA_OPTS%

- rem Check that SCALA_HOME has been specified
- if not "x%SCALA_HOME%"=="x" goto scala_exists
- echo SCALA_HOME is not set
- goto exit
- :scala_exists
-
rem Figure out how much memory to use per executor and set it as an environment
rem variable so that our process sees it and can report it to Mesos
if "x%SPARK_MEM%"=="x" set SPARK_MEM=512m
@@ -58,43 +52,27 @@ rem Set JAVA_OPTS to be able to load native libraries and to set heap size
set JAVA_OPTS=%OUR_JAVA_OPTS% -Djava.library.path=%SPARK_LIBRARY_PATH% -Xms%SPARK_MEM% -Xmx%SPARK_MEM%
rem Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!

- set CORE_DIR=%FWDIR%core
- set EXAMPLES_DIR=%FWDIR%examples
- set REPL_DIR=%FWDIR%repl
+ rem Test whether the user has built Spark
+ if exist "%FWDIR%RELEASE" goto skip_build_test
+ set FOUND_JAR=0
+ for %%d in ("%FWDIR%assembly\target\scala-%SCALA_VERSION%\spark-assembly*hadoop*.jar") do (
+   set FOUND_JAR=1
+ )
+ if "%FOUND_JAR%"=="0" (
+   echo Failed to find Spark assembly JAR.
+   echo You need to build Spark with sbt\sbt assembly before running this program.
+   goto exit
+ )
+ :skip_build_test

rem Compute classpath using external script
set DONT_PRINT_CLASSPATH=1
call "%FWDIR%bin\compute-classpath.cmd"
set DONT_PRINT_CLASSPATH=0

- rem Figure out the JAR file that our examples were packaged into.
- rem First search in the build path from SBT:
- for %%d in ("examples/target/scala-%SCALA_VERSION%/spark-examples*.jar") do (
-   set SPARK_EXAMPLES_JAR=examples/target/scala-%SCALA_VERSION%/%%d
- )
- rem Then search in the build path from Maven:
- for %%d in ("examples/target/spark-examples*hadoop*.jar") do (
-   set SPARK_EXAMPLES_JAR=examples/target/%%d
- )
-
- rem Figure out whether to run our class with java or with the scala launcher.
- rem In most cases, we'd prefer to execute our process with java because scala
- rem creates a shell script as the parent of its Java process, which makes it
- rem hard to kill the child with stuff like Process.destroy(). However, for
- rem the Spark shell, the wrapper is necessary to properly reset the terminal
- rem when we exit, so we allow it to set a variable to launch with scala.
- if "%SPARK_LAUNCH_WITH_SCALA%" NEQ 1 goto java_runner
- set RUNNER=%SCALA_HOME%\bin\scala
- # Java options will be passed to scala as JAVA_OPTS
- set EXTRA_ARGS=
- goto run_spark
- :java_runner
- set CLASSPATH=%CLASSPATH%;%SCALA_HOME%\lib\scala-library.jar;%SCALA_HOME%\lib\scala-compiler.jar;%SCALA_HOME%\lib\jline.jar
- set RUNNER=java
- if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java
- rem The JVM doesn't read JAVA_OPTS by default so we need to pass it in
- set EXTRA_ARGS=%JAVA_OPTS%
- :run_spark
+ rem Figure out where java is.
+ set RUNNER=java
+ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

- "%RUNNER%" -cp "%CLASSPATH%" %EXTRA_ARGS% %*
+ "%RUNNER%" -cp "%CLASSPATH%" %JAVA_OPTS% %*
:exit
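For reference, the build test added in this change leans on cmd's for-in-set expansion: the loop body runs once per file matching the wildcard, so the flag flips to 1 exactly when an assembly JAR exists and stays 0 otherwise. A self-contained sketch of that detection pattern (paths and messages are hypothetical):

@echo off
rem Detect whether any assembly JAR exists under a build directory
set FOUND=0
for %%f in ("build\output\myapp-assembly*.jar") do set FOUND=1
if "%FOUND%"=="0" (
  echo No assembly JAR found under build\output
  exit /b 1
)
echo Assembly JAR present: ready to run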