-// buildscript {
-//   repositories {
-//     mavenCentral()
-//   }
-// }
 plugins {
-    id 'scala'
-    id 'application'
-    id "com.github.spotbugs-base"
-    id "com.diffplug.spotless"
-    id 'net.nemerosa.versioning'
-    id 'com.jfrog.artifactory'
-    id 'org.scoverage'
-    id 'template.spark.livyPlugin'
+  id 'scala'
+  id 'application'
+  id "com.github.spotbugs-base"
+  id "com.diffplug.spotless"
+  id 'net.nemerosa.versioning'
+  id 'com.jfrog.artifactory'
+  id 'org.scoverage'
 }

-// apply plugin: LivyPlugin
 apply plugin: 'java'
 apply plugin: 'scala'
 apply plugin: 'idea'
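// Note: the legacy 'apply plugin:' calls for java/scala above duplicate plugins
// already declared in the plugins {} block; re-applying a plugin is a no-op in Gradle.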
@@ -31,85 +24,81 @@ sourceCompatibility = JavaVersion.VERSION_1_8
 targetCompatibility = JavaVersion.VERSION_1_8

 configurations {
-    provided
+  provided
 }

 sourceSets {
-    main {
-        compileClasspath += configurations.provided
-    }
+  main {
+    compileClasspath += configurations.provided
+  }
 }
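// The custom 'provided' configuration keeps dependencies on the compile classpath
// without adding them to the runtime artifact; the Spark cluster supplies them at run time.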

 application {
-    mainClassName = 'dev.template.spark.Main'
+  mainClassName = 'dev.template.spark.Main'
 }


 repositories {
-    mavenCentral()
+  mavenCentral()
 }
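// Note: 'mainClassName' is deprecated in newer Gradle versions;
// application { mainClass.set('dev.template.spark.Main') } is the preferred form.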

 dependencies {
-    implementation "org.scalameta:scalafmt-core_${scalaVersion}:${scalafmt}"
-    implementation "org.apache.spark:spark-sql_${scalaVersion}:${sparkVersion}"
-    implementation "org.apache.spark:spark-graphx_${scalaVersion}:${sparkVersion}"
-    implementation "org.apache.spark:spark-launcher_${scalaVersion}:${sparkVersion}"
-    implementation "org.apache.spark:spark-catalyst_${scalaVersion}:${sparkVersion}"
-    implementation "org.apache.spark:spark-streaming_${scalaVersion}:${sparkVersion}"
-    implementation "org.apache.spark:spark-core_${scalaVersion}:${sparkVersion}"
-    implementation "commons-io:commons-io:${commonsIO}"
-
-    implementation "org.apache.hadoop:hadoop-aws:${hadoopAWS}"
-    implementation "org.apache.spark:spark-hive_${scalaVersion}:${sparkVersion}"
-    implementation "io.delta:delta-core_${scalaVersion}:${deltaVersion}"
-    implementation "com.google.guava:guava:31.1-jre"
-    compileOnly "org.scala-lang:scala-library:$scalaVersion"
-    compileOnly "org.scala-lang:scala-compiler:${scalaVersion}"
-
-    testImplementation "org.scalatestplus:junit-4-13_${scalaVersion}:3.2.2.0"
-    testImplementation "junit:junit:${junitVersion}"
-    testRuntimeOnly "org.scala-lang.modules:scala-xml_${scalaVersion}:1.2.0"
-    testImplementation 'org.mockito:mockito-core:5.3.1'
-
-    testImplementation "org.junit.jupiter:junit-jupiter-api:${jupiterApi}"
-    testImplementation "org.scalatest:scalatest_${scalaVersion}:${scalaTests}"
+  implementation "org.scalameta:scalafmt-core_${scalaVersion}:${scalafmt}"
+  implementation "org.apache.spark:spark-sql_${scalaVersion}:${sparkVersion}"
+  implementation "org.apache.spark:spark-graphx_${scalaVersion}:${sparkVersion}"
+  implementation "org.apache.spark:spark-launcher_${scalaVersion}:${sparkVersion}"
+  implementation "org.apache.spark:spark-catalyst_${scalaVersion}:${sparkVersion}"
+  implementation "org.apache.spark:spark-streaming_${scalaVersion}:${sparkVersion}"
+  implementation "org.apache.spark:spark-core_${scalaVersion}:${sparkVersion}"
+
+  implementation "org.apache.hadoop:hadoop-aws:${hadoopAWS}"
+  implementation "org.apache.spark:spark-hive_${scalaVersion}:${sparkVersion}"
+  implementation "io.delta:delta-core_${scalaVersion}:${deltaVersion}"
+  compileOnly "org.scala-lang:scala-library:$scalaVersion"
+  compileOnly "org.scala-lang:scala-compiler:${scalaVersion}"
+
+  testImplementation "org.scalatestplus:junit-4-13_${scalaVersion}:3.2.2.0"
+  testImplementation "junit:junit:${junitVersion}"
+  testRuntimeOnly "org.scala-lang.modules:scala-xml_${scalaVersion}:1.2.0"
+  testImplementation 'org.mockito:mockito-core:5.3.1'
+
+  testImplementation "org.junit.jupiter:junit-jupiter-api:${jupiterApi}"
+  testImplementation "org.scalatest:scalatest_${scalaVersion}:${scalaTests}"
 }
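// Scala artifacts carry the Scala binary version as a suffix (e.g. spark-sql_2.12),
// so ${scalaVersion} must be the binary version (2.12), not a full patch version.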


 jar {
-    classifier 'all'
-    manifest {
-        attributes 'Implementation-Title': title,
-                'Implementation-Version': archiveVersion,
-                'Main-Class': mainClassFile
-    }
-    exclude 'META-INF/*.RSA', 'META-INF/*.SF', 'META-INF/*.DSA'
-    from files(sourceSets.main.output.classesDirs)
-    zip64 true
+  classifier 'all'
+  manifest {
+    attributes 'Implementation-Title': title,
+      'Implementation-Version': archiveVersion,
+      'Main-Class': mainClassFile
+  }
+  exclude 'META-INF/*.RSA', 'META-INF/*.SF', 'META-INF/*.DSA'
+  from files(sourceSets.main.output.classesDirs)
+  zip64 true
 }
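// Excluding the META-INF signature files (*.RSA, *.SF, *.DSA) avoids
// "Invalid signature file digest" errors when signed dependencies are
// repacked into a single jar.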


 tasks.register('scalaTest', JavaExec) {
-    dependsOn(['testClasses'])
-    mainClass.set("org.scalatest.tools.Runner")
-    args = ['-R', 'build/classes/scala/test', '-o']
-    classpath = sourceSets.test.runtimeClasspath
+  dependsOn(['testClasses'])
+  mainClass.set("org.scalatest.tools.Runner")
+  args = ['-R', 'build/classes/scala/test', '-o']
+  classpath = sourceSets.test.runtimeClasspath
 }
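// ScalaTest Runner flags: -R sets the runpath to scan for test suites,
// -o selects the standard-output reporter.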

 test.dependsOn scalaTest

 idea {
-    module {
-        // IntelliJ does not know about the standard idiom of provided as used in managing
-        // uber/shaded jar dependencies. Make it so!
-        scopes.PROVIDED.plus += [configurations.provided]
-    }
+  module {
+    // IntelliJ does not know about the standard idiom of provided as used in managing
+    // uber/shaded jar dependencies. Make it so!
+    scopes.PROVIDED.plus += [configurations.provided]
+  }
 }

-def runLivy = tasks.register("runLivy", template.spark.Publish) {
-    def avscTree = project.fileTree(allAvscs)
-
-    targetFiles.from(avscTree)
-    schemaFile.value(layout.projectDirectory.file("validator/avro-schema.json"))
+task sparkSubmit(type: Exec) {
+  commandLine 'sh', '-c', "/Users/e1xx/spark-3.4.1-bin-hadoop3/bin/spark-submit " +
+    "--class dev.template.spark.RddCollect " +
+    "--master spark://localhost:7077 build/libs/spark-scala-gradle-bootstrap-2.12.0-all.jar"
 }
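// Note: the spark-submit path above is machine-specific. Running this task assumes
// the fat jar has been built first (./gradlew jar) and that a standalone Spark
// master is listening on spark://localhost:7077; adjust the path (or derive it
// from $SPARK_HOME) for portability.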