plugins {
    id 'scala'
    id 'application'
    id 'com.github.spotbugs-base'
    id 'com.diffplug.spotless'
    id 'net.nemerosa.versioning'
    id 'com.jfrog.artifactory'
    id 'org.scoverage'
}
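// Plugin roles: 'scala' and 'application' build and run the app; spotbugs-base and
// spotless cover static analysis and formatting; versioning derives version info from
// git; artifactory handles publishing; scoverage provides Scala test coverage.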
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'maven-publish'
apply from: "$rootDir/gradle/artifactory.gradle"
apply from: "$rootDir/gradle/checkstyle.gradle"
apply from: "$rootDir/gradle/spotless.gradle"
apply from: "$rootDir/gradle/scoverage.gradle"
// group and version are picked up automatically from gradle.properties
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
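// Compile to Java 11 bytecode; Spark 3.4.x supports Java 8, 11, and 17.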
configurations {
    provided
}

sourceSets {
    main {
        compileClasspath += configurations.provided
    }
}
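// 'provided' emulates Maven's provided scope: compile against these dependencies but
// keep them out of the packaged artifact, since the Spark cluster supplies them at
// runtime. A hypothetical sketch of moving a Spark module into that scope (this build
// currently declares the Spark modules as 'implementation'):
//
//   dependencies {
//       provided "org.apache.spark:spark-core_${scalaVersion}:${sparkVersion}"
//   }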
application {
    // 'mainClass' replaces the deprecated 'mainClassName' property
    mainClass = 'dev.template.spark.Main'
}
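// Run the entry point locally with: ./gradlew run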
repositories {
    mavenCentral()
    // Confluent's repository hosts the Kafka/Confluent artifacts
    maven { url 'https://packages.confluent.io/maven/' }
}
dependencies {
    implementation "org.scalameta:scalafmt-core_${scalaVersion}:${scalafmt}"
    implementation "org.apache.spark:spark-sql_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-graphx_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-launcher_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-catalyst_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-streaming_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-core_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-avro_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.spark:spark-hive_${scalaVersion}:${sparkVersion}"
    implementation "org.apache.hadoop:hadoop-aws:${hadoopAWS}"
    implementation "io.delta:delta-core_${scalaVersion}:${deltaVersion}"

    compileOnly "org.scala-lang:scala-library:${scalaVersion}"
    compileOnly "org.scala-lang:scala-compiler:${scalaVersion}"

    testImplementation "junit:junit:${junitVersion}"
    testImplementation "org.junit.jupiter:junit-jupiter-api:${jupiterApi}"
    testImplementation "org.scalatest:scalatest_${scalaVersion}:${scalaTests}"
    testImplementation "org.scalatestplus:junit-4-13_${scalaVersion}:3.2.2.0"
    testImplementation 'org.mockito:mockito-core:5.3.1'
    testRuntimeOnly "org.scala-lang.modules:scala-xml_${scalaVersion}:1.2.0"
}
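// The version placeholders above are expected in gradle.properties. A minimal sketch
// with illustrative values (assumptions, not pinned by this file):
//
//   scalaVersion=2.12
//   sparkVersion=3.4.1
//   deltaVersion=2.4.0
//   hadoopAWS=3.3.4
//   junitVersion=4.13.2
//   jupiterApi=5.9.3
//   scalaTests=3.2.16
//   scalafmt=3.7.3
//
// Caveat: ${scalaVersion} serves both as the artifact suffix (e.g. _2.12) and as the
// scala-library/scala-compiler version, which needs the full patch version
// (e.g. 2.12.18); a separate property would normally keep those two distinct.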
jar {
    // 'archiveClassifier' replaces the deprecated 'classifier' convention
    archiveClassifier = 'all'
    manifest {
        attributes 'Implementation-Title': title,
                'Implementation-Version': archiveVersion,
                'Main-Class': mainClassFile
    }
    // Strip signature files so the merged jar does not fail signature verification
    exclude 'META-INF/*.RSA', 'META-INF/*.SF', 'META-INF/*.DSA'
    from files(sourceSets.main.output.classesDirs)
    zip64 true
}
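// Note: despite the 'all' classifier, the jar above packages only this project's own
// classes. One common idiom for folding runtime dependencies into a true uber jar
// (a sketch, not part of the original build):
//
//   from { configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) } }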
tasks.register('scalaTest', JavaExec) {
    dependsOn 'testClasses'
    mainClass.set('org.scalatest.tools.Runner')
    // -R adds the compiled test classes to the runpath; -o selects the stdout reporter
    args = ['-R', 'build/classes/scala/test', '-o']
    classpath = sourceSets.test.runtimeClasspath
}
test.dependsOn scalaTest
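// 'test' (and thus 'check') now triggers the ScalaTest runner; it can also be
// invoked directly: ./gradlew scalaTest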
idea {
    module {
        // IntelliJ does not know about the standard idiom of 'provided' as used in
        // managing uber/shaded jar dependencies. Make it so!
        scopes.PROVIDED.plus += [configurations.provided]
    }
}
// Submits the example job to a local standalone cluster. SPARK_HOME, when set,
// overrides the hardcoded Spark distribution path.
tasks.register('sparkSubmit', Exec) {
    def sparkHome = System.getenv('SPARK_HOME') ?: '/Users/e1xx/spark-3.4.1-bin-hadoop3'
    commandLine 'sh', '-c', "$sparkHome/bin/spark-submit " +
            "--class dev.template.spark.RddCollect" +
            " --master spark://localhost:7077 build/libs/spark-scala-gradle-bootstrap-2.12.0-all.jar"
}