-
Notifications
You must be signed in to change notification settings - Fork 1
/
build.sbt
88 lines (78 loc) · 2.64 KB
/
build.sbt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
// Top-level build name; each aggregated module below defines its own directory and settings.
name := "hotels-kafka-streams"
// Settings shared by every module: version, Scala version, compiler flags,
// artifact resolvers, common dependencies, scalafmt-on-compile, and the
// sbt-assembly merge strategy for building fat jars.
lazy val commonSettings = Seq(
  version := "0.5.0",
  scalaVersion := "2.11.12",
  scalacOptions := Seq(
    "-feature",
    // NOTE: "-encoding" consumes the NEXT argument, so "UTF-8" must stay
    // immediately after it. (Previously the commented-out "-deprecation"
    // sat between them; uncommenting it would have silently become the
    // encoding argument.)
    "-encoding",
    "UTF-8",
    //"-deprecation",
    "-language:higherKinds",
    "-language:existentials",
    "-language:implicitConversions",
    "-language:postfixOps",
    "-Ypartial-unification"
  ),
  resolvers ++= Seq(
    Resolver.bintrayRepo("akka", "maven"),
    "Sonatype OSS Snapshots".at("https://oss.sonatype.org/content/repositories/snapshots"),
    // Must be HTTPS: sbt/Coursier reject plain-HTTP resolvers (since early 2020),
    // and dl.bintray.com serves the same content over TLS.
    "krasserm at bintray".at("https://dl.bintray.com/krasserm/maven"),
    "jitpack".at("https://jitpack.io")
  ),
  libraryDependencies ++= Dependencies.common,
  scalafmtOnCompile := true,
  assemblyMergeStrategy in assembly := {
    // Discard META-INF entries (signatures, manifests) that collide across jars;
    // for any other duplicate path, keep the first occurrence found.
    case PathList("META-INF", _ @_*) => MergeStrategy.discard
    case _ => MergeStrategy.first
  }
)
// Aggregating root project: building/testing the root runs the same task in
// every listed module. The empty crossScalaVersions disables cross-building
// at the root itself (modules carry their own Scala version via commonSettings).
lazy val root =
  project
    .in(file("."))
    .aggregate(interface, spark_common, generator, batching, streaming, elastic)
    .settings(crossScalaVersions := List())
// Shared definitions every other module depends on.
lazy val interface =
  project
    .in(file("interface"))
    .settings(commonSettings)
// Code shared by the Spark-based modules; unit-test deps are scoped to Test.
lazy val spark_common =
  project
    .in(file("spark-common"))
    .dependsOn(interface)
    .settings(
      commonSettings,
      libraryDependencies ++= Dependencies.sparkCommon ++ unitTesting(Dependencies.commonTest)
    )
// Data-generator application; packaged as a runnable app via sbt-native-packager.
lazy val generator =
  project
    .in(file("generator"))
    .dependsOn(interface)
    .enablePlugins(JavaAppPackaging)
    .settings(
      commonSettings,
      libraryDependencies ++= Dependencies.generatorModule ++ unitTesting(Dependencies.generatorTests)
    )
// Batch-processing Spark module; inherits the common Spark setup below.
lazy val batching =
  standardSparkModule(project.in(file("batching")))
    .settings(libraryDependencies ++= Dependencies.batchingModule)
// Stream-processing Spark module; inherits the common Spark setup below.
lazy val streaming =
  standardSparkModule(project.in(file("streaming")))
    .settings(libraryDependencies ++= Dependencies.streamingModule)
// Elasticsearch-sink Spark module; inherits the common Spark setup below.
lazy val elastic =
  standardSparkModule(project.in(file("elastic")))
    .settings(libraryDependencies ++= Dependencies.elasticModule)
/** Applies the configuration shared by the Spark modules (batching, streaming, elastic):
  * the IntegrationTest configuration, common settings, assembly tweaks,
  * integration-test dependencies, and version overrides.
  *
  * @param proj the bare module project to configure
  * @return the project with Spark-module settings, dependencies, and plugins applied
  */
def standardSparkModule(proj: Project): Project =
  proj
    .configs(IntegrationTest)
    .settings(
      commonSettings,
      Defaults.itSettings,
      // Exclude the Scala library from the assembled fat jar
      // (presumably the Spark runtime provides it — confirm against deployment).
      assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false),
      libraryDependencies ++= integTesting(Dependencies.integTests),
      dependencyOverrides ++= Dependencies.overrides
    )
    // generator is needed only when running integration tests, not at compile/runtime.
    .dependsOn(interface, spark_common, generator % IntegrationTest)
    .enablePlugins(JavaAppPackaging)
/** Scopes each given dependency to the `Test` configuration (unit tests only). */
def unitTesting(tests: Seq[ModuleID]): Seq[ModuleID] =
  tests.map(dependency => dependency % Test)
/** Scopes each given dependency to the `IntegrationTest` configuration. */
def integTesting(tests: Seq[ModuleID]): Seq[ModuleID] =
  tests.map(dependency => dependency % IntegrationTest)