// build.sbt
import build.BuildType
lazy val baseName = "lakefs-spark"
lazy val projectVersion = "0.1.0-SNAPSHOT.4"
isSnapshot := true
// Spark versions 2.4.7 and 3.0.1 use different Scala versions. Changing Scala versions is a
// deep change, so key the Spark distinction on the Scala distinction; sbt does not appear to
// support other ways of changing the emitted Scala binary version with the same compiler.
// This SO answer hints that we cannot use a Scala 2.11 release earlier than 2.11.12 here:
// https://stackoverflow.com/a/60177627/192263
lazy val scala211Version = "2.11.12"
lazy val scala212Version = "2.12.12"
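// settingsToCompileIn points a project at sources under the given directory, so several
// generated projects (one per Spark flavor) can compile the same shared source tree. It
// also adds the resource directory to ScalaPB's include and proto source paths, so .proto
// files kept under resources get picked up for code generation.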
def settingsToCompileIn(dir: String) = {
  Seq(
    Compile / scalaSource := (ThisBuild / baseDirectory).value / dir / "src" / "main" / "scala",
    Test / scalaSource := (ThisBuild / baseDirectory).value / dir / "src" / "test" / "scala",
    Compile / resourceDirectory := (ThisBuild / baseDirectory).value / dir / "src" / "main" / "resources",
    Compile / PB.includePaths += (Compile / resourceDirectory).value,
    Compile / PB.protoSources += (Compile / resourceDirectory).value,
  )
}
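// generateCoreProject creates one core-client project per BuildType, each with its own
// target/core-<name> directory so the flavors do not clobber each other's output. Spark is
// "provided" (supplied by the cluster at runtime); scalapb-runtime is in the "protobuf"
// scope so its bundled .proto files are available to the generator.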
def generateCoreProject(buildType: BuildType) =
  Project(s"${baseName}-client-${buildType.name}", file(s"target/core-${buildType.name}"))
    .settings(
      sharedSettings,
      settingsToCompileIn("core"),
      scalaVersion := buildType.scalaVersion,
      PB.targets := Seq(
        scalapb.gen() -> (Compile / sourceManaged).value / "scalapb"
      ),
      libraryDependencies ++= Seq(
        "org.rocksdb" % "rocksdbjni" % "6.6.4",
        "commons-codec" % "commons-codec" % "1.15",
        "org.apache.spark" %% "spark-sql" % buildType.sparkVersion % "provided",
        "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf",
        "org.apache.hadoop" % "hadoop-aws" % buildType.hadoopVersion,
        "org.apache.hadoop" % "hadoop-common" % buildType.hadoopVersion,
        "org.scalaj" %% "scalaj-http" % "2.4.2",
        "org.json4s" %% "json4s-native" % "3.7.0-M8",
        "com.google.guava" % "guava" % "16.0.1",
        "com.google.guava" % "failureaccess" % "1.0.1",
      )
    )
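// generateExamplesProject builds the matching examples project for each Spark flavor; the
// core client itself is wired in via dependsOn below, so the only extra dependency here is
// the provided Spark.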
def generateExamplesProject(buildType: BuildType) =
  Project(s"${baseName}-examples-${buildType.name}", file(s"target/examples-${buildType.name}"))
    .settings(
      sharedSettings,
      settingsToCompileIn("examples"),
      scalaVersion := buildType.scalaVersion,
      libraryDependencies += "org.apache.spark" %% "spark-sql" % buildType.sparkVersion % "provided",
      assembly / mainClass := Some("io.treeverse.examples.List"),
    )
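// BuildType is defined under project/ (imported above). A sketch matching the uses below
// (an assumption; the real definition may differ): the positional arguments appear to be
// name, Scala version, Spark version, a ScalaPB/protoc version (not referenced in this
// file), and Hadoop version.
//
//   class BuildType(
//       val name: String,
//       val scalaVersion: String,
//       val sparkVersion: String,
//       val scalapbVersion: String, // assumption: unused in build.sbt itself
//       val hadoopVersion: String,
//   )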
lazy val spark2Type = new BuildType("247", scala211Version, "2.4.7", "0.9.8", "2.7.7")
lazy val spark3Type = new BuildType("301", scala212Version, "3.0.1", "0.10.11", "2.7.7")
lazy val core2 = generateCoreProject(spark2Type)
lazy val core3 = generateCoreProject(spark3Type)
lazy val examples2 = generateExamplesProject(spark2Type).dependsOn(core2)
lazy val examples3 = generateExamplesProject(spark3Type).dependsOn(core3)
lazy val root = (project in file(".")).aggregate(core2, core3, examples2, examples3)
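// Fat jars for a single flavor can be built by project name, which is derived from baseName
// and BuildType.name, e.g.:
//
//   sbt lakefs-spark-client-301/assembly
//   sbt lakefs-spark-examples-247/assembly
//
// Because root aggregates all four projects, a bare `sbt assembly` should fan out to each.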
// Target Java 8 bytecode so the artifacts stay compatible with the JVMs Spark runs on
javacOptions ++= Seq("-source", "1.8", "-target", "1.8")
scalacOptions ++= Seq("-release", "8", "-target:jvm-1.8")
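// Shading renames dependency packages inside the fat jar so they cannot clash with the
// (often older) versions on Spark's own classpath; protobuf and guava are the usual
// offenders. Note the guava rename applies only in-project and to the listed guava
// artifacts, not inAll.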
lazy val assemblySettings = Seq(
  assembly / assemblyMergeStrategy := (_ => MergeStrategy.first),
  assembly / assemblyShadeRules := Seq(
    ShadeRule.rename("org.apache.http.**" -> "org.apache.httpShaded@1").inAll,
    ShadeRule.rename("com.google.protobuf.**" -> "shadeproto.@1").inAll,
    ShadeRule
      .rename("com.google.common.**" -> "shadegooglecommon.@1")
      .inLibrary("com.google.guava" % "guava" % "30.1-jre",
                 "com.google.guava" % "failureaccess" % "1.0.1")
      .inProject,
    ShadeRule.rename("scala.collection.compat.**" -> "shadecompat.@1").inAll,
  ),
)
// Don't publish root project
root / publish / skip := true
lazy val commonSettings = Seq(
  version := projectVersion
)
lazy val publishSettings = Seq(
  publishTo := {
    val nexus = "https://s01.oss.sonatype.org/"
    if (isSnapshot.value) Some("snapshots" at nexus + "content/repositories/snapshots")
    else Some("releases" at nexus + "service/local/staging/deploy/maven2")
  },
  // Remove all repositories other than Maven Central from the published POM
  pomIncludeRepository := { _ => false },
)
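// Publishing targets Sonatype's s01 Nexus: snapshots (and isSnapshot is forced true above)
// go to the snapshots repository, releases to the staging area. A hypothetical release run,
// assuming signing is configured elsewhere (e.g. sbt-pgp, not set up in this file):
//
//   sbt publishSigned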
lazy val sharedSettings = commonSettings ++ assemblySettings ++ publishSettings
ThisBuild / scmInfo := Some(
  ScmInfo(
    url("https://github.com/treeverse/spark-client"),
    "scm:git@github.com:treeverse/spark-client.git"
  )
)
ThisBuild / developers := List(
  Developer(
    id = "ariels",
    name = "Ariel Shaqed (Scolnicov)",
    email = "[email protected]",
    url = url("https://github.com/arielshaqed")
  ),
  Developer(
    id = "baraktr",
    name = "B. A.",
    email = "[email protected]",
    url = url("https://github.com/nopcoder"),
  ),
  Developer(
    id = "ozkatz",
    name = "Oz Katz",
    email = "[email protected]",
    url = url("https://github.com/ozkatz"),
  ),
  Developer(
    id = "johnnyaug",
    name = "J. A.",
    email = "[email protected]",
    url = url("https://github.com/johnnyaug"),
  ),
)
credentials ++= Seq(
  Credentials(Path.userHome / ".sbt" / "credentials"),
  Credentials(Path.userHome / ".sbt" / "sonatype_credentials"),
)
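// Each file uses sbt's standard credentials format. A hypothetical
// ~/.sbt/sonatype_credentials matching the publishTo host above:
//
//   realm=Sonatype Nexus Repository Manager
//   host=s01.oss.sonatype.org
//   user=<your username>
//   password=<your password>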
ThisBuild / versionScheme := Some("early-semver")
ThisBuild / organization := "io.treeverse"
ThisBuild / organizationName := "Treeverse Labs"
ThisBuild / organizationHomepage := Some(url("http://treeverse.io"))
ThisBuild / description := "Spark client for lakeFS object metadata."
ThisBuild / licenses := List("Apache 2" -> new URL("http://www.apache.org/licenses/LICENSE-2.0.txt"))
ThisBuild / homepage := Some(url("https://github.com/treeverse/spark-client"))