build.mill
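// Mill build for spark-excel, a Spark plugin for reading and writing Excel
// files, cross-built against multiple Scala and Spark version combinations.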
import coursier.maven.MavenRepository
import mill._, scalalib._, publish._
import Assembly._
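// Contrib plugins ship with Mill itself; the empty version after the final
// colon resolves to the version of Mill running this build.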
import $ivy.`com.lihaoyi::mill-contrib-sonatypecentral:`
import mill.contrib.sonatypecentral.SonatypeCentralPublishModule
import $ivy.`de.tototec::de.tobiasroeser.mill.vcs.version::0.4.0`
import de.tobiasroeser.mill.vcs.version.VcsVersion
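// One cross-module per (Scala version, Spark version) pair: crossValue is the
// Scala version, crossValue2 the Spark version (see crossMatrix at the bottom).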
trait SparkModule extends Cross.Module2[String, String] with SbtModule with SonatypeCentralPublishModule {
outer =>
override def scalaVersion = crossValue
val sparkVersion = crossValue2
val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"
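// Sources live at the repository root, one level up from the default
// per-cross-module path.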
override def millSourcePath = super.millSourcePath / os.up
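// Extractors that compare a version string against this module's Spark version.
// Components are compared lexicographically as strings, which is fine as long
// as every version component stays single-digit.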
object LowerOrEqual {
def unapply(otherVersion: String): Boolean = otherVersion match {
case s"${sparkMaj}.${sparkMin}.${sparkPat}" =>
sparkMaj == sparkMajor && (sparkMin < sparkMinor || (sparkMin == sparkMinor && sparkPat <= sparkPatch))
case s"${sparkMaj}.${sparkMin}" => sparkMaj == sparkMajor && sparkMin <= sparkMinor
case sparkMaj => sparkMaj == sparkMajor
}
}
object HigherOrEqual {
def unapply(otherVersion: String): Boolean = otherVersion match {
case s"${sparkMaj}.${sparkMin}.${sparkPat}" =>
sparkMaj == sparkMajor && (sparkMin > sparkMinor || (sparkMin == sparkMinor && sparkPat >= sparkPatch))
case s"${sparkMaj}.${sparkMin}" => sparkMaj == sparkMajor && sparkMin >= sparkMinor
case sparkMaj => sparkMaj == sparkMajor
}
}
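// Pick up version-specific source directories alongside the shared "scala"
// directory; names follow the patterns "<binary version>", "<version>_and_up"
// and "<version>_to_<version>".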
def sparkVersionSpecificSources = T {
val versionSpecificDirs = os.list(mill.api.WorkspaceRoot.workspaceRoot / "src" / "main")
versionSpecificDirs.filter(_.last match {
case "scala" => true
case `sparkBinaryVersion` => true
case s"${LowerOrEqual()}_and_up" => true
case s"${LowerOrEqual()}_to_${HigherOrEqual()}" => true
case _ => false
})
}
override def sources = T.sources {
super.sources() ++ sparkVersionSpecificSources().map(PathRef(_))
}
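// Publish an empty Scaladoc jar by clearing all doc sources.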
override def docSources = T.sources(Seq[PathRef]())
override def artifactName = "spark-excel"
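// Version format: "<sparkVersion>_<vcsVersion>"; builds from untagged commits
// get a -SNAPSHOT suffix.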
override def publishVersion: T[String] = T {
val vcsVersion = VcsVersion.vcsState().format(untaggedSuffix = "-SNAPSHOT")
s"${sparkVersion}_${vcsVersion}"
}
def pomSettings = PomSettings(
description = "A Spark plugin for reading and writing Excel files",
organization = "dev.mauch",
url = "https://github.com/nightscape/spark-excel",
licenses = Seq(License.`Apache-2.0`),
versionControl = VersionControl.github("nightscape", "spark-excel"),
developers = Seq(Developer("nightscape", "Martin Mauch", "https://github.com/nightscape"))
)
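// Shade commons-io and commons-compress into the assembly jar so they cannot
// clash with the versions already on the Spark classpath.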
def assemblyRules = Seq(
Rule.AppendPattern(".*\\.conf"), // concatenate all *.conf files into a single file
Rule.Relocate("org.apache.commons.io.**", "shadeio.commons.io.@1"),
Rule.Relocate("org.apache.commons.compress.**", "shadeio.commons.compress.@1")
)
override def extraPublish = Seq(PublishInfo(assembly(), classifier = None, ivyConfig = "compile"))
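// Publish the assembly jar added via extraPublish above instead of the plain
// jar, which is filtered out of the payload below.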
override def publishArtifacts: T[PublishModule.PublishData] = Task {
val publishData = super.publishArtifacts()
publishData.copy(
payload = publishData.payload.filterNot { case (ref, _) => ref.toString.contains("jar.dest") }
)
}
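// Generous timeouts (in milliseconds) for uploads to Sonatype Central.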
override def sonatypeCentralReadTimeout: T[Int] = 600000
override def sonatypeCentralAwaitTimeout: T[Int] = 1200 * 1000
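// Spark artifacts are compile-only dependencies (compileIvyDeps): the Spark
// runtime provides them when the plugin is used.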
val sparkDeps = Agg(
ivy"org.apache.spark::spark-core:$sparkVersion",
ivy"org.apache.spark::spark-sql:$sparkVersion",
ivy"org.apache.spark::spark-hive:$sparkVersion"
)
override def compileIvyDeps = if (sparkVersion < "3.3.0") {
sparkDeps ++ Agg(ivy"org.slf4j:slf4j-api:1.7.36".excludeOrg("stax"))
} else {
sparkDeps
}
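// Runtime dependencies: Apache POI does the actual Excel reading and writing;
// its transitive helpers are pinned explicitly.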
val poiVersion = "5.3.0"
override def ivyDeps = {
val base = Agg(
ivy"org.apache.poi:poi:$poiVersion",
ivy"org.apache.poi:poi-ooxml:$poiVersion",
ivy"org.apache.poi:poi-ooxml-lite:$poiVersion",
ivy"org.apache.xmlbeans:xmlbeans:5.2.1",
ivy"com.norbitltd::spoiwo:2.2.1",
ivy"com.github.pjfanning:excel-streaming-reader:5.0.2",
ivy"commons-io:commons-io:2.17.0",
ivy"org.apache.commons:commons-compress:1.27.1",
ivy"org.apache.logging.log4j:log4j-api:2.24.1",
ivy"com.zaxxer:SparseBitSet:1.3",
ivy"org.apache.commons:commons-collections4:4.4",
ivy"com.github.virtuald:curvesapi:1.08",
ivy"commons-codec:commons-codec:1.17.1",
ivy"org.apache.commons:commons-math3:3.6.1",
ivy"org.scala-lang.modules::scala-collection-compat:2.12.0"
)
if (sparkVersion >= "3.3.0") {
base ++ Agg(ivy"org.apache.logging.log4j:log4j-core:2.24.1")
} else {
base
}
}
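// ScalaTest-based test module; adds JitPack as an extra resolver for test
// dependencies.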
object test extends SbtTests with TestModule.ScalaTest {
override def sources = T.sources {
Seq(PathRef(millSourcePath / "src" / "test" / "scala"))
}
override def resources = T.sources {
Seq(PathRef(millSourcePath / "src" / "test" / "resources"))
}
def scalaVersion = outer.scalaVersion()
override def repositoriesTask = T.task {
super.repositoriesTask() ++ Seq(MavenRepository("https://jitpack.io"))
}
def ivyDeps = sparkDeps ++ Agg(
ivy"org.typelevel::cats-core:2.12.0",
ivy"org.scalatest::scalatest:3.2.19",
ivy"org.scalatestplus::scalacheck-1-16:3.2.14.0",
ivy"org.scalacheck::scalacheck:1.18.1",
ivy"com.github.alexarchambault::scalacheck-shapeless_1.15:1.3.0",
ivy"com.github.mrpowers::spark-fast-tests:1.3.0",
ivy"org.scalamock::scalamock:5.2.0"
)
}
}
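// Supported Scala and Spark versions: every Spark version is built against
// Scala 2.12, and Spark 3.2+ additionally against Scala 2.13.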
val scala213 = "2.13.15"
val scala212 = "2.12.20"
val spark24 = List("2.4.8")
val spark30 = List("3.0.3")
val spark31 = List("3.1.3")
val spark32 = List("3.2.4")
val spark33 = List("3.3.4")
val spark34 = List("3.4.4")
val spark35 = List("3.5.3")
val sparkVersions = spark24 ++ spark30 ++ spark31 ++ spark32 ++ spark33 ++ spark34 ++ spark35
val crossMatrix =
sparkVersions.map(spark => (scala212, spark)) ++
sparkVersions.filter(_ >= "3.2").map(spark => (scala213, spark))
object `spark-excel` extends Cross[SparkModule](crossMatrix)