// Your sbt build file. Guides on how to write one can be found at
// http://www.scala-sbt.org/0.13/docs/index.html
//
// NOTE(review): this file was reconstructed from a garbled unified diff
// (old "-" and new "+" lines were interleaved, with diff line numbers
// fused into the content). The "+" (new) side of the diff is kept below.

import ReleaseTransformations._

// Spark version, overridable via -Dspark.version=...
lazy val sparkVer = sys.props.getOrElse("spark.version", "3.5.4")
// First three characters, e.g. "3.5" — selects the Scala version and
// suffixes the published artifact version.
lazy val sparkBranch = sparkVer.substring(0, 3)
lazy val defaultScalaVer = sparkBranch match {
  case "3.5" => "2.12.18"
  case _ => throw new IllegalArgumentException(s"Unsupported Spark version: $sparkVer.")
}
// Scala version, overridable via -Dscala.version=...
lazy val scalaVer = sys.props.getOrElse("scala.version", defaultScalaVer)
lazy val defaultScalaTestVer = scalaVer match {
  case s if s.startsWith("2.12") || s.startsWith("2.13") => "3.0.8"
  // Fail fast with a clear message instead of an opaque MatchError,
  // mirroring the defaultScalaVer match above.
  case other => throw new IllegalArgumentException(s"Unsupported Scala version: $other.")
}

// Append the Spark branch to the base version, e.g. "0.8.4-spark3.5".
// Referencing (ThisBuild / version).value here resolves to the
// previously defined value (typically from version.sbt); sbt permits a
// later setting in the sequence to reference the prior value of the
// same key.
ThisBuild / version := {
  val baseVersion = (ThisBuild / version).value
  s"${baseVersion}-spark${sparkBranch}"
}

ThisBuild / scalaVersion := scalaVer
ThisBuild / organization := "org.graphframes"
ThisBuild / crossScalaVersions := Seq("2.12.18", "2.13.8")

lazy val root = (project in file("."))
  .settings(
    name := "graphframes",

    // Replace spark-packages plugin functionality with explicit
    // "provided" Spark dependencies.
    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-graphx" % sparkVer % "provided" cross CrossVersion.for3Use2_13,
      "org.apache.spark" %% "spark-sql" % sparkVer % "provided" cross CrossVersion.for3Use2_13,
      "org.apache.spark" %% "spark-mllib" % sparkVer % "provided" cross CrossVersion.for3Use2_13,
      "org.slf4j" % "slf4j-api" % "1.7.16",
      "org.scalatest" %% "scalatest" % defaultScalaTestVer % Test,
      "com.github.zafarkhaja" % "java-semver" % "0.9.0" % Test // MIT license
    ),

    // All Spark Packages need a license.
    licenses := Seq("Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0")),

    // Modern (slash-syntax) way to set Scala options.
    Compile / scalacOptions ++= Seq("-deprecation", "-feature"),

    Compile / doc / scalacOptions ++= Seq(
      "-groups",
      "-implicits",
      "-skip-packages", Seq("org.apache.spark").mkString(":")
    ),

    Test / doc / scalacOptions ++= Seq("-groups", "-implicits"),

    // Forking fixes a class-loader problem with the scala.Tuple2 class
    // and, together with the javaOptions below, works around
    // https://github.com/scalatest/scalatest/issues/770
    Test / fork := true,
    Test / parallelExecution := false,

    Test / javaOptions ++= Seq(
      "-XX:+IgnoreUnrecognizedVMOptions",
      "-Xmx2048m",
      "-XX:ReservedCodeCacheSize=384m",
      "-XX:MaxMetaspaceSize=384m",
      "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
      "--add-opens=java.base/java.lang=ALL-UNNAMED"
    ),

    // Serialize all tasks globally — the Spark tests are not safe to
    // run concurrently within one JVM.
    Global / concurrentRestrictions := Seq(
      Tags.limitAll(1)
    ),

    autoAPIMappings := true,

    coverageHighlighting := false,

    // We only use sbt-release to update version numbers.
    releaseProcess := Seq[ReleaseStep](
      inquireVersions,
      setReleaseVersion,
      commitReleaseVersion,
      tagRelease,
      setNextVersion,
      commitNextVersion
    ),

    // Assembly settings.
    assembly / test := {}, // do not run tests when building the assembly
    assembly / assemblyMergeStrategy := {
      case PathList("META-INF", xs @ _*) => MergeStrategy.discard
      case x if x.endsWith("module-info.class") => MergeStrategy.discard
      case x =>
        // Fall back to the default strategy (canonical sbt-assembly snippet).
        val oldStrategy = (assembly / assemblyMergeStrategy).value
        oldStrategy(x)
    },

    credentials += Credentials(Path.userHome / ".ivy2" / ".sbtcredentials")
  )