Skip to content

Commit e54f249

Browse files
authored
Add Spark 3.5.0 support (#436)
* Add Spark 3.5.0 support
* Use Scala 2.12.12 in CI to make scoverage work
* Disable AQE for test 'intermediate storage level' in ConnectedComponentsSuite
1 parent 8ede065 commit e54f249

File tree

5 files changed

+37
-16
lines changed

5 files changed

+37
-16
lines changed

.github/workflows/python-ci.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,9 @@ jobs:
66
fail-fast: false
77
matrix:
88
include:
9+
- spark-version: 3.5.0
10+
scala-version: 2.12.18
11+
python-version: 3.9
912
- spark-version: 3.4.1
1013
scala-version: 2.12.17
1114
python-version: 3.9

.github/workflows/scala-ci.yml

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,10 @@ jobs:
66
fail-fast: false
77
matrix:
88
include:
9+
- spark-version: 3.5.0
10+
scala-version: 2.13.8
11+
- spark-version: 3.5.0
12+
scala-version: 2.12.12
913
- spark-version: 3.4.1
1014
scala-version: 2.13.8
1115
- spark-version: 3.4.1
@@ -14,10 +18,10 @@ jobs:
1418
scala-version: 2.13.8
1519
- spark-version: 3.3.3
1620
scala-version: 2.12.12
17-
- spark-version: 3.2.4
18-
scala-version: 2.12.12
1921
- spark-version: 3.2.4
2022
scala-version: 2.13.5
23+
- spark-version: 3.2.4
24+
scala-version: 2.12.12
2125
runs-on: ubuntu-22.04
2226
env:
2327
# fixing this error after tests success: sbt.ForkMain failed with exit code 134

build.sbt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,9 +5,10 @@ import ReleaseTransformations._
55

66
resolvers += "Spark snapshot repository" at "https://repository.apache.org/snapshots/"
77

8-
val sparkVer = sys.props.getOrElse("spark.version", "3.4.1")
8+
val sparkVer = sys.props.getOrElse("spark.version", "3.5.0")
99
val sparkBranch = sparkVer.substring(0, 3)
1010
val defaultScalaVer = sparkBranch match {
11+
case "3.5" => "2.12.18"
1112
case "3.4" => "2.12.17"
1213
case "3.3" => "2.12.15"
1314
case "3.2" => "2.12.15"

dev/release.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ def verify(prompt, interactive):
3939
@click.option("--publish-docs", type=bool, default=PUBLISH_DOCS_DEFAULT, show_default=True,
4040
help="Publish docs to github-pages.")
4141
@click.option("--spark-version", multiple=True, show_default=True,
42-
default=["3.2.4", "3.3.3", "3.4.1"])
42+
default=["3.2.4", "3.3.3", "3.4.1", "3.5.0"])
4343
def main(release_version, next_version, publish_to, no_prompt, git_remote, publish_docs,
4444
spark_version):
4545
interactive = not no_prompt

src/test/scala/org/graphframes/lib/ConnectedComponentsSuite.scala

Lines changed: 25 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -215,18 +215,31 @@ class ConnectedComponentsSuite extends SparkFunSuite with GraphFrameTestSparkCon
215215
}
216216

217217
test("intermediate storage level") {
218-
val friends = Graphs.friends
219-
val expected = Set(Set("a", "b", "c", "d", "e", "f"), Set("g"))
220-
221-
val cc = friends.connectedComponents
222-
assert(cc.getIntermediateStorageLevel === StorageLevel.MEMORY_AND_DISK)
223-
224-
for (storageLevel <- Seq(StorageLevel.DISK_ONLY, StorageLevel.MEMORY_ONLY, StorageLevel.NONE)) {
225-
// TODO: it is not trivial to confirm the actual storage level used
226-
val components = cc
227-
.setIntermediateStorageLevel(storageLevel)
228-
.run()
229-
assertComponents(components, expected)
218+
// disabling adaptive query execution helps assertComponents
219+
val enabled = spark.conf.getOption("spark.sql.adaptive.enabled")
220+
try {
221+
spark.conf.set("spark.sql.adaptive.enabled", value = false)
222+
223+
val friends = Graphs.friends
224+
val expected = Set(Set("a", "b", "c", "d", "e", "f"), Set("g"))
225+
226+
val cc = friends.connectedComponents
227+
assert(cc.getIntermediateStorageLevel === StorageLevel.MEMORY_AND_DISK)
228+
229+
for (storageLevel <- Seq(StorageLevel.DISK_ONLY, StorageLevel.MEMORY_ONLY, StorageLevel.NONE)) {
230+
// TODO: it is not trivial to confirm the actual storage level used
231+
val components = cc
232+
.setIntermediateStorageLevel(storageLevel)
233+
.run()
234+
assertComponents(components, expected)
235+
}
236+
} finally {
237+
// restoring earlier conf
238+
if (enabled.isDefined) {
239+
spark.conf.set("spark.sql.adaptive.enabled", value = enabled.get)
240+
} else {
241+
spark.conf.unset("spark.sql.adaptive.enabled")
242+
}
230243
}
231244
}
232245

0 commit comments

Comments
 (0)