Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/python-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@ jobs:
fail-fast: false
matrix:
include:
- spark-version: 3.5.5
- spark-version: 3.5.6
scala-version: 2.12
python-version: "3.10"
java-version: 11
- spark-version: 4.0.0
- spark-version: 4.0.1
scala-version: 2.13
python-version: 3.12
java-version: 17
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
env:
# define Java options for both official sbt and sbt-extras
JAVA_OPTS: -Xms2048M -Xmx2048M -Xss6M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/python-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name: python-pypi-publish
on:
push:
branches:
- master
- main
tags:
- "*.*.*"

Expand Down
10 changes: 6 additions & 4 deletions .github/workflows/scala-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ jobs:
fail-fast: false
matrix:
include:
- spark-version: 3.5.5
- spark-version: 3.5.6
java-version: 11
- spark-version: 3.5.5
- spark-version: 3.5.6
java-version: 17
- spark-version: 4.0.0
- spark-version: 4.0.1
java-version: 17
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
env:
# works around this error, which appears after tests succeed: sbt.ForkMain failed with exit code 134
# https://stackoverflow.com/questions/33287424/strange-exception-in-sbt-test
Expand All @@ -33,6 +33,8 @@ jobs:
run: build/sbt -Dspark.version=${{ matrix.spark-version }} scalafmtCheckAll
- name: Check scalastyle
run: build/sbt -Dspark.version=${{ matrix.spark-version }} "scalafixAll --check"
- name: Check docs build
run: build/sbt -Dspark.version=${{ matrix.spark-version }} doc
- name: Build and Test
run: build/sbt -v -Dspark.version=${{ matrix.spark-version }} coverage +test coverageReport
- uses: codecov/codecov-action@v3
2 changes: 1 addition & 1 deletion .github/workflows/scala-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name: scala-central-publish
on:
push:
branches:
- master
- main
tags:
- "*.*.*"

Expand Down
4 changes: 2 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@ import laika.config.SyntaxHighlighting
import laika.format.Markdown.GitHubFlavor
import org.typelevel.scalacoptions.ScalacOptions

lazy val sparkVer = sys.props.getOrElse("spark.version", "3.5.5")
lazy val sparkVer = sys.props.getOrElse("spark.version", "3.5.6")
lazy val sparkMajorVer = sparkVer.substring(0, 1)
lazy val sparkBranch = sparkVer.substring(0, 3)
lazy val scalaVersions = sparkMajorVer match {
case "4" => Seq("2.13.16")
case "3" => Seq("2.12.18", "2.13.16")
case "3" => Seq("2.12.20", "2.13.16")
case _ => throw new IllegalArgumentException(s"Unsupported Spark version: $sparkVer.")
}
lazy val scalaVer = sys.props.getOrElse("scala.version", scalaVersions.head)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,7 @@ import org.graphframes.WithIntermediateStorageLevel
* - [[AggregateMessages.msg]]: message sent to vertex (for aggregation function)
*
* Note: If you use this operation to write an iterative algorithm, you may want to use
* [[AggregateMessages$.getCachedDataFrame getCachedDataFrame()]] as a workaround for caching
* issues.
* `checkpoint()` (`localCheckpoint()`) as a workaround for caching issues.
*
* @example
* We can use this function to compute the in-degree of each vertex
Expand Down