# GitHub Actions run header (pasted from the web UI):
#   run: remote should sync test classes ~cut-release  (#70)
#   "Workflow file for this run" follows below.

# Full build setup off main, with ~docsOnly commit filter to run just mvn_site then docs.
name: Default CI
on:
push:
branches: [ "main", "temp/*" ]
pull_request:
branches: [ "main", "temp/*" ]
env:
MAVEN_CLI_OPTS: "--batch-mode --errors --fail-at-end --show-version -DinstallAtEnd=true -DdeployAtEnd=true"
# -Dmaven.repo.local=.m2/repository not needed on github, seems ~/.m2 is supported / expected
MAVEN_OPTS: "-Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttps.protocols=TLSv1.2 -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=WARN -Dorg.slf4j.simpleLogger.showDateTime=true -Djava.awt.headless=true"
jobs:
test_builds:
uses: sparkutils/building/.github/workflows/run_build.yml@main
with:
testString: test # use tests for verifying only the delta tests have issues testString: test
theMatrix: |
{
"profile": ["Spark350", "Spark341", "Spark332", "Spark321", "Spark32", "Spark313", "Spark3"],
"scala": [ "dummy" ]
}
jdk: 11
secrets: inherit
test_builds_21:
uses: sparkutils/building/.github/workflows/run_build.yml@main
with:
testString: test # use tests for verifying only the delta tests have issues testString: test
theMatrix: |
{
"profile": ["Spark4"],
"scala": [ "dummy" ]
}
sparkVersion: 3.5.5
hadoopVersion: 3
jdk: 21
mvnCliOpts: "--batch-mode --errors --fail-at-end --show-version -DinstallAtEnd=true -DdeployAtEnd=true"
mvnOpts: "-ea -XX:+IgnoreUnrecognizedVMOptions -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttps.protocols=TLSv1.2 -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=WARN -Dorg.slf4j.simpleLogger.showDateTime=true -Djava.awt.headless=true --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED --add-opens java.base/sun.nio.cs=ALL-UNNAMED --add-opens java.base/sun.security.action=ALL-UNNAMED --add-opens java.base/sun.util.calendar=ALL-UNNAMED"
secrets: inherit
noTest_builds:
uses: sparkutils/building/.github/workflows/run_build.yml@main
with:
testString: -DskipTests
theMatrix: |
{
"profile": ["12.2.dbr", "13.1.dbr", "13.3.dbr", "14.0.dbr", "14.3.dbr", "15.4.dbr", "16.4.dbr"],
"scala": [ "dummy" ]
}
jdk: 11
secrets: inherit
noTest_builds_21:
uses: sparkutils/building/.github/workflows/run_build.yml@main
with:
testString: -DskipTests
theMatrix: |
{
"profile": ["17.3.dbr"],
"scala": [ "dummy" ]
}
sparkVersion: 3.5.5
hadoopVersion: 3
jdk: 21
mvnCliOpts: "--batch-mode --errors --fail-at-end --show-version -DinstallAtEnd=true -DdeployAtEnd=true"
mvnOpts: "-ea -XX:+IgnoreUnrecognizedVMOptions -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttps.protocols=TLSv1.2 -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=WARN -Dorg.slf4j.simpleLogger.showDateTime=true -Djava.awt.headless=true --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED --add-opens java.base/sun.nio.cs=ALL-UNNAMED --add-opens java.base/sun.security.action=ALL-UNNAMED --add-opens java.base/sun.util.calendar=ALL-UNNAMED"
secrets: inherit
publish:
uses: sparkutils/building/.github/workflows/publish_build.yml@main
needs: [noTest_builds, test_builds]
with:
theMatrix: |
{
"profile": ["Spark350", "Spark341", "Spark332", "Spark321", "Spark32", "Spark313", "Spark3", "12.2.dbr", "13.1.dbr", "13.3.dbr", "14.0.dbr", "14.3.dbr", "15.4.dbr", "16.4.dbr"],
"scala": [ "dummy" ]
}
jdk: 11
secrets: inherit
publish_21:
uses: sparkutils/building/.github/workflows/publish_build.yml@main
needs: [noTest_builds_21, test_builds_21]
with:
theMatrix: |
{
"profile": ["Spark4", "17.3.dbr"],
"scala": [ "dummy" ]
}
sparkVersion: 3.5.5
hadoopVersion: 3
jdk: 21
mvnCliOpts: "--batch-mode --errors --fail-at-end --show-version -DinstallAtEnd=true -DdeployAtEnd=true"
mvnOpts: "-ea -XX:+IgnoreUnrecognizedVMOptions -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dhttps.protocols=TLSv1.2 -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=WARN -Dorg.slf4j.simpleLogger.showDateTime=true -Djava.awt.headless=true --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/java.lang.invoke=ALL-UNNAMED --add-opens java.base/java.lang.reflect=ALL-UNNAMED --add-opens java.base/java.io=ALL-UNNAMED --add-opens java.base/java.net=ALL-UNNAMED --add-opens java.base/java.nio=ALL-UNNAMED --add-opens java.base/java.util=ALL-UNNAMED --add-opens java.base/java.util.concurrent=ALL-UNNAMED --add-opens java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED --add-opens java.base/sun.nio.cs=ALL-UNNAMED --add-opens java.base/sun.security.action=ALL-UNNAMED --add-opens java.base/sun.util.calendar=ALL-UNNAMED"
secrets: inherit