Skip to content

Commit 58347b2

Browse files
committed
cleanup builds and docs
1 parent 3788cd8 commit 58347b2

3 files changed

Lines changed: 5 additions & 50 deletions

File tree

.github/workflows/build.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ jobs:
4949
testString: -DskipTests
5050
theMatrix: |
5151
{
52-
"profile": ["12.2.dbr", "13.1.dbr", "13.3.dbr", "14.0.dbr", "14.3.dbr", "15.4.dbr", "16.4.dbr"],
52+
"profile": ["12.2.dbr", "13.3.dbr", "14.3.dbr", "15.4.dbr", "16.4.dbr"],
5353
"scala": [ "dummy" ]
5454
}
5555
jdk: 11
@@ -78,7 +78,7 @@ jobs:
7878
with:
7979
theMatrix: |
8080
{
81-
"profile": ["Spark350", "Spark341", "Spark332", "Spark321", "Spark32", "Spark313", "Spark3", "12.2.dbr", "13.1.dbr", "13.3.dbr", "14.0.dbr", "14.3.dbr", "15.4.dbr", "16.4.dbr"],
81+
"profile": ["Spark350", "Spark341", "Spark332", "Spark321", "Spark32", "Spark313", "Spark3", "12.2.dbr", "13.3.dbr", "14.3.dbr", "15.4.dbr", "16.4.dbr"],
8282
"scala": [ "dummy" ]
8383
}
8484
jdk: 11

README.md

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,9 @@ Key features:
1717
* ScalaTests are run against both Spark Connect and Classic
1818
* Tests can run remotely against Connect Servers (including Databricks 17.3)
1919
- local class and test files will be automatically sent to the servers (jars can additionally be sent via config) enabling map and udf usage
20+
- SPARK_REMOTE is detected to disable classic tests by default
2021
* Session handling works with existing connections,
21-
- automatically disabling rule Suites that are assignable to DontRunOnPureConnect and running those with ConnectSafe marker traits.
22+
- with Connect connections, automatically disabling rule Suites that are assignable to DontRunOnPureConnect and running those with ConnectSafe marker traits.
2223
- will start up local Connect Servers for you in the IDE
2324

2425
## Usage
@@ -96,4 +97,4 @@ class MyTestSuite extends SharedPureConnectTests {
9697

9798
```
9899

99-
The 'my logic' test will run on both Connect and Classic for Spark 4 builds.
100+
The 'my logic' test will run on both Connect and Classic for Spark 4 builds, or just the ConnectSafe tests when SPARK_REMOTE is defined, for example.

pom.xml

Lines changed: 0 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -176,27 +176,6 @@
176176
<snakeScope>provided</snakeScope>
177177
</properties>
178178
</profile>
179-
<profile>
180-
<id>13.1.dbr</id>
181-
<properties> <!-- also contains backports of 3.5 functionality -->
182-
<shimRuntime>13.1.dbr</shimRuntime>
183-
<shimCompilationRuntime>13.1.dbr</shimCompilationRuntime>
184-
<dbrCompatVersion>13.1.dbr_</dbrCompatVersion>
185-
<scalaVersion>2.12.15</scalaVersion>
186-
<scalaCompatVersion>2.12</scalaCompatVersion>
187-
<framelessVersion>0.14.1</framelessVersion>
188-
<sparkVersion>3.4.1</sparkVersion>
189-
<sparkCompatVersion>3.4</sparkCompatVersion>
190-
<profileDir>13.1.dbr</profileDir>
191-
<jacksonVersion>2.14.2</jacksonVersion> <!-- scalameter dependency is 2.9.10 so perf tests runs need to change it -->
192-
<guavaVersion>15.0</guavaVersion> <!-- databricks runtime version -->
193-
<parserCombinatorVersion>1.1.2</parserCombinatorVersion> <!-- databricks runtime version -->
194-
<elidebelow>2</elidebelow>
195-
<refinedVersion>0.9.28</refinedVersion>
196-
<snakeVersion>1.33</snakeVersion>
197-
<snakeScope>provided</snakeScope>
198-
</properties>
199-
</profile>
200179
<profile>
201180
<id>13.3.dbr</id>
202181
<properties> <!-- also contains backports of 3.5 functionality -->
@@ -284,31 +263,6 @@
284263
</properties>
285264

286265
</profile>
287-
<profile>
288-
<id>14.0.dbr</id>
289-
<properties>
290-
<shimRuntime>14.0.dbr</shimRuntime>
291-
<shimCompilationRuntime>14.0.dbr</shimCompilationRuntime>
292-
<dbrCompatVersion>14.0.dbr_</dbrCompatVersion>
293-
<scalaVersion>2.12.15</scalaVersion>
294-
<scalaCompatVersion>2.12</scalaCompatVersion>
295-
<framelessVersion>0.17.0</framelessVersion>
296-
<sparkVersion>3.5.0</sparkVersion>
297-
<sparkCompatVersion>3.5</sparkCompatVersion>
298-
<profileDir>14.0.dbr</profileDir>
299-
<jacksonVersion>2.14.2</jacksonVersion> <!-- scalameter dependency is 2.9.10 so perf tests runs need to change it -->
300-
<guavaVersion>15.0</guavaVersion> <!-- databricks runtime version -->
301-
<parserCombinatorVersion>1.1.2</parserCombinatorVersion> <!-- databricks runtime version -->
302-
<elidebelow>2</elidebelow>
303-
<refinedVersion>0.9.28</refinedVersion>
304-
<snakeVersion>2.0</snakeVersion>
305-
<snakeScope>provided</snakeScope>
306-
307-
<framelessCoreCompatVersion>_3.5</framelessCoreCompatVersion>
308-
<framelessCompatVersion>_3.5</framelessCompatVersion>
309-
<framelessOrg>com.sparkutils</framelessOrg>
310-
</properties>
311-
</profile>
312266
<profile>
313267
<id>14.3.dbr</id>
314268
<properties>

0 commit comments

Comments
 (0)