Add Spark 4.0 support #2587
name: Tests
on:
  push:
    branches:
      - develop
  pull_request:
    branches-ignore:
      - master
  workflow_dispatch:
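# Cancel any in-progress run for the same workflow and branch/PR when a newer one starts.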
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
  cancel-in-progress: true
env:
  DEFAULT_PYTHON: '3.12'
jobs:
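  # Reusable workflow that computes the per-connector test matrices
  # (Spark/Pydantic/Java/Python/OS combinations) from the files changed in the PR or commit.
  # The Spark 4.0 combinations added by this PR are presumably defined there.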
  get-matrix:
    name: Get version matrix based on changes in PR/commit
    uses: ./.github/workflows/get-matrix.yml
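
  # Each connector job below follows the same pattern: fan out over the JSON matrix
  # produced by get-matrix and delegate the actual test run to a reusable per-connector workflow.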
  tests-core:
    name: Run core tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-core) }}
    uses: ./.github/workflows/test-core.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-clickhouse:
    name: Run Clickhouse tests (server=${{ matrix.clickhouse-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-clickhouse) }}
    uses: ./.github/workflows/test-clickhouse.yml
    with:
      clickhouse-image: ${{ matrix.clickhouse-image }}
      clickhouse-version: ${{ matrix.clickhouse-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-greenplum:
    name: Run Greenplum tests (server=${{ matrix.greenplum-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-greenplum) }}
    uses: ./.github/workflows/test-greenplum.yml
    with:
      greenplum-version: ${{ matrix.greenplum-version }}
      spark-version: ${{ matrix.spark-version }}
      package-version: ${{ matrix.package-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
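    # Credentials passed through to the reusable workflow, presumably used to download
    # the Greenplum connector package, which is not publicly distributed.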
    secrets:
      GREENPLUM_PACKAGES_USER: ${{ secrets.GREENPLUM_PACKAGES_USER }}
      GREENPLUM_PACKAGES_PASSWORD: ${{ secrets.GREENPLUM_PACKAGES_PASSWORD }}

  tests-hive:
    name: Run Hive tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-hive) }}
    uses: ./.github/workflows/test-hive.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-kafka:
    name: Run Kafka tests (server=${{ matrix.kafka-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-kafka) }}
    uses: ./.github/workflows/test-kafka.yml
    with:
      kafka-version: ${{ matrix.kafka-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-local-fs:
    name: Run LocalFS tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-local-fs) }}
    uses: ./.github/workflows/test-local-fs.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-mongodb:
    name: Run MongoDB tests (server=${{ matrix.mongodb-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-mongodb) }}
    uses: ./.github/workflows/test-mongodb.yml
    with:
      mongodb-version: ${{ matrix.mongodb-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-mssql:
    name: Run MSSQL tests (server=${{ matrix.mssql-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-mssql) }}
    uses: ./.github/workflows/test-mssql.yml
    with:
      mssql-version: ${{ matrix.mssql-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-mysql:
    name: Run MySQL tests (server=${{ matrix.mysql-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-mysql) }}
    uses: ./.github/workflows/test-mysql.yml
    with:
      mysql-version: ${{ matrix.mysql-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-oracle:
    name: Run Oracle tests (server=${{ matrix.oracle-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-oracle) }}
    uses: ./.github/workflows/test-oracle.yml
    with:
      oracle-image: ${{ matrix.oracle-image }}
      oracle-version: ${{ matrix.oracle-version }}
      db-name: ${{ matrix.db-name }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-postgres:
    name: Run Postgres tests (server=${{ matrix.postgres-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-postgres) }}
    uses: ./.github/workflows/test-postgres.yml
    with:
      postgres-version: ${{ matrix.postgres-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-teradata:
    name: Run Teradata tests (spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-teradata) }}
    uses: ./.github/workflows/test-teradata.yml
    with:
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-ftp:
    name: Run FTP tests (server=${{ matrix.ftp-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-ftp) }}
    uses: ./.github/workflows/test-ftp.yml
    with:
      ftp-version: ${{ matrix.ftp-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-ftps:
    name: Run FTPS tests (server=${{ matrix.ftps-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-ftps) }}
    uses: ./.github/workflows/test-ftps.yml
    with:
      ftps-version: ${{ matrix.ftps-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-hdfs:
    name: Run HDFS tests (server=${{ matrix.hadoop-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-hdfs) }}
    uses: ./.github/workflows/test-hdfs.yml
    with:
      hadoop-version: ${{ matrix.hadoop-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-s3:
    name: Run S3 tests (server=${{ matrix.minio-version }}, spark=${{ matrix.spark-version }}, pydantic=${{ matrix.pydantic-version }}, java=${{ matrix.java-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-s3) }}
    uses: ./.github/workflows/test-s3.yml
    with:
      minio-version: ${{ matrix.minio-version }}
      spark-version: ${{ matrix.spark-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      java-version: ${{ matrix.java-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-sftp:
    name: Run SFTP tests (server=${{ matrix.openssh-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-sftp) }}
    uses: ./.github/workflows/test-sftp.yml
    with:
      openssh-version: ${{ matrix.openssh-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-samba:
    name: Run Samba tests (server=${{ matrix.server-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-samba) }}
    uses: ./.github/workflows/test-samba.yml
    with:
      server-version: ${{ matrix.server-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}

  tests-webdav:
    name: Run WebDAV tests (server=${{ matrix.webdav-version }}, pydantic=${{ matrix.pydantic-version }}, python=${{ matrix.python-version }}, os=${{ matrix.os }})
    needs: [get-matrix]
    strategy:
      fail-fast: false
      matrix:
        include: ${{ fromJson(needs.get-matrix.outputs.matrix-webdav) }}
    uses: ./.github/workflows/test-webdav.yml
    with:
      webdav-version: ${{ matrix.webdav-version }}
      pydantic-version: ${{ matrix.pydantic-version }}
      python-version: ${{ matrix.python-version }}
      os: ${{ matrix.os }}
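
  # Aggregation job: waits for the test jobs above, merges their coverage artifacts,
  # and uploads the combined report to Codecov. Its "Tests done" status can also serve
  # as a single required check for merging.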
  all_done:
    name: Tests done
    runs-on: ubuntu-latest
    needs:
      - tests-core
      - tests-clickhouse
      - tests-hive
      - tests-kafka
      - tests-local-fs
      - tests-mongodb
      - tests-mssql
      - tests-mysql
      - tests-oracle
      - tests-postgres
      - tests-teradata
      - tests-ftp
      - tests-ftps
      - tests-hdfs
      - tests-s3
      - tests-sftp
      - tests-samba
      - tests-webdav
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Cache pip
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-python-${{ env.DEFAULT_PYTHON }}-coverage
      - name: Upgrade pip
        run: python -m pip install --upgrade pip setuptools wheel
      - name: Install dependencies
        run: pip install -I coverage pytest
      - name: Download all coverage reports
        uses: actions/download-artifact@v4
        with:
          path: reports/
          pattern: coverage-*
          merge-multiple: true
      - name: Generate coverage reports
        run: ./combine_coverage.sh
      - name: Check coverage
        uses: codecov/codecov-action@v5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ./reports/coverage.xml
          fail_ci_if_error: true
          plugin: noop
      - name: All done
        run: echo 1