# Workflow file for run: "allow to disable duplicate rows support with assume-unique-key" (#51)

# CI workflow: runs the database-integration unit-test suite.
name: CI-COVER-DATABASES

on:
  # Push triggers are currently disabled; CI runs on pull requests
  # against master and on manual dispatch only.
  # push:
  #   paths:
  #     - '**.py'
  #     - '.github/workflows/**'
  #     - '!dev/**'
  pull_request:
    # Only run when code, packaging, workflow, or stack definitions change;
    # dev/ and docs/ changes are excluded.
    paths:
      - '**.py'
      - 'pyproject.toml'
      - '.github/workflows/**'
      - 'docker-compose.yml'
      - '!dev/**'
      - '!docs/**'
    branches: [ master ]
  workflow_dispatch:

permissions:
  id-token: write  # This is required for requesting the JWT
  contents: read   # This is required for actions/checkout
jobs:
  unit_tests:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        # Quoted so YAML does not read 3.10 as the float 3.1.
        python-version:
          - "3.10"
    name: Check Python ${{ matrix.python-version }} on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}
      - name: Build the stack
        # Start only the services the test suite currently covers.
        run: docker compose up -d mysql postgres trino vertica  # presto clickhouse
      - name: Install Poetry
        run: pip install poetry
      - name: Install package
        run: "poetry install"
      # BigQuery start
      # - id: 'auth'
      #   uses: 'google-github-actions/auth@v1'
      #   with:
      #     credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}'
      # - name: 'Set up BigQuery Cloud SDK'
      #   uses: 'google-github-actions/setup-gcloud@v1'
      # - name: "Install BigQuery for Python"
      #   run: poetry add google-cloud-bigquery
      # BigQuery end
      - name: Run unit tests
        env:
          # Connection URIs for the databases under test; cloud services
          # come from repository secrets, local ones from the compose stack.
          TRINO_URI: 'trino://[email protected]:8081/postgresql/public'
          SNOWFLAKE_URI: '${{ secrets.SNOWFLAKE_URI }}'
          # PRESTO_URI: '${{ secrets.PRESTO_URI }}'
          # CLICKHOUSE_URI: 'clickhouse://clickhouse:Password1@localhost:9000/clickhouse'
          VERTICA_URI: 'vertica://vertica:Password1@localhost:5433/vertica'
          # BIGQUERY_URI: '${{ secrets.BIGQUERY_URI }}'
          REDSHIFT_URI: '${{ secrets.REDSHIFT_URI }}'
        # Wait for the docker services to accept connections, then run the
        # suite in parallel (16 workers).
        run: |
          chmod +x tests/waiting_for_stack_up.sh
          ./tests/waiting_for_stack_up.sh && poetry run unittest-parallel -j 16