# This workflow runs `airbyte-ci connectors --name=source-postgres test`.
# We created this in the context of our project to improve CI performance for this connector.
# It's made to be triggered manually from the GitHub UI.
# It will allow us to collect performance metrics outside of the context of nightly builds,
# and also to use different runner types (e.g. conn-prod-xxlarge-runner) to test the connector with various resources.
name: source-postgres ci - for testing only

on:
  schedule:
    # Runs four times a day to observe variance
    - cron: "0 6,12,16,20 * * *"
  workflow_dispatch:
    inputs:
      runner:
        description: "The runner to use for this job"
        default: "conn-prod-xlarge-runner"

jobs:
  source_postgres_ci:
    name: Source Postgres CI on ${{ inputs.runner || 'conn-prod-xlarge-runner' }}
    runs-on: ${{ inputs.runner || 'conn-prod-xlarge-runner' }}
    steps:
      - name: Checkout Airbyte
        uses: actions/checkout@v3
        with:
          repository: ${{ github.event.inputs.repo }}
          ref: ${{ github.event.inputs.gitref }}
      - name: Extract branch name
        shell: bash
        run: echo "branch=${GITHUB_REF#refs/heads/}" >> $GITHUB_OUTPUT
        id: extract_branch
      - name: Check if PR is from a fork
        if: github.event_name == 'pull_request'
        shell: bash
        run: |
          if [ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]; then
            echo "PR is from a fork. Exiting workflow..."
            exit 78
          fi
      - name: Docker login
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_HUB_USERNAME }}
          password: ${{ secrets.DOCKER_HUB_PASSWORD }}
      - name: Get start timestamp
        id: get-start-timestamp
        shell: bash
        # Output name must be "start-timestamp" so CI_PIPELINE_START_TIMESTAMP can read it below
        run: echo "start-timestamp=$(date +%s)" >> $GITHUB_OUTPUT
      - name: Install Python 3.10
        uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Install airbyte-ci package
        shell: bash
        run: |
          pip install pipx
          pipx ensurepath
          pipx install airbyte-ci/connectors/pipelines/
      - name: Run airbyte-ci
        shell: bash
        run: |
          export _EXPERIMENTAL_DAGGER_RUNNER_HOST="unix:///var/run/buildkit/buildkitd.sock"
          airbyte-ci --is-ci --show-dagger-logs --gha-workflow-run-id=${{ github.run_id }} connectors --name=source-postgres test
        env:
          _EXPERIMENTAL_DAGGER_CLOUD_TOKEN: "p.eyJ1IjogIjFiZjEwMmRjLWYyZmQtNDVhNi1iNzM1LTgxNzI1NGFkZDU2ZiIsICJpZCI6ICJlNjk3YzZiYy0yMDhiLTRlMTktODBjZC0yNjIyNGI3ZDBjMDEifQ.hT6eMOYt3KZgNoVGNYI3_v4CC-s19z8uQsBkGrBhU3k"
          CI_CONTEXT: "master"
          CI_GIT_BRANCH: ${{ github.head_ref }}
          CI_GIT_REVISION: ${{ github.sha }}
          CI_GITHUB_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CI_PIPELINE_START_TIMESTAMP: ${{ steps.get-start-timestamp.outputs.start-timestamp }}
          CI_REPORT_BUCKET_NAME: airbyte-ci-reports-multi
          GCP_GSM_CREDENTIALS: ${{ secrets.GCP_GSM_CREDENTIALS }}
          SENTRY_DSN: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
          DOCKER_HUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
          DOCKER_HUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }}
          # S3_BUILD_CACHE_ACCESS_KEY_ID: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }}
          # S3_BUILD_CACHE_SECRET_KEY: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
          CI: "True"
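
# Besides the GitHub UI mentioned above, the dispatch can also be started from a terminal.
# The sketch below is an assumption about local tooling (GitHub CLI installed and authenticated,
# default branch "master"); the workflow name and the "runner" input come from this file:
#   gh workflow run "source-postgres ci - for testing only" \
#     --ref master \
#     --field runner=conn-prod-xxlarge-runner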