Mirror of https://github.com/getredash/redash.git (synced 2025-12-19 17:37:19 -05:00)
reuse built frontend in ci, merge compose files (#6674)

* reuse built frontend in ci, merge compose files
* pr comments
* added make create_db alias to create_database
* fixed lint

Co-authored-by: Andrii Chubatiuk <wachy@Andriis-MBP-2.lan>
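The net effect of this change is that the static frontend is built once per CI run and then reused by every job and image build that needs it. A minimal local sketch of the idea, assuming a checkout with yarn and Docker available (these exact commands are an illustration assembled from the diff below, not a script shipped in the repository):

    yarn --frozen-lockfile --network-concurrency 1
    yarn build                                  # produce client/dist once
    FRONTEND_BUILD_MODE=1 docker compose build  # image build copies the prebuilt client/dist (mode 1)
    # FRONTEND_BUILD_MODE=0 stubs the frontend for backend-only tests; mode 2 builds it inside Docker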
(deleted file: the standalone CI compose configuration; its path is not shown in this view)
@@ -1,25 +0,0 @@
services:
  redash:
    build: ../
    command: manage version
    depends_on:
      - postgres
      - redis
    ports:
      - "5000:5000"
    environment:
      PYTHONUNBUFFERED: 0
      REDASH_LOG_LEVEL: "INFO"
      REDASH_REDIS_URL: "redis://redis:6379/0"
      POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
      REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
      REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:latest
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"
(deleted file: the Cypress/E2E CI compose configuration; its path is not shown in this view)
@@ -1,73 +0,0 @@
x-redash-service: &redash-service
  build:
    context: ../
    args:
      install_groups: "main"
      code_coverage: ${CODE_COVERAGE}
x-redash-environment: &redash-environment
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  POSTGRES_PASSWORD: "FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb"
  REDASH_DATABASE_URL: "postgresql://postgres:FmTKs5vX52ufKR1rd8tn4MoSP7zvCJwb@postgres/postgres"
  REDASH_RATELIMIT_ENABLED: "false"
  REDASH_ENFORCE_CSRF: "true"
  REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
services:
  server:
    <<: *redash-service
    command: server
    depends_on:
      - postgres
      - redis
    ports:
      - "5000:5000"
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  scheduler:
    <<: *redash-service
    command: scheduler
    depends_on:
      - server
    environment:
      <<: *redash-environment
  worker:
    <<: *redash-service
    command: worker
    depends_on:
      - server
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  cypress:
    ipc: host
    build:
      context: ../
      dockerfile: .ci/Dockerfile.cypress
    depends_on:
      - server
      - worker
      - scheduler
    environment:
      CYPRESS_baseUrl: "http://server:5000"
      CYPRESS_coverage: ${CODE_COVERAGE}
      PERCY_TOKEN: ${PERCY_TOKEN}
      PERCY_BRANCH: ${CIRCLE_BRANCH}
      PERCY_COMMIT: ${CIRCLE_SHA1}
      PERCY_PULL_REQUEST: ${CIRCLE_PR_NUMBER}
      COMMIT_INFO_BRANCH: ${CIRCLE_BRANCH}
      COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE}
      COMMIT_INFO_AUTHOR: ${CIRCLE_USERNAME}
      COMMIT_INFO_SHA: ${CIRCLE_SHA1}
      COMMIT_INFO_REMOTE: ${CIRCLE_REPOSITORY_URL}
      CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID}
      CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY}
  redis:
    image: redis:7-alpine
    restart: unless-stopped
  postgres:
    image: pgautoupgrade/pgautoupgrade:latest
    command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
    restart: unless-stopped
    environment:
      POSTGRES_HOST_AUTH_METHOD: "trust"
(deleted file: shell script that built and pushed the preview image to Docker Hub; its path is not shown in this view)
@@ -1,39 +0,0 @@
#!/bin/bash

# This script only needs to run on the main Redash repo

if [ "${GITHUB_REPOSITORY}" != "getredash/redash" ]; then
  echo "Skipping image build for Docker Hub, as this isn't the main Redash repository"
  exit 0
fi

if [ "${GITHUB_REF_NAME}" != "master" ] && [ "${GITHUB_REF_NAME}" != "preview-image" ]; then
  echo "Skipping image build for Docker Hub, as this isn't the 'master' nor 'preview-image' branch"
  exit 0
fi

if [ "x${DOCKER_USER}" = "x" ] || [ "x${DOCKER_PASS}" = "x" ]; then
  echo "Skipping image build for Docker Hub, as the login details aren't available"
  exit 0
fi

set -e
VERSION=$(jq -r .version package.json)
VERSION_TAG="$VERSION.b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}"

export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1

docker login -u "${DOCKER_USER}" -p "${DOCKER_PASS}"

DOCKERHUB_REPO="redash/redash"
DOCKER_TAGS="-t redash/redash:preview -t redash/preview:${VERSION_TAG}"

# Build the docker container
docker build --build-arg install_groups="main,all_ds,dev" ${DOCKER_TAGS} .

# Push the container to the preview build locations
docker push "${DOCKERHUB_REPO}:preview"
docker push "redash/preview:${VERSION_TAG}"

echo "Built: ${VERSION_TAG}"
.ci/pack: 9 changed lines (file deleted)
@@ -1,9 +0,0 @@
#!/bin/bash
NAME=redash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM
FILENAME=$NAME.$FULL_VERSION.tar.gz

mkdir -p /tmp/artifacts/

tar -zcv -f /tmp/artifacts/$FILENAME --exclude=".git" --exclude="optipng*" --exclude="cypress" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" *
(deleted file: script that stamped the build version into redash/__init__.py and client/app/version.json; its path is not shown in this view)
@@ -1,6 +0,0 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=${VERSION}+b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}

sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json
(modified file: an ignore list, one entry removed; filename not shown in this view)
@@ -1,5 +1,4 @@
client/.tmp/
client/dist/
node_modules/
viz-lib/node_modules/
.tmp/
.github/workflows/ci.yml: 76 changed lines
@@ -3,12 +3,28 @@ on:
  push:
    branches:
      - master
    tags:
      - '*'
  pull_request_target:
    branches:
      - master
env:
  NODE_VERSION: 18
  YARN_VERSION: 1.22.22
  REDASH_COOKIE_SECRET: "2H9gNG9obnAQ9qnR9BDTQUph6CbXKCzF"
  COMPOSE_DOCKER_CLI_BUILD: 1
  DOCKER_BUILDKIT: 1
  FRONTEND_BUILD_MODE: 1
  INSTALL_GROUPS: main,all_ds,dev
  SERVER_MOUNT: /ignore
  PERCY_BRANCH: ${{github.head_ref || github.ref_name}}
  PERCY_COMMIT: ${{github.sha}}
  PERCY_PULL_REQUEST: ${{github.event.number}}
  COMMIT_INFO_BRANCH: ${{github.head_ref || github.ref_name}}
  COMMIT_INFO_MESSAGE: ${{github.event.head_commit.message}}
  COMMIT_INFO_AUTHOR: ${{github.event.pull_request.user.login}}
  COMMIT_INFO_SHA: ${{github.sha}}
  COMMIT_INFO_REMOTE: ${{github.server_url}}/${{github.repository}}
jobs:
  backend-lint:
    runs-on: ubuntu-22.04
@@ -23,7 +39,7 @@ jobs:
      - uses: actions/setup-python@v5
        with:
          python-version: '3.8'
      - run: sudo pip install black==23.1.0 ruff==0.0.287
      - run: sudo pip install black==24.3.0 ruff==0.1.9
      - run: ruff check .
      - run: black --check .

@@ -31,10 +47,7 @@ jobs:
    runs-on: ubuntu-22.04
    needs: backend-lint
    env:
      COMPOSE_FILE: .ci/compose.ci.yaml
      COMPOSE_PROJECT_NAME: redash
      COMPOSE_DOCKER_CLI_BUILD: 1
      DOCKER_BUILDKIT: 1
      FRONTEND_BUILD_MODE: 0
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
@@ -46,31 +59,31 @@
      - name: Build Docker Images
        run: |
          set -x
          docker compose build --build-arg install_groups="main,all_ds,dev" --build-arg skip_frontend_build=true
          docker compose build
          docker compose up -d
          sleep 10
      - name: Create Test Database
        run: docker compose -p redash run --rm postgres psql -h postgres -U postgres -c "create database tests;"
        run: docker compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
      - name: List Enabled Query Runners
        run: docker compose -p redash run --rm redash manage ds list_types
        run: docker compose run --rm server manage ds list_types
      - name: Run Tests
        run: docker compose -p redash run --name tests redash tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
        run: docker compose run --name tests server tests --junitxml=junit.xml --cov-report=xml --cov=redash --cov-config=.coveragerc tests/
      - name: Copy Test Results
        run: |
          mkdir -p /tmp/test-results/unit-tests
          docker cp tests:/app/coverage.xml ./coverage.xml
          docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v3
        uses: codecov/codecov-action@v4
      - name: Store Test Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: test-results
          name: backend-test-results
          path: /tmp/test-results
      - name: Store Coverage Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          name: backend-coverage
          path: coverage.xml

  frontend-lint:
@@ -90,13 +103,14 @@
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
      - name: Run Lint
        run: yarn lint:ci
      - name: Store Test Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: test-results
          name: frontend-test-results
          path: /tmp/test-results

  frontend-unit-tests:
@@ -117,24 +131,25 @@
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
      - name: Run App Tests
        run: yarn test
      - name: Run Visualizations Tests
        run: cd viz-lib && yarn test
        run: |
          cd viz-lib
          yarn test
      - run: yarn lint

  frontend-e2e-tests:
    runs-on: ubuntu-22.04
    needs: frontend-lint
    env:
      COMPOSE_FILE: .ci/compose.cypress.yaml
      COMPOSE_PROJECT_NAME: cypress
      CYPRESS_INSTALL_BINARY: 0
      PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
      PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
      CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
      CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
      INSTALL_GROUPS: main
      COMPOSE_PROFILES: e2e
      REDASH_PRODUCTION: "true"
    steps:
      - if: github.event.pull_request.mergeable == 'false'
        name: Exit if PR is not mergeable
@@ -154,10 +169,12 @@
      - name: Install Dependencies
        run: |
          npm install --global --force yarn@$YARN_VERSION
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1
          yarn cache clean
          yarn --frozen-lockfile --network-concurrency 1
      - name: Setup Redash Server
        run: |
          set -x
          yarn build
          yarn cypress build
          yarn cypress start -- --skip-db-seed
          docker compose run cypress yarn cypress db-seed
@@ -169,7 +186,12 @@
      - name: Copy Code Coverage Results
        run: docker cp cypress:/usr/src/app/coverage ./coverage || true
      - name: Store Coverage Results
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: coverage
          name: frontend-coverage
          path: coverage
      - uses: actions/upload-artifact@v4
        with:
          name: frontend
          path: client/dist
          retention-days: 1
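The backend-unit-tests job now runs against the merged compose files with the frontend stubbed out, instead of a dedicated .ci compose file. Roughly the same sequence can be reproduced locally; the commands below are a sketch lifted from the steps above (pytest flags trimmed), assuming the repository root as working directory:

    FRONTEND_BUILD_MODE=0 docker compose build
    docker compose up -d
    docker compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
    docker compose run --name tests server tests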
.github/workflows/preview-image.yml: 126 changed lines
@@ -1,15 +1,20 @@
name: Preview Image
on:
  push:
    tags:
      - '*-dev'
  workflow_run:
    workflows:
      - Tests
    types:
      - completed
    branches:
      - master

env:
  NODE_VERSION: 18
  DOCKER_REPO: redash

jobs:
  build-skip-check:
    runs-on: ubuntu-22.04
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    outputs:
      skip: ${{ steps.skip-check.outputs.skip }}
    steps:
@@ -32,56 +37,115 @@
    runs-on: ubuntu-22.04
    needs:
      - build-skip-check
    outputs:
      version: ${{ steps.version.outputs.VERSION_TAG }}
      repo: ${{ steps.version.outputs.DOCKER_REPO }}
    if: needs.build-skip-check.outputs.skip == 'false'
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 1
          ref: ${{ github.event.push.after }}

      - uses: actions/setup-node@v4
      - uses: dawidd6/action-download-artifact@v3
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'yarn'

      - name: Install Dependencies
        run: |
          npm install --global --force yarn@1.22.22
          yarn cache clean && yarn --frozen-lockfile --network-concurrency 1

          name: frontend
          workflow: ci.yml
          github_token: ${{ secrets.GITHUB_TOKEN }}
          run_id: ${{ github.event.workflow_run.id }}
          path: client/dist
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}

      - name: Set version
        id: version
        run: |
          set -x
          .ci/update_version
          VERSION_TAG=$(jq -r .version package.json)
          echo "VERSION_TAG=$VERSION_TAG" >> "$GITHUB_OUTPUT"

          VERSION=$(jq -r .version package.json)
          FULL_VERSION=${VERSION}-b${GITHUB_RUN_ID}.${GITHUB_RUN_NUMBER}
          sed -ri "s/^__version__ = ([A-Za-z0-9.-]*)'/__version__ = '${FULL_VERSION}'/" redash/__init__.py
          sed -i "s/dev/${GITHUB_SHA}/" client/app/version.json
          echo "VERSION_TAG=$FULL_VERSION" >> "$GITHUB_OUTPUT"
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
          echo "SCOPE=${platform//\//-}" >> $GITHUB_ENV
          if [[ "${{ vars.DOCKER_REPO }}" != "" ]]; then
            echo "DOCKER_REPO=${{ vars.DOCKER_REPO }}" >> $GITHUB_ENV
            echo "DOCKER_REPO=${{ vars.DOCKER_REPO }}" >> $GITHUB_OUTPUT
          fi
      - name: Build and push preview image to Docker Hub
        uses: docker/build-push-action@v4
        uses: docker/build-push-action@v5
        id: build
        with:
          push: true
          tags: |
            redash/redash:preview
            redash/preview:${{ steps.version.outputs.VERSION_TAG }}
          context: .
          cache-from: type=gha,scope=${{ env.SCOPE }}
          cache-to: type=gha,mode=max,scope=${{ env.SCOPE }}
          platforms: ${{ matrix.platform }}
          outputs: type=image,name=${{ env.DOCKER_REPO }}/redash,push-by-digest=true,name-canonical=true,push=true
          build-args: |
            test_all_deps=true
          cache-from: type=gha
          cache-to: type=gha,mode=max
          platforms: linux/amd64
            FRONTEND_BUILD_MODE=1
        env:
          DOCKER_CONTENT_TRUST: true
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

      - name: "Failure: output container logs to console"
        if: failure()
        run: docker compose logs
  publish-docker-manifest:
    runs-on: ubuntu-22.04
    needs:
      - build-skip-check
      - build-docker-image
    if: needs.build-skip-check.outputs.skip == 'false'
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          pattern: digests-*
          path: /tmp/digests
          merge-multiple: true
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ needs.build-docker-image.outputs.repo }}/redash
          tags: preview
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASS }}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ needs.build-docker-image.outputs.repo }}/redash@sha256:%s ' *)
      - name: Inspect image
        run: |
          REDASH_IMAGE="${{ needs.build-docker-image.outputs.repo }}/redash:${{ steps.meta.outputs.version }}"
          docker buildx imagetools inspect $REDASH_IMAGE
      - name: Push image ${{ needs.build-docker-image.outputs.repo }}/preview image
        run: |
          REDASH_IMAGE="${{ needs.build-docker-image.outputs.repo }}/redash:preview"
          PREVIEW_IMAGE="${{ needs.build-docker-image.outputs.repo }}/preview:${{ needs.build-docker-image.outputs.version }}"
          docker buildx imagetools create --tag $PREVIEW_IMAGE $REDASH_IMAGE
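Each matrix leg pushes its platform image by digest only, and the publish-docker-manifest job then stitches the amd64 and arm64 digests into a single multi-arch preview tag. Once the workflow has run, the merged manifest can be checked with buildx; the command below is a sketch that assumes the default redash/redash repository rather than a DOCKER_REPO override:

    docker buildx imagetools inspect redash/redash:preview   # should list both linux/amd64 and linux/arm64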
Dockerfile: 38 changed lines
@@ -1,30 +1,38 @@
FROM node:18-bookworm as frontend-builder

RUN npm install --global --force yarn@1.22.22

# Controls whether to build the frontend assets
ARG skip_frontend_build
ARG FRONTEND_BUILD_MODE=0

# MODE 0: create empty files. useful for backend tests
FROM alpine:3.19 as frontend-builder-0
RUN \
  mkdir -p /frontend/client/dist && \
  touch /frontend/client/dist/multi_org.html && \
  touch /frontend/client/dist/index.html

# MODE 1: copy static frontend from host, useful for CI to ignore building static content multiple times
FROM alpine:3.19 as frontend-builder-1
COPY client/dist /frontend/client/dist

# MODE 2: build static content in docker, can be used for a local development
FROM node:18.1-bookworm as frontend-builder-2
RUN npm install --global --force yarn@1.22.22
ENV CYPRESS_INSTALL_BINARY=0
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1

RUN useradd -m -d /frontend redash
USER redash

WORKDIR /frontend
COPY --chown=redash package.json yarn.lock .yarnrc /frontend/
COPY --chown=redash viz-lib /frontend/viz-lib
COPY --chown=redash scripts /frontend/scripts

# Controls whether to instrument code for coverage information
ARG code_coverage
ENV BABEL_ENV=${code_coverage:+test}

RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn --frozen-lockfile --network-concurrency 1; fi

ARG CODE_COVERAGE
ENV BABEL_ENV=${CODE_COVERAGE:+test}
RUN yarn --frozen-lockfile --network-concurrency 1;
COPY --chown=redash client /frontend/client
COPY --chown=redash webpack.config.js /frontend/
RUN if [ "x$skip_frontend_build" = "x" ] ; then yarn build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
RUN yarn build

FROM frontend-builder-${FRONTEND_BUILD_MODE} as frontend-builder

FROM python:3.8-slim-bookworm

@@ -91,8 +99,8 @@ COPY pyproject.toml poetry.lock ./
ARG POETRY_OPTIONS="--no-root --no-interaction --no-ansi"
# for LDAP authentication, install with `ldap3` group
# disabled by default due to GPL license conflict
ARG install_groups="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $install_groups $POETRY_OPTIONS
ARG INSTALL_GROUPS="main,all_ds,dev"
RUN /etc/poetry/bin/poetry install --only $INSTALL_GROUPS $POETRY_OPTIONS

COPY --chown=redash . /app
COPY --from=frontend-builder --chown=redash /frontend/client/dist /app/client/dist
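The three frontend-builder-N stages make the frontend source selectable at build time through a single build argument, and the final FROM line picks the stage by name. A sketch of the three invocations (the image tags are illustrative only, not names used by the repository):

    docker build --build-arg FRONTEND_BUILD_MODE=0 -t redash:backend-tests .  # empty placeholder assets
    docker build --build-arg FRONTEND_BUILD_MODE=1 -t redash:ci .             # requires a prebuilt client/dist on the host
    docker build --build-arg FRONTEND_BUILD_MODE=2 -t redash:local .          # builds the frontend inside Docker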
Makefile: 13 changed lines
@@ -1,10 +1,13 @@
.PHONY: compose_build up test_db create_database clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
.PHONY: compose_build up test_db create_database create_db clean clean-all down tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash

export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1

compose_build: .env
	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose build
	docker compose build

up:
	COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker compose up -d --build
	docker compose up -d --build

test_db:
	@for i in `seq 1 5`; do \
@@ -13,9 +16,11 @@ test_db:
	done
	docker compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'

create_database: .env
create_db: .env
	docker compose run server create_db

create_database: create_db

clean:
	docker compose down
	docker compose --project-name cypress down
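With the alias in place the two targets are interchangeable; for example:

    make create_db        # runs "docker compose run server create_db"
    make create_database  # same thing, kept for backwards compatibility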
(modified file: the container entrypoint script; its path is not shown in this view)
@@ -1,25 +1,48 @@
#!/bin/bash
set -e

if [ -z $REDASH_REDIS_URL ]; then
  export REDASH_REDIS_URL=redis://:${REDASH_REDIS_PASSWORD}@${REDASH_REDIS_HOSTNAME}:${REDASH_REDIS_PORT}/${REDASH_REDIS_NAME}
fi

if [ -z $REDASH_DATABASE_URL ]; then
  export REDASH_DATABASE_URL=postgresql://${REDASH_DATABASE_USER}:${REDASH_DATABASE_PASSWORD}@${REDASH_DATABASE_HOSTNAME}:${REDASH_DATABASE_PORT}/${REDASH_DATABASE_NAME}
fi

scheduler() {
  echo "Starting RQ scheduler..."

  exec /app/manage.py rq scheduler
}

dev_scheduler() {
  echo "Starting dev RQ scheduler..."

  exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq scheduler
  case $REDASH_PRODUCTION in
    true)
      echo "Starting RQ scheduler in production mode"
      exec ./manage.py rq scheduler
      ;;
    *)
      echo "Starting RQ scheduler in dev mode"
      exec watchmedo auto-restart \
        --directory=./redash/ \
        --pattern=*.py \
        --recursive -- ./manage.py rq scheduler $QUEUES
      ;;
  esac
}

worker() {
  echo "Starting RQ worker..."

  export WORKERS_COUNT=${WORKERS_COUNT:-2}
  export QUEUES=${QUEUES:-}

  case $REDASH_PRODUCTION in
    true)
      echo "Starting RQ worker in production mode"
      exec supervisord -c worker.conf
      ;;
    *)
      echo "Starting RQ worker in dev mode"
      exec watchmedo auto-restart \
        --directory=./redash/ \
        --pattern=*.py \
        --recursive -- ./manage.py rq worker $QUEUES
      ;;
  esac
}

workers_healthcheck() {
@@ -35,22 +58,65 @@ workers_healthcheck() {
  fi
}

dev_worker() {
  echo "Starting dev RQ worker..."

  exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq worker $QUEUES
}

server() {
  # Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details.
  case $REDASH_PRODUCTION in
    true)
      echo "Starting Redash Server in production mode"
      MAX_REQUESTS=${MAX_REQUESTS:-1000}
      MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
      TIMEOUT=${REDASH_GUNICORN_TIMEOUT:-60}
      exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER --timeout $TIMEOUT
      exec /usr/local/bin/gunicorn \
        -b 0.0.0.0:5000 \
        --name redash \
        -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app \
        --max-requests $MAX_REQUESTS \
        --max-requests-jitter $MAX_REQUESTS_JITTER \
        --timeout $TIMEOUT
      ;;
    *)
      echo "Starting Redash Server in a dev mode"
      export FLASK_DEBUG=1
      exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
      ;;
  esac
}

create_db() {
  exec /app/manage.py database create_tables
  REDASH_DATABASE_MIGRATE_TIMEOUT=${REDASH_DATABASE_UPGRADE_TIMEOUT:-600}
  REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS=${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS:-5}
  REDASH_DATABASE_MIGRATE_RETRY_WAIT=${REDASH_DATABASE_MIGRATE_RETRY_WAIT:-10}
  ATTEMPTS=0
  while ((ATTEMPTS < REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS)); do
    echo "Starting attempt ${ATTEMPTS} of ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS}"
    ATTEMPTS=$((ATTEMPTS+1))
    timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py database create_tables
    timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py db upgrade
    STATUS=$(timeout $REDASH_DATABASE_MIGRATE_TIMEOUT /app/manage.py status 2>&1)
    RETCODE=$?
    echo "Return code: ${RETCODE}"
    echo "Status: ${STATUS}"
    case "$RETCODE" in
      0)
        exit 0
        ;;
      124)
        echo "Status command timed out after ${REDASH_DATABASE_MIGRATE_TIMEOUT} seconds."
        ;;
    esac
    case "$STATUS" in
      *sqlalchemy.exc.OperationalError*)
        echo "Database not yet functional, waiting."
        ;;
      *sqlalchemy.exc.ProgrammingError*)
        echo "Database does not appear to be installed."
        ;;
    esac
    echo "Waiting ${REDASH_DATABASE_MIGRATE_RETRY_WAIT} seconds before retrying."
    sleep ${REDASH_DATABASE_MIGRATE_RETRY_WAIT}
  done
  echo "Reached ${REDASH_DATABASE_MIGRATE_MAX_ATTEMPTS} attempts, giving up."
  exit 1
}

help() {
@@ -61,21 +127,16 @@ help() {
  echo "server -- start Redash server (with gunicorn)"
  echo "worker -- start a single RQ worker"
  echo "dev_worker -- start a single RQ worker with code reloading"
  echo "scheduler -- start an rq-scheduler instance"
  echo "dev_scheduler -- start an rq-scheduler instance with code reloading"
  echo ""
  echo "shell -- open shell"
  echo "dev_server -- start Flask development server with debugger and auto reload"
  echo "debug -- start Flask development server with remote debugger via ptvsd"
  echo "create_db -- create database tables"
  echo "create_db -- create database tables and run migrations"
  echo "manage -- CLI to manage redash"
  echo "tests -- run tests"
}

tests() {
  export REDASH_DATABASE_URL="postgresql://postgres@postgres/tests"

  if [ $# -eq 0 ]; then
    TEST_ARGS=tests/
  else
@@ -101,22 +162,10 @@ case "$1" in
    shift
    scheduler
    ;;
  dev_scheduler)
    shift
    dev_scheduler
    ;;
  dev_worker)
    shift
    dev_worker
    ;;
  celery_healthcheck)
    shift
    echo "DEPRECATED: Celery has been replaced with RQ and now performs healthchecks autonomously as part of the 'worker' entrypoint."
    ;;
  dev_server)
    export FLASK_DEBUG=1
    exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
    ;;
  debug)
    export FLASK_DEBUG=1
    export REMOTE_DEBUG=1
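The separate dev_worker and dev_scheduler commands are gone; the same worker, scheduler and server commands now branch on REDASH_PRODUCTION, which the compose files pass through from the host environment. Illustrative invocations under that assumption (service and command names as defined above):

    REDASH_PRODUCTION=true docker compose run --rm server worker   # supervisord-managed RQ workers
    docker compose run --rm server worker                          # default: watchmedo auto-restart dev mode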
(modified file: the Cypress helper script that drives docker compose; its path is not shown in this view)
@@ -43,18 +43,18 @@ function seedDatabase(seedValues) {

function buildServer() {
  console.log("Building the server...");
  execSync("docker compose -p cypress build", { stdio: "inherit" });
  execSync("docker compose build", { stdio: "inherit" });
}

function startServer() {
  console.log("Starting the server...");
  execSync("docker compose -p cypress up -d", { stdio: "inherit" });
  execSync("docker compose -p cypress run server create_db", { stdio: "inherit" });
  execSync("docker compose up -d", { stdio: "inherit" });
  execSync("docker compose run server create_db", { stdio: "inherit" });
}

function stopServer() {
  console.log("Stopping the server...");
  execSync("docker compose -p cypress down", { stdio: "inherit" });
  execSync("docker compose down", { stdio: "inherit" });
}

function runCypressCI() {
@@ -68,7 +68,7 @@ function runCypressCI() {
  }

  execSync(
    "COMMIT_INFO_MESSAGE=$(git show -s --format=%s) docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
    "docker compose run --name cypress cypress ./node_modules/.bin/percy exec -t 300 -- ./node_modules/.bin/cypress run $CYPRESS_OPTIONS",
    { stdio: "inherit" }
  );
}
compose.base.yaml: 22 lines (new file)
@@ -0,0 +1,22 @@
services:
  .redash:
    build:
      context: .
      args:
        FRONTEND_BUILD_MODE: ${FRONTEND_BUILD_MODE:-2}
        INSTALL_GROUPS: ${INSTALL_GROUPS:-main,all_ds,dev}
        CODE_COVERAGE: ${CODE_COVERAGE:-"false"}
    volumes:
      - $PWD:${SERVER_MOUNT:-/app}
    command: manage version
    environment:
      REDASH_LOG_LEVEL: "INFO"
      REDASH_REDIS_URL: "redis://redis:6379/0"
      REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
      REDASH_RATELIMIT_ENABLED: "false"
      REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
      REDASH_MAIL_SERVER: "email"
      REDASH_MAIL_PORT: "1025"
      REDASH_ENFORCE_CSRF: "true"
      REDASH_COOKIE_SECRET: ${REDASH_COOKIE_SECRET}
      REDASH_PRODUCTION: ${REDASH_PRODUCTION:-"false"}
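compose.base.yaml defines a single template service, .redash, whose build arguments and environment are driven by host-level variables such as FRONTEND_BUILD_MODE and INSTALL_GROUPS, so one definition can serve CI, e2e and local development; compose.yaml consumes it through extends. A sketch of how to see what a consumer ends up with (output depends on your .env):

    FRONTEND_BUILD_MODE=0 INSTALL_GROUPS=main docker compose config server   # print the merged definition the server service inherits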
compose.yaml: 76 changed lines
@@ -1,52 +1,38 @@
# This configuration file is for the **development** setup.
# For a production example please refer to getredash/setup repository on GitHub.
x-redash-service: &redash-service
  build:
    context: .
    args:
      skip_frontend_build: "true" # set to empty string to build
  volumes:
    - .:/app
  env_file:
    - .env
x-redash-environment: &redash-environment
  REDASH_LOG_LEVEL: "INFO"
  REDASH_REDIS_URL: "redis://redis:6379/0"
  REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
  REDASH_RATELIMIT_ENABLED: "false"
  REDASH_MAIL_DEFAULT_SENDER: "redash@example.com"
  REDASH_MAIL_SERVER: "email"
  REDASH_MAIL_PORT: 1025
  REDASH_ENFORCE_CSRF: "true"
  REDASH_GUNICORN_TIMEOUT: 60
  # Set secret keys in the .env file
services:
  server:
    <<: *redash-service
    command: dev_server
    extends:
      file: compose.base.yaml
      service: .redash
    command: server
    depends_on:
      - postgres
      - redis
    ports:
      - "5001:5000"
      - "5000:5000"
      - "5678:5678"
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  scheduler:
    <<: *redash-service
    command: dev_scheduler
    extends:
      file: compose.base.yaml
      service: .redash
    profiles:
      - e2e
      - local
    command: scheduler
    depends_on:
      - server
    environment:
      <<: *redash-environment
  worker:
    <<: *redash-service
    command: dev_worker
    extends:
      file: compose.base.yaml
      service: .redash
    profiles:
      - e2e
      - local
    command: worker
    depends_on:
      - server
    environment:
      <<: *redash-environment
      PYTHONUNBUFFERED: 0
  redis:
    image: redis:7-alpine
@@ -68,3 +54,27 @@ services:
      - "1080:1080"
      - "1025:1025"
    restart: unless-stopped
  cypress:
    ipc: host
    build:
      context: .
      dockerfile: Dockerfile.cypress
    profiles:
      - e2e
    depends_on:
      - server
      - worker
      - scheduler
    environment:
      CYPRESS_baseUrl: "http://server:5000"
      PERCY_TOKEN: ${PERCY_TOKEN:-""}
      PERCY_BRANCH: ${PERCY_BRANCH:-""}
      PERCY_COMMIT: ${PERCY_COMMIT:-""}
      PERCY_PULL_REQUEST: ${PERCY_PULL_REQUEST:-""}
      COMMIT_INFO_BRANCH: ${COMMIT_INFO_BRANCH:-""}
      COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE:-""}
      COMMIT_INFO_AUTHOR: ${COMMIT_INFO_AUTHOR:-""}
      COMMIT_INFO_SHA: ${COMMIT_INFO_SHA:-""}
      COMMIT_INFO_REMOTE: ${COMMIT_INFO_REMOTE:-""}
      CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID:-""}
      CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY:-""}
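The scheduler, worker and cypress services are now gated behind compose profiles, so a plain docker compose up brings up only the server and its unprofiled dependencies. Illustrative invocations matching the profiles declared above:

    docker compose up -d                          # server plus redis, postgres and any other unprofiled services
    COMPOSE_PROFILES=local docker compose up -d   # additionally start scheduler and worker
    COMPOSE_PROFILES=e2e docker compose up -d     # full stack used by the Cypress job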
(four Python files, one line of formatting added around each module docstring; filenames are not shown in this view)
@@ -3,6 +3,7 @@ This will eventually replace all the `to_dict` methods of the different model
classes we have. This will ensure cleaner code and better
separation of concerns.
"""

from flask_login import current_user
from funcy import project
from rq.job import JobStatus

@@ -1,6 +1,7 @@
"""
Some test cases around the Glue catalog.
"""

from unittest import TestCase

import botocore

@@ -1,6 +1,7 @@
"""
Some test cases for JSON api runner
"""

from unittest import TestCase
from urllib.parse import urlencode, urljoin

@@ -1,6 +1,7 @@
"""
Some test cases for Trino.
"""

from unittest import TestCase
from unittest.mock import patch