Compare commits

..

3 Commits

Author           SHA1        Date                        Message
Arik Fraimovich  22aa64571a  2017-10-22 13:27:12 +03:00  Set sslmode to prefer, otherwise it might fail with clusters that don't have the new certificate.
Arik Fraimovich  621405436d  2017-10-22 09:52:43 +03:00  Update version & CircleCI configuration
Arik Fraimovich  4c6fdba1d1  2017-10-22 09:47:00 +03:00  Redshift: add support for the new ACM root CA.
1382 changed files with 36708 additions and 136469 deletions


@@ -1,12 +0,0 @@
FROM cypress/browsers:node14.0.0-chrome84
ENV APP /usr/src/app
WORKDIR $APP
COPY package.json package-lock.json $APP/
COPY viz-lib $APP/viz-lib
RUN npm ci > /dev/null
COPY . $APP
RUN ./node_modules/.bin/cypress verify


@@ -1,177 +0,0 @@
version: 2.0
build-docker-image-job: &build-docker-image-job
docker:
- image: circleci/node:12
steps:
- setup_remote_docker
- checkout
- run: sudo apt update
- run: sudo apt install python3-pip
- run: sudo pip3 install -r requirements_bundles.txt
- run: .circleci/update_version
- run: npm run bundle
- run: .circleci/docker_build
jobs:
backend-lint:
docker:
- image: circleci/python:3.7.0
steps:
- checkout
- run: sudo pip install flake8
- run: ./bin/flake8_tests.sh
backend-unit-tests:
environment:
COMPOSE_FILE: .circleci/docker-compose.circle.yml
COMPOSE_PROJECT_NAME: redash
docker:
- image: circleci/buildpack-deps:xenial
steps:
- setup_remote_docker
- checkout
- run:
name: Build Docker Images
command: |
set -x
docker-compose build --build-arg skip_ds_deps=true --build-arg skip_frontend_build=true
docker-compose up -d
sleep 10
- run:
name: Create Test Database
command: docker-compose run --rm postgres psql -h postgres -U postgres -c "create database tests;"
- run:
name: List Enabled Query Runners
command: docker-compose run --rm redash manage ds list_types
- run:
name: Run Tests
command: docker-compose run --name tests redash tests --junitxml=junit.xml --cov-report xml --cov=redash --cov-config .coveragerc tests/
- run:
name: Copy Test Results
command: |
mkdir -p /tmp/test-results/unit-tests
docker cp tests:/app/coverage.xml ./coverage.xml
docker cp tests:/app/junit.xml /tmp/test-results/unit-tests/results.xml
when: always
- store_test_results:
path: /tmp/test-results
- store_artifacts:
path: coverage.xml
frontend-lint:
environment:
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
docker:
- image: circleci/node:12
steps:
- checkout
- run: mkdir -p /tmp/test-results/eslint
- run: npm ci
- run: npm run lint:ci
- store_test_results:
path: /tmp/test-results
frontend-unit-tests:
environment:
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
docker:
- image: circleci/node:12
steps:
- checkout
- run: sudo apt update
- run: sudo apt install python3-pip
- run: sudo pip3 install -r requirements_bundles.txt
- run: npm ci
- run: npm run bundle
- run:
name: Run App Tests
command: npm test
- run:
name: Run Visualizations Tests
command: (cd viz-lib && npm test)
- run: npm run lint
frontend-e2e-tests:
environment:
COMPOSE_FILE: .circleci/docker-compose.cypress.yml
COMPOSE_PROJECT_NAME: cypress
PERCY_TOKEN_ENCODED: ZGRiY2ZmZDQ0OTdjMzM5ZWE0ZGQzNTZiOWNkMDRjOTk4Zjg0ZjMxMWRmMDZiM2RjOTYxNDZhOGExMjI4ZDE3MA==
CYPRESS_PROJECT_ID_ENCODED: OTI0Y2th
CYPRESS_RECORD_KEY_ENCODED: YzA1OTIxMTUtYTA1Yy00NzQ2LWEyMDMtZmZjMDgwZGI2ODgx
CYPRESS_INSTALL_BINARY: 0
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD: 1
docker:
- image: circleci/node:12
steps:
- setup_remote_docker
- checkout
- run:
name: Enable Code Coverage report for master branch
command: |
if [ "$CIRCLE_BRANCH" = "master" ]; then
echo 'export CODE_COVERAGE=true' >> $BASH_ENV
source $BASH_ENV
fi
- run:
name: Install npm dependencies
command: |
npm ci
- run:
name: Setup Redash server
command: |
npm run cypress build
npm run cypress start -- --skip-db-seed
docker-compose run cypress npm run cypress db-seed
- run:
name: Execute Cypress tests
command: npm run cypress run-ci
- run:
name: "Failure: output container logs to console"
command: |
docker-compose logs
when: on_fail
- run:
name: Copy Code Coverage results
command: |
docker cp cypress:/usr/src/app/coverage ./coverage || true
when: always
- store_artifacts:
path: coverage
build-docker-image: *build-docker-image-job
build-preview-docker-image: *build-docker-image-job
workflows:
version: 2
build:
jobs:
- backend-lint
- backend-unit-tests:
requires:
- backend-lint
- frontend-lint
- frontend-unit-tests:
requires:
- backend-lint
- frontend-lint
- frontend-e2e-tests:
requires:
- frontend-lint
- build-preview-docker-image:
requires:
- backend-unit-tests
- frontend-unit-tests
- frontend-e2e-tests
filters:
branches:
only:
- master
- hold:
type: approval
requires:
- backend-unit-tests
- frontend-unit-tests
- frontend-e2e-tests
filters:
branches:
only:
- /release\/.*/
- build-docker-image:
requires:
- hold
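Aside: the frontend-e2e-tests job above keeps its Percy and Cypress credentials base64-encoded (PERCY_TOKEN_ENCODED, CYPRESS_PROJECT_ID_ENCODED, CYPRESS_RECORD_KEY_ENCODED), while .circleci/docker-compose.cypress.yml further down consumes the decoded PERCY_TOKEN, CYPRESS_PROJECT_ID and CYPRESS_RECORD_KEY. The decode step itself is not part of this diff; a minimal, hypothetical Python sketch of what such a step typically looks like:

# Hypothetical sketch only -- the real decode lives in the repo's Cypress tooling
# and is not shown in this diff. It turns the *_ENCODED values defined above into
# the plain variables that docker-compose.cypress.yml reads.
import base64
import os

for name in ("PERCY_TOKEN", "CYPRESS_PROJECT_ID", "CYPRESS_RECORD_KEY"):
    encoded = os.environ.get(name + "_ENCODED")
    if encoded:
        os.environ[name] = base64.b64decode(encoded).decode("utf-8").strip()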


@@ -1,22 +0,0 @@
version: '2.2'
services:
redash:
build: ../
command: manage version
depends_on:
- postgres
- redis
ports:
- "5000:5000"
environment:
PYTHONUNBUFFERED: 0
REDASH_LOG_LEVEL: "INFO"
REDASH_REDIS_URL: "redis://redis:6379/0"
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
redis:
image: redis:3.0-alpine
restart: unless-stopped
postgres:
image: postgres:9.5.6-alpine
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped


@@ -1,71 +0,0 @@
version: "2.2"
x-redash-service: &redash-service
build:
context: ../
args:
skip_dev_deps: "true"
skip_ds_deps: "true"
code_coverage: ${CODE_COVERAGE}
x-redash-environment: &redash-environment
REDASH_LOG_LEVEL: "INFO"
REDASH_REDIS_URL: "redis://redis:6379/0"
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
REDASH_RATELIMIT_ENABLED: "false"
REDASH_ENFORCE_CSRF: "true"
services:
server:
<<: *redash-service
command: server
depends_on:
- postgres
- redis
ports:
- "5000:5000"
environment:
<<: *redash-environment
PYTHONUNBUFFERED: 0
scheduler:
<<: *redash-service
command: scheduler
depends_on:
- server
environment:
<<: *redash-environment
worker:
<<: *redash-service
command: worker
depends_on:
- server
environment:
<<: *redash-environment
PYTHONUNBUFFERED: 0
cypress:
ipc: host
build:
context: ../
dockerfile: .circleci/Dockerfile.cypress
depends_on:
- server
- worker
- scheduler
environment:
CYPRESS_baseUrl: "http://server:5000"
CYPRESS_coverage: ${CODE_COVERAGE}
PERCY_TOKEN: ${PERCY_TOKEN}
PERCY_BRANCH: ${CIRCLE_BRANCH}
PERCY_COMMIT: ${CIRCLE_SHA1}
PERCY_PULL_REQUEST: ${CIRCLE_PR_NUMBER}
COMMIT_INFO_BRANCH: ${CIRCLE_BRANCH}
COMMIT_INFO_MESSAGE: ${COMMIT_INFO_MESSAGE}
COMMIT_INFO_AUTHOR: ${CIRCLE_USERNAME}
COMMIT_INFO_SHA: ${CIRCLE_SHA1}
COMMIT_INFO_REMOTE: ${CIRCLE_REPOSITORY_URL}
CYPRESS_PROJECT_ID: ${CYPRESS_PROJECT_ID}
CYPRESS_RECORD_KEY: ${CYPRESS_RECORD_KEY}
redis:
image: redis:3.0-alpine
restart: unless-stopped
postgres:
image: postgres:9.5.6-alpine
command: "postgres -c fsync=off -c full_page_writes=off -c synchronous_commit=OFF"
restart: unless-stopped


@@ -1,17 +0,0 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
VERSION_TAG=$VERSION.b$CIRCLE_BUILD_NUM
docker login -u $DOCKER_USER -p $DOCKER_PASS
if [ $CIRCLE_BRANCH = master ] || [ $CIRCLE_BRANCH = preview-image ]
then
docker build --build-arg skip_dev_deps=true -t redash/redash:preview -t redash/preview:$VERSION_TAG .
docker push redash/redash:preview
docker push redash/preview:$VERSION_TAG
else
docker build --build-arg skip_dev_deps=true -t redash/redash:$VERSION_TAG .
docker push redash/redash:$VERSION_TAG
fi
echo "Built: $VERSION_TAG"


@@ -1,9 +0,0 @@
#!/bin/bash
NAME=redash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM
FILENAME=$NAME.$FULL_VERSION.tar.gz
mkdir -p /tmp/artifacts/
tar -zcv -f /tmp/artifacts/$FILENAME --exclude=".git" --exclude="optipng*" --exclude="cypress" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" *


@@ -1,6 +0,0 @@
#!/bin/bash
VERSION=$(jq -r .version package.json)
FULL_VERSION=$VERSION+b$CIRCLE_BUILD_NUM
sed -ri "s/^__version__ = '([A-Za-z0-9.-]*)'/__version__ = '$FULL_VERSION'/" redash/__init__.py
sed -i "s/dev/$CIRCLE_SHA1/" client/app/version.json

.codeclimate.yml

@@ -0,0 +1,22 @@
engines:
pep8:
enabled: true
eslint:
enabled: true
channel: "eslint-3"
config:
config: client/.eslintrc.js
checks:
import/no-unresolved:
enabled: false
ratings:
paths:
- "redash/**/*.py"
- "client/**/*.js"
exclude_paths:
- tests/**/*.py
- migrations/**/*.py
- old_migrations/**/*.py
- setup/**/*
- bin/**/*


@@ -1,9 +1,5 @@
-[run]
-branch = True
-source = redash
 [report]
 omit =
 */settings.py
 */python?.?/*
-show_missing = True
+*/site-packages/nose/*


@@ -1,15 +1,4 @@
 client/.tmp/
-client/dist/
 node_modules/
-viz-lib/node_modules/
 .tmp/
-.venv/
-venv/
 .git/
-/.codeclimate.yml
-/.coverage
-/coverage.xml
-/.circleci/
-/.github/
-/netlify.toml
-/setup/


@@ -9,6 +9,6 @@ trim_trailing_whitespace = true
 indent_style = space
 indent_size = 4
-[*.{js,jsx,css,less,html}]
+[*.{js,css,html}]
 indent_style = space
 indent_size = 2

.github/ISSUE_TEMPLATE.md

@@ -0,0 +1,24 @@
Welcome to Redash's GitHub repo! 👋🎉
Do you need help or have a question? Checkout the Support category in our discussion forum: https://discuss.redash.io/c/support.
Got an idea for a new feature? Check if it isn't on the roadmap already: http://bit.ly/redash-roadmap and start a new discussion in the features category: https://discuss.redash.io/c/feature-requests 🌟.
Found a bug? Please fill out the sections below... thank you 👍
### Issue Summary
A summary of the issue and the browser/OS environment in which it occurs.
### Steps to Reproduce
1. This is the first step
2. This is the second step, etc.
Any other info e.g. Why do you consider this to be a bug? What did you expect to happen instead?
### Technical details:
* Redash Version:
* Browser/OS:
* How did you install Redash:


@@ -1,34 +0,0 @@
---
name: "\U0001F41B Bug report"
about: Report reproducible software issues so we can improve
---
<!--
We use GitHub only for bug reports 🐛
Anything else should be posted to https://discuss.redash.io 👫
🚨For support, help & questions use https://discuss.redash.io/c/support
💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests
**Found a security vulnerability?** Please email security@redash.io to report any security vulnerabilities. We will acknowledge receipt of your vulnerability and strive to send you regular updates about our progress. If you're curious about the status of your disclosure please feel free to email us again. If you want to encrypt your disclosure email, you can use this PGP key.
-->
### Issue Summary
A summary of the issue and the browser/OS environment in which it occurs.
### Steps to Reproduce
1. This is the first step
2. This is the second step, etc.
Any other info e.g. Why do you consider this to be a bug? What did you expect to happen instead?
### Technical details:
* Redash Version:
* Browser/OS:
* How did you install Redash:


@@ -1,17 +0,0 @@
---
name: "\U0001F4A1Anything else"
about: "For help, support, features & ideas - please use https://discuss.redash.io \U0001F46B "
labels: "Support Question"
---
We use GitHub only for bug reports 🐛
Anything else should be posted to https://discuss.redash.io 👫
🚨For support, help & questions use https://discuss.redash.io/c/support
💡For feature requests & ideas use https://discuss.redash.io/c/feature-requests
Alternatively, check out these resources below. Thanks! 😁.
- [Forum](https://disucss.redash.io)
- [Knowledge Base](https://redash.io/help)


@@ -1,15 +0,0 @@
## What type of PR is this? (check all applicable)
<!-- Please leave only what's applicable -->
- [ ] Refactor
- [ ] Feature
- [ ] Bug Fix
- [ ] New Query Runner (Data Source)
- [ ] New Alert Destination
- [ ] Other
## Description
## Related Tickets & Documents
## Mobile & Desktop Screenshots/Recordings (if there are UI changes)

.github/config.yml

@@ -1,5 +0,0 @@
# https://github.com/behaviorbot/request-info?installation_id=189571
requestInfoLabelToAdd: needs-more-info
requestInfoReplyComment: >
We would appreciate it if you could provide us with more info about this issue/pr!

.github/support.yml

@@ -1,23 +0,0 @@
# Configuration for Support Requests - https://github.com/dessant/support-requests
# Label used to mark issues as support requests
supportLabel: Support Question
# Comment to post on issues marked as support requests, `{issue-author}` is an
# optional placeholder. Set to `false` to disable
supportComment: >
:wave: @{issue-author}, we use the issue tracker exclusively for bug reports
and planned work. However, this issue appears to be a support request.
Please use [our forum](https://discuss.redash.io) to get help.
# Close issues marked as support requests
close: true
# Lock issues marked as support requests
lock: false
# Assign `off-topic` as the reason for locking. Set to `false` to disable
setLockReason: true
# Repository to extend settings from
# _extends: repo


@@ -1,7 +0,0 @@
# Configuration for weekly-digest - https://github.com/apps/weekly-digest
publishDay: mon
canPublishIssues: true
canPublishPullRequests: true
canPublishContributors: true
canPublishStargazers: true
canPublishCommits: true

.gitignore

@@ -1,22 +1,22 @@
-.venv
-venv/
-.cache
-.coverage.*
 .coveralls.yml
 .idea
 *.pyc
-.nyc_output
-coverage
 .coverage
-coverage.xml
 client/dist
 .DS_Store
-celerybeat-schedule*
 .#*
 \#*#
 *~
 _build
-.vscode
+# Vagrant related
+.vagrant
+Berksfile.lock
+redash/dump.rdb
 .env
+.ruby-version
+venv
 dump.rdb
@@ -24,6 +24,3 @@ node_modules
 .tmp
 .sass-cache
 npm-debug.log
-client/cypress/screenshots
-client/cypress/videos


@@ -1,63 +0,0 @@
enabled: true
auto: false
# Open Restyle PRs?
pull_requests: true
# Leave comments on the original PR linking to the Restyle PR?
comments: true
# Set commit statuses on the original PR?
statuses:
# Red status in the case of differences found
differences: true
# Green status in the case of no differences found
no_differences: true
# Red status if we encounter errors restyling
error: true
# Request review on the Restyle PR?
#
# Possible values:
#
# author: From the author of the original PR
# owner: From the owner of the repository
# none: Don't
#
# One value will apply to both origin and forked PRs, but you can also specify
# separate values.
#
# request_review:
# origin: author
# forked: owner
#
request_review: author
# Add labels to any created Restyle PRs
#
# These can be used to tell other automation to avoid our PRs.
#
labels: ["Skip CI"]
# Labels to ignore
#
# PRs with any of these labels will be ignored by Restyled.
#
# ignore_labels:
# - restyled-ignore
# Restylers to run, and how
restylers:
- name: black
image: restyled/restyler-black:v19.10b0
include:
- redash
- tests
- migrations/versions
- name: prettier
image: restyled/restyler-prettier:v1.19.1-2
include:
- client/app/**/*.js
- client/app/**/*.jsx
- client/cypress/**/*.js

File diff suppressed because it is too large.


@@ -6,9 +6,11 @@ The following is a set of guidelines for contributing to Redash. These are guide
 ## Quick Links:
+- [Feature Roadmap](https://trello.com/b/b2LUHU7A/redash-roadmap)
 - [Feature Requests](https://discuss.redash.io/c/feature-requests)
+- [Gitter Chat](https://gitter.im/getredash/redash) or [Slack](https://slack.redash.io)
 - [Documentation](https://redash.io/help/)
-- [Blog](https://blog.redash.io/)
+- [Blog](http://blog.redash.io/)
 - [Twitter](https://twitter.com/getredash)
 ---
@@ -46,32 +48,30 @@ When creating a new bug report, please make sure to:
 If you would like to suggest an enhancement or ask for a new feature:
-- Please check [the forum](https://discuss.redash.io/c/feature-requests/5) for existing threads about what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
+- Please check [the roadmap](https://trello.com/b/b2LUHU7A/redash-roadmap) for existing Trello card for what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
-- If there is no open thread, you're welcome to start one to have a discussion about what you want to suggest. Try to provide as much details and context as possible and include information about *the problem you want to solve* rather only *your proposed solution*.
+- If there is no existing card, open a thread in [the forum](https://discuss.redash.io/c/feature-requests) to start a discussion about what you want to suggest. Try to provide as much details and context as possible and include information about *the problem you want to solve* rather only *your proposed solution*.
 ### Pull Requests
 - **Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it.
 - Include screenshots and animated GIFs in your pull request whenever possible.
 - Please add [documentation](#documentation) for new features or changes in functionality along with the code.
-- Please follow existing code style:
+- Please follow existing code style. We use PEP8 for Python and sensible style for JavaScript.
-- Python: we use [Black](https://github.com/psf/black) to auto format the code.
-- Javascript: we use [Prettier](https://github.com/prettier/prettier) to auto-format the code.
 ### Documentation
-The project's documentation can be found at [https://redash.io/help/](https://redash.io/help/). The [documentation sources](https://github.com/getredash/website/tree/master/src/pages/kb) are hosted on GitHub. To contribute edits / new pages, you can use GitHub's interface. Click the "Edit on GitHub" link on the documentation page to quickly open the edit interface.
+The project's documentation can be found at [https://redash.io/help/](https://redash.io/help/). The [documentation sources](https://github.com/getredash/website/tree/master/user-guide) are hosted on GitHub. To contribute edits / new pages, you can use GitHub's interface. Click the "Edit on GitHub" link on the documentation page to quickly open the edit interface.
 ## Additional Notes
 ### Release Method
-We publish a stable release every ~3-4 months, although the goal is to get to a stable release every month.
+We publish a stable release every ~2 months, although the goal is to get to a stable release every month. You can see the change log on [GitHub releases page](http://github.com/getredash/redash/releases).
-Every build of the master branch updates the *redash/redash:preview* Docker Image. These releases are usually stable, but might contain regressions and therefore recommended for "advanced users" only.
+Every build of the master branch updates the latest *RC release*. These releases are usually stable, but might contain regressions and therefore recommended for "advanced users" only.
 When we release a new stable release, we also update the *latest* Docker image tag, the EC2 AMIs and GCE images.
 ## Code of Conduct
-This project adheres to the Contributor Covenant [code of conduct](https://redash.io/community/code_of_conduct). By participating, you are expected to uphold this code. Please report unacceptable behavior to team@redash.io.
+This project adheres to the Contributor Covenant [code of conduct](http://redash.io/community/code_of_conduct). By participating, you are expected to uphold this code. Please report unacceptable behavior to team@redash.io.


@@ -1,94 +1,13 @@
-FROM node:12 as frontend-builder
+FROM redash/base:latest
-# Controls whether to build the frontend assets
-ARG skip_frontend_build
-ENV CYPRESS_INSTALL_BINARY=0
-ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1
-RUN useradd -m -d /frontend redash
-USER redash
-WORKDIR /frontend
-COPY --chown=redash package.json package-lock.json /frontend/
-COPY --chown=redash viz-lib /frontend/viz-lib
-# Controls whether to instrument code for coverage information
-ARG code_coverage
-ENV BABEL_ENV=${code_coverage:+test}
-RUN if [ "x$skip_frontend_build" = "x" ] ; then npm ci --unsafe-perm; fi
-COPY --chown=redash client /frontend/client
-COPY --chown=redash webpack.config.js /frontend/
-RUN if [ "x$skip_frontend_build" = "x" ] ; then npm run build; else mkdir -p /frontend/client/dist && touch /frontend/client/dist/multi_org.html && touch /frontend/client/dist/index.html; fi
-FROM python:3.7-slim
-EXPOSE 5000
-# Controls whether to install extra dependencies needed for all data sources.
-ARG skip_ds_deps
-# Controls whether to install dev dependencies.
-ARG skip_dev_deps
-RUN useradd --create-home redash
-# Ubuntu packages
-RUN apt-get update && \
-apt-get install -y \
-curl \
-gnupg \
-build-essential \
-pwgen \
-libffi-dev \
-sudo \
-git-core \
-wget \
-# Postgres client
-libpq-dev \
-# ODBC support:
-g++ unixodbc-dev \
-# for SAML
-xmlsec1 \
-# Additional packages required for data sources:
-libssl-dev \
-default-libmysqlclient-dev \
-freetds-dev \
-libsasl2-dev \
-unzip \
-libsasl2-modules-gssapi-mit && \
-# MSSQL ODBC Driver:
-curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
-curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
-apt-get update && \
-ACCEPT_EULA=Y apt-get install -y msodbcsql17 && \
-apt-get clean && \
-rm -rf /var/lib/apt/lists/*
-ARG databricks_odbc_driver_url=https://databricks.com/wp-content/uploads/2.6.10.1010-2/SimbaSparkODBC-2.6.10.1010-2-Debian-64bit.zip
-ADD $databricks_odbc_driver_url /tmp/simba_odbc.zip
-RUN unzip /tmp/simba_odbc.zip -d /tmp/ \
-&& dpkg -i /tmp/SimbaSparkODBC-*/*.deb \
-&& echo "[Simba]\nDriver = /opt/simba/spark/lib/64/libsparkodbc_sb64.so" >> /etc/odbcinst.ini \
-&& rm /tmp/simba_odbc.zip \
-&& rm -rf /tmp/SimbaSparkODBC*
-WORKDIR /app
-# Disalbe PIP Cache and Version Check
-ENV PIP_DISABLE_PIP_VERSION_CHECK=1
-ENV PIP_NO_CACHE_DIR=1
 # We first copy only the requirements file, to avoid rebuilding on every file
 # change.
-COPY requirements.txt requirements_bundles.txt requirements_dev.txt requirements_all_ds.txt ./
+COPY requirements.txt requirements_dev.txt requirements_all_ds.txt ./
-RUN if [ "x$skip_dev_deps" = "x" ] ; then pip install -r requirements.txt -r requirements_dev.txt; else pip install -r requirements.txt; fi
+RUN pip install -r requirements.txt -r requirements_dev.txt -r requirements_all_ds.txt
-RUN if [ "x$skip_ds_deps" = "x" ] ; then pip install -r requirements_all_ds.txt ; else echo "Skipping pip install -r requirements_all_ds.txt" ; fi
-COPY . /app
+COPY . ./
-COPY --from=frontend-builder /frontend/client/dist /app/client/dist
+RUN npm install && npm run build && rm -rf node_modules
 RUN chown -R redash /app
 USER redash
 ENTRYPOINT ["/app/bin/docker-entrypoint"]
-CMD ["server"]


@@ -1,4 +1,4 @@
-Copyright (c) 2013-2020, Arik Fraimovich.
+Copyright (c) 2013-2017, Arik Fraimovich.
 All rights reserved.
 Redistribution and use in source and binary forms, with or without modification,


@@ -1,57 +1,20 @@
-.PHONY: compose_build up test_db create_database clean down bundle tests lint backend-unit-tests frontend-unit-tests test build watch start redis-cli bash
+NAME=redash
+VERSION=`python ./manage.py version`
+FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
+BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
+# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
+FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
-compose_build:
+deps:
-docker-compose build
+if [ -d "./client/app" ]; then npm install; fi
+if [ -d "./client/app" ]; then npm run build; fi
-up:
+pack:
-docker-compose up -d --build
+sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
+tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="node_modules" *
-test_db:
+upload:
-@for i in `seq 1 5`; do \
+python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
-if (docker-compose exec postgres sh -c 'psql -U postgres -c "select 1;"' 2>&1 > /dev/null) then break; \
-else echo "postgres initializing..."; sleep 5; fi \
-done
-docker-compose exec postgres sh -c 'psql -U postgres -c "drop database if exists tests;" && psql -U postgres -c "create database tests;"'
-create_database:
+test:
-docker-compose run server create_db
+nosetests --with-coverage --cover-package=redash tests/
-clean:
-docker-compose down && docker-compose rm
-down:
-docker-compose down
-bundle:
-docker-compose run server bin/bundle-extensions
-tests:
-docker-compose run server tests
-lint:
-./bin/flake8_tests.sh
-backend-unit-tests: up test_db
-docker-compose run --rm --name tests server tests
-frontend-unit-tests: bundle
-CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 npm ci
-npm run bundle
-npm test
-test: lint backend-unit-tests frontend-unit-tests
-build: bundle
-npm run build
-watch: bundle
-npm run watch
-start: bundle
-npm run start
-redis-cli:
-docker-compose run --rm redis redis-cli -h redis
-bash:
-docker-compose run --rm server bash


@@ -1,96 +1,49 @@
 <p align="center">
 <img title="Redash" src='https://redash.io/assets/images/logo.png' width="200px"/>
 </p>
+<p align="center">
+<img title="Build Status" src='https://circleci.com/gh/getredash/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
+</p>
+[![Join the chat at https://gitter.im/getredash/redash](https://badges.gitter.im/getredash/redash.svg)](https://gitter.im/getredash/redash?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 [![Documentation](https://img.shields.io/badge/docs-redash.io/help-brightgreen.svg)](https://redash.io/help/)
-[![Datree](https://s3.amazonaws.com/catalog.static.datree.io/datree-badge-20px.svg)](https://datree.io/?src=badge)
-[![Build Status](https://circleci.com/gh/getredash/redash.png?style=shield&circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040)](https://circleci.com/gh/getredash/redash/tree/master)
-Redash is designed to enable anyone, regardless of the level of technical sophistication, to harness the power of data big and small. SQL users leverage Redash to explore, query, visualize, and share data from any data sources. Their work in turn enables anybody in their organization to use the data. Every day, millions of users at thousands of organizations around the world use Redash to develop insights and make data-driven decisions.
+**_Redash_** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
-Redash features:
+Prior to **_Redash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
-1. **Browser-based**: Everything in your browser, with a shareable URL.
+**_Redash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
-2. **Ease-of-use**: Become immediately productive with data without the need to master complex software.
+Today **_Redash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
-3. **Query editor**: Quickly compose SQL and NoSQL queries with a schema browser and auto-complete.
+Presto, Google Spreadsheets, Cloudera Impala, Hive and custom scripts.
-4. **Visualization and dashboards**: Create [beautiful visualizations](https://redash.io/help/user-guide/visualizations/visualization-types) with drag and drop, and combine them into a single dashboard.
-5. **Sharing**: Collaborate easily by sharing visualizations and their associated queries, enabling peer review of reports and queries.
-6. **Schedule refreshes**: Automatically update your charts and dashboards at regular intervals you define.
-7. **Alerts**: Define conditions and be alerted instantly when your data changes.
-8. **REST API**: Everything that can be done in the UI is also available through REST API.
-9. **Broad support for data sources**: Extensible data source API with native support for a long list of common databases and platforms.
-<img src="https://raw.githubusercontent.com/getredash/website/8e820cd02c73a8ddf4f946a9d293c54fd3fb08b9/website/_assets/images/redash-anim.gif" width="80%"/>
+**_Redash_** consists of two parts:
+1. **Query Editor**: think of [JS Fiddle](http://jsfiddle.net) for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it. Also it's possible to fork it and generate new datasets and reach new insights.
+2. **Dashboards/Visualizations**: once you have a dataset, you can create different visualizations out of it, and then combine several visualizations into a single dashboard. Currently it supports charts, pivot table and cohorts.
+## Demo
+<img src="https://cloud.githubusercontent.com/assets/71468/17391289/8e83878e-5a1d-11e6-8938-af9054a33b19.gif" width="60%"/>
+You can try out the demo instance: http://demo.redash.io/ (login with any Google account).
 ## Getting Started
-* [Setting up Redash instance](https://redash.io/help/open-source/setup) (includes links to ready-made AWS/GCE images).
+* [Setting up Redash instance](https://redash.io/help-onpremise/setup/setting-up-redash-instance.html) (includes links to ready made AWS/GCE images).
 * [Documentation](https://redash.io/help/).
-## Supported Data Sources
-Redash supports more than 35 SQL and NoSQL [data sources](https://redash.io/help/data-sources/supported-data-sources). It can also be extended to support more. Below is a list of built-in sources:
-- Amazon Athena
-- Amazon DynamoDB
-- Amazon Redshift
-- Axibase Time Series Database
-- Cassandra
-- ClickHouse
-- CockroachDB
-- CSV
-- Databricks (Apache Spark)
-- DB2 by IBM
-- Druid
-- Elasticsearch
-- Google Analytics
-- Google BigQuery
-- Google Spreadsheets
-- Graphite
-- Greenplum
-- Hive
-- Impala
-- InfluxDB
-- JIRA
-- JSON
-- Apache Kylin
-- OmniSciDB (Formerly MapD)
-- MemSQL
-- Microsoft Azure Data Warehouse / Synapse
-- Microsoft Azure SQL Database
-- Microsoft SQL Server
-- MongoDB
-- MySQL
-- Oracle
-- PostgreSQL
-- Presto
-- Prometheus
-- Python
-- Qubole
-- Rockset
-- Salesforce
-- ScyllaDB
-- Shell Scripts
-- Snowflake
-- SQLite
-- TreasureData
-- Vertica
-- Yandex AppMetrrica
-- Yandex Metrica
 ## Getting Help
 * Issues: https://github.com/getredash/redash/issues
 * Discussion Forum: https://discuss.redash.io/
+* Slack: http://slack.redash.io/
+* Gitter (chat): https://gitter.im/getredash/redash
 ## Reporting Bugs and Contributing Code
 * Want to report a bug or request a feature? Please open [an issue](https://github.com/getredash/redash/issues/new).
-* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://redash.io/help-onpremise/dev/guide.html) and make a pull request. We need all the help we can get!
+* Want to help us build **_Redash_**? Fork the project, edit in a [dev environment](https://redash.io/help-onpremise/setup/setting-up-development-environment-using-vagrant.html), and make a pull request. We need all the help we can get!
-## Security
-Please email security@redash.io to report any security vulnerabilities. We will acknowledge receipt of your vulnerability and strive to send you regular updates about our progress. If you're curious about the status of your disclosure please feel free to email us again. If you want to encrypt your disclosure email, you can use [this PGP key](https://keybase.io/arikfr/key.asc).
 ## License


@@ -1,5 +0,0 @@
# Security Policy
## Reporting a Vulnerability
Please email security@redash.io to report any security vulnerabilities. We will acknowledge receipt of your vulnerability and strive to send you regular updates about our progress. If you're curious about the status of your disclosure please feel free to email us again. If you want to encrypt your disclosure email, you can use [this PGP key](https://keybase.io/arikfr/key.asc).


@@ -1,115 +0,0 @@
#!/usr/bin/env python3
"""Copy bundle extension files to the client/app/extension directory"""
import logging
import os
from pathlib import Path
from shutil import copy
from collections import OrderedDict as odict
import importlib_metadata
import importlib_resources
# Name of the subdirectory
BUNDLE_DIRECTORY = "bundle"
logger = logging.getLogger(__name__)
# Make a directory for extensions and set it as an environment variable
# to be picked up by webpack.
extensions_relative_path = Path("client", "app", "extensions")
extensions_directory = Path(__file__).parent.parent / extensions_relative_path
if not extensions_directory.exists():
extensions_directory.mkdir()
os.environ["EXTENSIONS_DIRECTORY"] = str(extensions_relative_path)
def entry_point_module(entry_point):
"""Returns the dotted module path for the given entry point"""
return entry_point.pattern.match(entry_point.value).group("module")
def load_bundles():
""""Load bundles as defined in Redash extensions.
The bundle entry point can be defined as a dotted path to a module
or a callable, but it won't be called but just used as a means
to find the files under its file system path.
The name of the directory it looks for files in is "bundle".
So a Python package with an extension bundle could look like this::
my_extensions/
├── __init__.py
└── wide_footer
├── __init__.py
└── bundle
├── extension.js
└── styles.css
and would then need to register the bundle with an entry point
under the "redash.bundles" group, e.g. in your setup.py::
setup(
# ...
entry_points={
"redash.bundles": [
"wide_footer = my_extensions.wide_footer",
]
# ...
},
# ...
)
"""
bundles = odict()
for entry_point in importlib_metadata.entry_points().get("redash.bundles", []):
logger.info('Loading Redash bundle "%s".', entry_point.name)
module = entry_point_module(entry_point)
# Try to get a list of bundle files
try:
bundle_dir = importlib_resources.files(module).joinpath(BUNDLE_DIRECTORY)
except (ImportError, TypeError):
# Module isn't a package, so can't have a subdirectory/-package
logger.error(
'Redash bundle module "%s" could not be imported: "%s"',
entry_point.name,
module,
)
continue
if not bundle_dir.is_dir():
logger.error(
'Redash bundle directory "%s" could not be found or is not a directory: "%s"',
entry_point.name,
bundle_dir,
)
continue
bundles[entry_point.name] = list(bundle_dir.rglob("*"))
return bundles
bundles = load_bundles().items()
if bundles:
print("Number of extension bundles found: {}".format(len(bundles)))
else:
print("No extension bundles found.")
for bundle_name, paths in bundles:
# Shortcut in case not paths were found for the bundle
if not paths:
print('No paths found for bundle "{}".'.format(bundle_name))
continue
# The destination for the bundle files with the entry point name as the subdirectory
destination = Path(extensions_directory, bundle_name)
if not destination.exists():
destination.mkdir()
# Copy the bundle directory from the module to its destination.
print('Copying "{}" bundle to {}:'.format(bundle_name, destination.resolve()))
for src_path in paths:
dest_path = destination / src_path.name
print(" - {} -> {}".format(src_path, dest_path))
copy(str(src_path), str(dest_path))


@@ -1,42 +1,25 @@
 #!/bin/bash
 set -e
-scheduler() {
-echo "Starting RQ scheduler..."
-exec /app/manage.py rq scheduler
-}
-dev_scheduler() {
-echo "Starting dev RQ scheduler..."
-exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq scheduler
-}
 worker() {
-echo "Starting RQ worker..."
+WORKERS_COUNT=${WORKERS_COUNT:-2}
+QUEUES=${QUEUES:-queries,scheduled_queries,celery}
-export WORKERS_COUNT=${WORKERS_COUNT:-2}
+echo "Starting $WORKERS_COUNT workers for queues: $QUEUES..."
-export QUEUES=${QUEUES:-}
+exec /usr/local/bin/celery worker --app=redash.worker -c$WORKERS_COUNT -Q$QUEUES -linfo --maxtasksperchild=10 -Ofair
-exec supervisord -c worker.conf
 }
-dev_worker() {
+scheduler() {
-echo "Starting dev RQ worker..."
+WORKERS_COUNT=${WORKERS_COUNT:-1}
+QUEUES=${QUEUES:-celery}
-exec watchmedo auto-restart --directory=./redash/ --pattern=*.py --recursive -- ./manage.py rq worker $QUEUES
+echo "Starting scheduler and $WORKERS_COUNT workers for queues: $QUEUES..."
+exec /usr/local/bin/celery worker --app=redash.worker --beat -c$WORKERS_COUNT -Q$QUEUES -linfo --maxtasksperchild=10 -Ofair
 }
 server() {
-# Recycle gunicorn workers every n-th request. See http://docs.gunicorn.org/en/stable/settings.html#max-requests for more details.
+exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app
-MAX_REQUESTS=${MAX_REQUESTS:-1000}
-MAX_REQUESTS_JITTER=${MAX_REQUESTS_JITTER:-100}
-exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app --max-requests $MAX_REQUESTS --max-requests-jitter $MAX_REQUESTS_JITTER
 }
-create_db() {
-exec /app/manage.py database create_tables
-}
@@ -46,28 +29,18 @@ help() {
echo "" echo ""
echo "server -- start Redash server (with gunicorn)" echo "server -- start Redash server (with gunicorn)"
echo "worker -- start a single RQ worker" echo "worker -- start Celery worker"
echo "dev_worker -- start a single RQ worker with code reloading" echo "scheduler -- start Celery worker with a beat (scheduler) process"
echo "scheduler -- start an rq-scheduler instance"
echo "dev_scheduler -- start an rq-scheduler instance with code reloading"
echo "" echo ""
echo "shell -- open shell" echo "shell -- open shell"
echo "dev_server -- start Flask development server with debugger and auto reload" echo "dev_server -- start Flask development server with debugger and auto reload"
echo "debug -- start Flask development server with remote debugger via ptvsd"
echo "create_db -- create database tables" echo "create_db -- create database tables"
echo "manage -- CLI to manage redash" echo "manage -- CLI to manage redash"
echo "tests -- run tests"
} }
tests() { tests() {
export REDASH_DATABASE_URL="postgresql://postgres@postgres/tests" export REDASH_DATABASE_URL="postgresql://postgres@postgres/tests"
exec make test
if [ $# -eq 0 ]; then
TEST_ARGS=tests/
else
TEST_ARGS=$@
fi
exec pytest $TEST_ARGS
} }
case "$1" in case "$1" in
@@ -83,46 +56,23 @@ case "$1" in
 shift
 scheduler
 ;;
-dev_scheduler)
-shift
-dev_scheduler
-;;
-dev_worker)
-shift
-dev_worker
-;;
-celery_healthcheck)
-shift
-echo "DEPRECATED: Celery has been replaced with RQ and now performs healthchecks autonomously as part of the 'worker' entrypoint."
-;;
 dev_server)
-export FLASK_DEBUG=1
 exec /app/manage.py runserver --debugger --reload -h 0.0.0.0
 ;;
-debug)
-export FLASK_DEBUG=1
-export REMOTE_DEBUG=1
-exec /app/manage.py runserver --debugger --no-reload -h 0.0.0.0
-;;
 shell)
 exec /app/manage.py shell
 ;;
 create_db)
-create_db
+exec /app/manage.py database create_tables
 ;;
 manage)
 shift
 exec /app/manage.py $*
 ;;
 tests)
-shift
+tests
-tests $@
 ;;
-help)
-shift
-help
-;;
 *)
-exec "$@"
+help
 ;;
 esac


@@ -1,9 +0,0 @@
#!/bin/sh
set -o errexit # fail the build if any task fails
flake8 --version ; pip --version
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics


@@ -1,37 +0,0 @@
#!/bin/env python3
import sys
import re
import subprocess
def get_change_log(previous_sha):
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', 'master...{}'.format(previous_sha)]
log = subprocess.check_output(args)
changes = []
for line in log.split('\n'):
try:
sha, subject, body, parents = line[1:-1].split('|')
except ValueError:
continue
try:
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
pull_request = " #{}".format(pull_request)
except Exception as ex:
pull_request = ""
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
return changes
if __name__ == '__main__':
previous_sha = sys.argv[1]
changes = get_change_log(previous_sha)
for change in changes:
print(change)

bin/pre_compile

@@ -0,0 +1,18 @@
#!/usr/bin/env bash
# Heroku pre_compile script
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
pushd $DIR/..
# heroku requires cffi to be in requirements.txt in order for libffi to be installed.
# https://github.com/heroku/heroku-buildpack-python/blob/master/bin/steps/cryptography
# to avoid making it a requirement for other build systems, we'll inject it now
# into the requirements.txt file
# Remove Heroku unsupported Python packages:
grep -v -E "^(pymssql|thrift|sasl|pyhive)" requirements_all_ds.txt >> requirements.txt
# make the heroku Procfile the active one
cp Procfile.heroku Procfile
popd


@@ -1,10 +1,9 @@
-#!/usr/bin/env python3
 import os
 import sys
+import json
 import re
 import subprocess
 import requests
-import simplejson
 github_token = os.environ['GITHUB_TOKEN']
 auth = (github_token, 'x-oauth-basic')
@@ -17,7 +16,7 @@ def _github_request(method, path, params=None, headers={}):
 url = path
 if params is not None:
-params = simplejson.dumps(params)
+params = json.dumps(params)
 response = requests.request(method, url, data=params, auth=auth)
 return response
@@ -96,7 +95,7 @@ def get_changelog(commit_sha):
 try:
 pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
 pull_request = " #{}".format(pull_request)
-except Exception as ex:
+except Exception, ex:
 pull_request = ""
 author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
@@ -125,7 +124,7 @@ def update_release(version, build_filepath, commit_sha):
 else:
 release = create_release(version, commit_sha)
-print("Using release id: {}".format(release['id']))
+print "Using release id: {}".format(release['id'])
 remove_previous_builds(release)
 response = upload_asset(release, build_filepath)
@@ -136,8 +135,8 @@ def update_release(version, build_filepath, commit_sha):
 if response.status_code != 200:
 raise exception_from_error("Failed updating release description", response)
-except Exception as ex:
+except Exception, ex:
-print(ex)
+print ex
 if __name__ == '__main__':
 commit_sha = sys.argv[1]


@@ -1,5 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
-import urllib
 import argparse
 import os
 import subprocess
@@ -27,7 +26,7 @@ def run(cmd, cwd=None):
 def confirm(question):
-reply = str(input(question + ' (y/n): ')).lower().strip()
+reply = str(raw_input(question + ' (y/n): ')).lower().strip()
 if reply[0] == 'y':
 return True
@@ -75,13 +74,13 @@ class Release(namedtuple('Release', ('version', 'download_url', 'filename', 'des
 def get_latest_release_from_ci():
-response = requests.get('https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts?branch=master')
+response = requests.get('https://circleci.com/api/v1.1/project/github/getredash/redash/latest/artifacts')
 if response.status_code != 200:
 exit("Failed getting releases (status code: %s)." % response.status_code)
 tarball_asset = filter(lambda asset: asset['url'].endswith('.tar.gz'), response.json())[0]
-filename = urllib.unquote(tarball_asset['pretty_path'].split('/')[-1])
+filename = tarball_asset['pretty_path'].replace('$CIRCLE_ARTIFACTS/', '')
 version = filename.replace('redash.', '').replace('.tar.gz', '')
 release = Release(version, tarball_asset['url'], filename, '')
@@ -204,9 +203,7 @@ def show_description_and_confirm(description):
 def verify_newer_version(release):
 if not release.is_newer(current_version()):
-red("The found release is not newer than your current deployed release ({}).".format(current_version()))
+red("The found release is not newer than your current deployed release ({}). Aborting upgrade.".format(current_version()))
-if not confirm("Continue with upgrade?"):
-red("Cancelling upgrade.")
 exit(1)

circle.yml

@@ -0,0 +1,37 @@
machine:
services:
- docker
- redis
node:
version:
6.9.1
dependencies:
override:
- pip install --upgrade setuptools
- pip install -r requirements_dev.txt
- pip install -r requirements.txt
- make deps
cache_directories:
- node_modules/
test:
override:
- nosetests --with-xunit --xunit-file=$CIRCLE_TEST_REPORTS/junit.xml --with-coverage --cover-package=redash tests/
deployment:
github_and_docker:
branch: [master, /release.*/]
commands:
- make pack
# Skipping uploads for now, until master is stable.
# - make upload
#- echo "client/app" >> .dockerignore
#- docker pull redash/redash:latest
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
- docker build -t redash/redash:$(./manage.py version | sed -e "s/\+/./") .
- docker push redash/redash:$(./manage.py version | sed -e "s/\+/./")
notify:
webhooks:
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
general:
branches:
ignore:
- gh-pages


@@ -1,29 +1,4 @@
 {
-"presets": [
+"presets": ["es2015", "stage-2"],
-[
+"plugins": ["transform-object-assign"]
-"@babel/preset-env",
-{
-"exclude": ["@babel/plugin-transform-async-to-generator", "@babel/plugin-transform-arrow-functions"],
-"corejs": "2",
-"useBuiltIns": "usage"
-}
-],
-"@babel/preset-react",
-"@babel/preset-typescript"
-],
-"plugins": [
-"@babel/plugin-proposal-class-properties",
-"@babel/plugin-transform-object-assign",
-[
-"babel-plugin-transform-builtin-extend",
-{
-"globals": ["Error"]
-}
-]
-],
-"env": {
-"test": {
-"plugins": ["istanbul"]
-}
-}
 }


@@ -1,4 +1,2 @@
 build/*.js
-dist
 config/*.js
-client/dist


@@ -1,57 +1,15 @@
 module.exports = {
 root: true,
-parser: "@typescript-eslint/parser",
+extends: 'airbnb-base',
-extends: [
-"react-app",
-"plugin:compat/recommended",
-"prettier",
-// Remove any typescript-eslint rules that would conflict with prettier
-"prettier/@typescript-eslint",
-],
-plugins: ["jest", "compat", "no-only-tests", "@typescript-eslint"],
-settings: {
-"import/resolver": "webpack",
-},
 env: {
-browser: true,
+"browser": true,
-node: true,
+"node": true
 },
 rules: {
 // allow debugger during development
-"no-debugger": process.env.NODE_ENV === "production" ? 2 : 0,
+'no-param-reassign': 0,
-"jsx-a11y/anchor-is-valid": "off",
+'no-mixed-operators': 0,
-"no-restricted-imports": [
+'no-underscore-dangle': 0,
-"error",
+'no-debugger': process.env.NODE_ENV === 'production' ? 2 : 0
-{
+}
-paths: [
+}
-{
-name: "antd",
-message: "Please use 'import XXX from antd/lib/XXX' import instead.",
-},
-{
-name: "antd/lib",
-message: "Please use 'import XXX from antd/lib/XXX' import instead.",
-},
-],
-},
-],
-},
-overrides: [
-{
-// Only run typescript-eslint on TS files
-files: ["*.ts", "*.tsx", ".*.ts", ".*.tsx"],
-extends: ["plugin:@typescript-eslint/recommended"],
-rules: {
-// Do not require functions (especially react components) to have explicit returns
-"@typescript-eslint/explicit-function-return-type": "off",
-// Do not require to type every import from a JS file to speed up development
-"@typescript-eslint/no-explicit-any": "off",
-// Do not complain about useless contructors in declaration files
-"no-useless-constructor": "off",
-"@typescript-eslint/no-useless-constructor": "error",
-// Many API fields and generated types use camelcase
-"@typescript-eslint/camelcase": "off",
-},
-},
-],
-};


@@ -1,10 +0,0 @@
module.exports = {
extends: ["plugin:jest/recommended"],
plugins: ["jest"],
env: {
"jest/globals": true,
},
rules: {
"jest/no-focused-tests": "off",
},
};


@@ -1,4 +0,0 @@
import { configure } from "enzyme";
import Adapter from "enzyme-adapter-react-16";
configure({ adapter: new Adapter() });


@@ -1,5 +0,0 @@
import MockDate from "mockdate";
const date = new Date("2000-01-01T02:00:00.000");
MockDate.set(date);


@@ -0,0 +1,704 @@
body {
padding-top: 50px;
}
body.headless {
padding-top: 0px;
padding-bottom: 0px;
}
body.headless nav.app-header {
display: none;
}
body.headless div#footer {
display: none;
}
a[ng-click] {
cursor: pointer;
}
a.link {
cursor: pointer;
}
a.page-title {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
max-width: 400px;
}
a.navbar-brand {
padding: 5px 5px 0px 0px;
margin-left: 0px !important;
}
.navbar .fa {
font-size: 18px;
}
a.navbar-brand img {
height: 40px;
}
.avatar {
margin-top: 5px;
margin-bottom: 5px;
}
.avatar img {
width: 40px;
height: 40px;
}
#logout {
color: white;
position: relative;
left: -9px;
bottom: -11px;
}
.details-toggle {
cursor: pointer;
}
.details-toggle::before {
content: '▸';
margin-right: 5px;
}
.details-toggle.open::before {
content: '▾';
margin-right: 5px;
}
.edit-in-place span {
white-space: pre-line;
}
.edit-in-place span.editable {
cursor: pointer;
}
.edit-in-place span.editable:hover {
background: #FCFCA2;
}
.edit-in-place input,
.edit-in-place textarea {
display: none;
}
.edit-in-place.active span {
display: none;
}
.edit-in-place.active input,
.edit-in-place.active textarea {
display: inline-block;
}
.delete-button {
float: none !important;
}
.list-group-item.active button {
color: white;
}
.panel-heading > p:last-child {
margin-bottom: 0px;
}
.panel-heading > a,
.panel-heading .query-link {
color: inherit;
}
.panel-heading .query-link:hover {
text-decoration: underline;
}
.form-group.required .control-label:after {
content: "*";
color: red;
}
.form-group .help-block.error {
display: none;
}
.form-group.has-error .help-block.error {
display: block;
}
/* angular-growl */
.growl {
position: fixed;
bottom: 10px;
right: 10px;
float: right;
width: 250px;
z-index: 10000;
}
.growl-item.ng-enter,
.growl-item.ng-leave {
-webkit-transition: 0.5s linear all;
-moz-transition: 0.5s linear all;
-o-transition: 0.5s linear all;
transition: 0.5s linear all;
}
.growl-item.ng-enter,
.growl-item.ng-leave.ng-leave-active {
opacity: 0;
}
.growl-item.ng-leave,
.growl-item.ng-enter.ng-enter-active {
opacity: 1;
}
/* Gridster */
li.widget {
/*background-color:grey;*/
border-width: 1px;
border-style: solid;
border-color: grey;
opacity: 0.7;
cursor: move;
}
li.widget:hover {
opacity: 1.0 !important;
-webkit-transition: opacity .6s;
-moz-transition: opacity .6s;
-o-transition: opacity .6s;
-ms-transition: opacity .6s;
transition: opacity .6s;
}
.gridster .preview-holder {
border: none !important;
border-radius: 0 !important;
background: rgba(0, 0, 0, 0.5) !important;
}
.gridster li .heading {
border: #ddd;
background-color: #f5f5f5;
padding: 5px;
}
/* Editor */
.ace_editor {
border: 1px solid #eee;
height: 100%;
margin-bottom: 10px;
}
/* Support for Font-Awesome in btn-xs */
.btn-xs > .fa {
font-size: 14px;
top: 1px;
position: relative;
}
/* Because of ng-repeat we add span between the .dropdown-menu element and the li element, so we had
to add those CSS styles here. */
.dropdown-menu > span > li > a {
display: block;
padding: 3px 20px;
clear: both;
font-weight: normal;
line-height: 1.428571429;
color: #333333;
white-space: nowrap;
}
.dropdown-menu > span > li > a:hover,
.dropdown-menu > span > li > a:focus {
color: #ffffff;
text-decoration: none;
background-color: #428bca;
}
/* Dropdown submenus */
.dropdown-submenu {
position: relative;
}
.dropdown-submenu > .dropdown-menu {
top: 0;
left: 100%;
margin-top: -6px;
margin-left: -1px;
-webkit-border-radius: 0 6px 6px 6px;
-moz-border-radius: 0 6px 6px 6px;
border-radius: 0 6px 6px 6px;
}
.dropdown-submenu:hover > .dropdown-menu {
display: block;
}
.dropdown-submenu > a:after {
display: block;
content: " ";
float: right;
width: 0;
height: 0;
border-color: transparent;
border-style: solid;
border-width: 5px 0 5px 5px;
border-left-color: #cccccc;
margin-top: 5px;
margin-right: -10px;
}
.dropdown-submenu:hover > a:after {
/*border-left-color: #ffffff;*/
}
.dropdown-submenu.pull-left {
float: none;
}
.dropdown-submenu.pull-left > .dropdown-menu {
left: -100%;
margin-left: 10px;
-webkit-border-radius: 6px 0 6px 6px;
-moz-border-radius: 6px 0 6px 6px;
border-radius: 6px 0 6px 6px;
}
.rd-tab .remove {
cursor: pointer;
color: #A09797;
padding: 0 3px 1px 4px;
font-size: 11px;
}
.rd-tab .remove:hover {
color: white;
background-color: #FF8080;
border-radius: 50%;
}
.tab-nav > li.rd-tab-btn {
float: right;
padding-right: 10px;
padding-top: 10px;
}
/* light version of bootstrap's form-control */
.rd-form-control {
display: block;
padding: 6px 12px;
line-height: 1.428571429;
color: #555555;
vertical-align: middle;
background-color: #ffffff;
border: 1px solid #cccccc;
border-radius: 4px;
-webkit-box-shadow: none;
box-shadow: none;
-webkit-transition: border-color ease-in-out 0.15s, box-shadow ease-in-out 0.15s;
transition: border-color ease-in-out 0.15s, box-shadow ease-in-out 0.15s;
}
.rd-form-control {
width: 90%;
}
pivot-table-renderer > table, grid-renderer > div, visualization-renderer > div {
overflow: auto;
}
counter-renderer {
display: block;
text-align: center;
}
counter-renderer counter {
margin: 0 auto;
padding: 15px 50px;
display: block;
}
counter-renderer value,
counter-renderer counter-target {
font-size: 80px;
display: block;
}
counter-renderer counter-target {
color: #ccc;
}
counter-renderer counter.positive value {
color: #5cb85c;
}
counter-renderer counter.negative value {
color: #d9534f;
margin-right: 15px;
}
counter-renderer counter-name {
font-size: 40px;
display: block;
}
.box {
font: 10px sans-serif;
}
.box line,
.box rect,
.box circle {
fill: #fff;
stroke: #000;
stroke-width: 1.5px;
}
.box .center {
stroke-dasharray: 3, 3;
}
.box .outlier {
fill: none;
stroke: #000;
}
.axis text {
font: 10px sans-serif;
}
.axis path,
.axis line {
fill: none;
stroke: #000;
shape-rendering: crispEdges;
}
.grid-background {
fill: #ddd;
}
.grid path,
.grid line {
fill: none;
stroke: #fff;
shape-rendering: crispEdges;
}
.grid .minor line {
stroke-opacity: .5;
}
.grid text {
display: none;
}
.iframe-container {
height: 100%;
}
.schema-container {
height: 100%;
z-index: 10;
background-color: white;
}
.schema-control {
display: flex;
padding: 5px 0;
}
.schema-control .form-control {
height: 30px;
margin-right: 5px;
}
.schema-browser {
height: calc(100% - 45px);
overflow-y: auto;
overflow-x: hidden;
border: 1px solid rgba(0,0,0,.15);
}
.parameter-label {
display: block;
}
div.table-name {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
padding: 0 10px;
}
div.table-name:hover {
background: #f4f4f4;
}
.blankslate {
text-align: center;
padding: 30px;
}
/* Footer */
.footer {
color: #818d9f;
padding-bottom: 30px;
}
.footer a {
color: #818d9f;
margin-left: 20px;
}
.col-table .missing-value {
color: #b94a48;
}
.col-table .super-small-input {
padding-left: 3px;
height: 24px;
}
.col-table .ui-select-toggle, .col-table .ui-select-search {
padding: 2px;
padding-left: 5px;
height: 24px;
}
.clearable button {
border-top-right-radius: 0;
border-bottom-right-radius: 0;
}
/* Immediately apply ng-cloak, instead of waiting for angular.js to load: */
[ng\:cloak], [ng-cloak], [data-ng-cloak], [x-ng-cloak], .ng-cloak, .x-ng-cloak {
display: none !important;
}
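/* Usage sketch (assumed, for context): an element marked with ng-cloak stays hidden until
   Angular compiles the template and removes the attribute, e.g.
     <div ng-cloak>{{ query.name }}</div>
   Declaring the rule here hides such elements even before angular.js has finished loading,
   avoiding a flash of uncompiled template markup. */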
.voffset {
margin-top: 2px;
}
.voffset1 {
margin-top: 5px;
}
.voffset2 {
margin-top: 10px;
}
.voffset3 {
margin-top: 15px;
}
.voffset4 {
margin-top: 30px;
}
.voffset5 {
margin-top: 40px;
}
.voffset6 {
margin-top: 60px;
}
.voffset7 {
margin-top: 80px;
}
.voffset8 {
margin-top: 100px;
}
.voffset9 {
margin-top: 150px;
}
.overlay {
background-color: #808080;
width: 100%;
height: 100%;
position: absolute;
top: 0;
left: 0;
padding: 0;
z-index: 1000;
opacity: 0.8;
}
.container-fluid {
padding-left: 5px;
padding-right: 5px;
}
.modal-xl {
position: fixed;
top: 0;
right: 0;
bottom: 0;
left: 0;
overflow: hidden;
}
.modal-xl .modal-dialog {
position: fixed;
margin: 0;
width: 100%;
height: 100%;
padding: 0;
}
.modal-xl .modal-content {
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
border: 2px solid #3c7dcf;
border-radius: 0;
box-shadow: none;
}
.modal-xl .modal-header {
position: absolute;
top: 0;
right: 0;
left: 0;
height: 50px;
padding: 10px;
border: 0;
}
.modal-xl .modal-body {
position: absolute;
top: 50px;
bottom: 60px;
width: 100%;
overflow: auto;
}
.modal-xl .modal-footer {
position: absolute;
right: 0;
bottom: 0;
left: 0;
height: 60px;
padding: 10px;
}
/* Bootstrap Overrides */
.flex-parent {
display: flex;
flex-direction: column;
justify-content: center;
}
.collapsing,
.collapse.in {
padding: 5px 10px;
transition: all 0.35s ease;
}
.schema-browser .collapse.in {
background: #f4f4f4;
}
.navbar .collapse.in {
background: #222;
}
/* Fixes for SuperFlat */
.table-hover > tbody > tr:hover {
background-color: #f4f4f4;
}
.dropdown-menu {
z-index: 1000000000;
}
.t-body a.actions {
font-size: 24px;
line-height: 100%;
padding: 4px 10px 3px;
display: block;
}
.t-body a.actions:hover,
.t-body a.actions.open > a {
background-color: rgba(0, 0, 0, 0.1);
}
/* ui-select adjustments for SuperFlat */
/* Same definition as .form-control */
.ui-select-toggle.btn-default {
height: 35px;
padding: 6px 12px;
font-size: 13px;
line-height: 1.42857143;
color: #9E9E9E;
background: #fff none;
border: 1px solid #e8e8e8;
border-radius: 5px;
-webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-webkit-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
-o-transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
transition: border-color ease-in-out .15s, box-shadow ease-in-out .15s;
}
.t-header.widget {
padding: 5px;
}
/* Sankey Visualization */
.sankey .node rect {
fill-opacity: .9;
shape-rendering: crispEdges;
stroke-width: 0;
}
.sankey .node text {
text-shadow: 0 1px 0 #fff;
}
.sankey .link {
fill: none;
stroke: #000;
stroke-opacity: .2;
}
/* Dashboard list view */
.m-2 {
margin: 2px;
}
.dropdown-menu > .disabled {
cursor: not-allowed;
}
/* Disable clicks on links inside disabled menu items */
.dropdown-menu > .disabled > a {
pointer-events: none;
}

File diff suppressed because it is too large

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" width="630" height="630" viewBox="0 0 630 630" version="1.1">
<g transform="translate(-56.611934,-184.36221)">
<path fill="#a7a7a7" d="m 58.467329,810.92426 c 0,-10.37237 6.53079,-28.55017 15.29935,-42.58418 9.70782,-15.53729 28.965401,-35.11964 51.655121,-52.52627 7.21357,-5.53395 6.57284,-5.08564 22.23877,-15.56023 20.2393,-13.53245 55.34935,-32.58361 79.80676,-43.30416 19.72995,-8.64834 57.2268,-21.58721 62.55974,-21.58721 0.76988,0 3.09659,-0.67892 5.17046,-1.50872 7.3197,-2.92876 12.5713,-16.1286 13.89202,-34.91737 l 0.64689,-9.20257 -8.38455,-10.04931 c -7.66622,-9.18836 -11.59308,-14.62897 -20.80286,-28.82203 -9.85543,-15.1881 -22.91997,-47.26171 -24.69185,-60.61889 -0.56037,-4.22429 -1.0976,-5.21546 -3.27999,-6.05157 -10.15146,-3.88918 -15.7489,-9.08881 -20.96084,-19.47118 -6.6162,-13.17971 -8.62087,-36.5618 -4.2711,-49.81738 2.29242,-6.98599 4.4873,-10.89589 8.72413,-15.54098 2.11744,-2.32146 2.22102,-2.9999 1.45041,-9.5 -2.58899,-21.83821 -3.34954,-41.36055 -2.18394,-56.05862 3.87891,-48.91259 20.17112,-81.47548 50.89033,-101.71339 16.68129,-10.98968 34.4196,-16.74492 62.2113,-20.1846 32.20647,-3.98609 68.82401,0.75436 93.8318,12.14731 14.67849,6.68717 28.98155,17.91433 38.99893,30.61215 19.81832,25.12131 29.57328,66.42856 26.24603,111.13853 -0.69821,9.38224 -1.63714,20.17477 -2.08651,23.9834 -0.81425,6.90129 -0.80559,6.93815 2.55469,10.86388 7.03777,8.22205 10.02312,18.44949 9.84447,33.72599 -0.27308,23.35114 -10.37432,43.49379 -24.44339,48.74202 l -5.34465,1.99373 -1.18738,6.3748 c -4.9831,26.75313 -22.71761,61.14702 -45.76986,88.76506 l -7.88572,9.44759 0.64805,9.21931 c 1.18682,16.88381 6.49256,31.6953 12.30203,34.34227 1.23595,0.56314 6.42637,1.99946 11.53427,3.19182 35.45428,8.27628 97.76078,37.16683 137.59386,63.80012 15.66594,10.47459 15.02521,10.02628 22.23877,15.56023 22.46534,17.23449 41.43241,36.56563 52.11597,53.1163 7.31528,11.33263 13.49882,27.98884 14.54335,39.17447 l 0.58435,6.25763 -313.14461,0 -313.144601,0 0,-3.43795 z"/>
</g>
</svg>

Before: 630 × 630 SVG, 2.1 KiB

[55 additional binary image files removed; previews not shown. Sizes range from 2.4 KiB to 42 KiB.]

Some files were not shown because too many files have changed in this diff