Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 01:03:20 -05:00)

Compare commits: v0.8.1-rc...v0.8.3.b11 (283 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 32c0d3eb3d |  |
|  | 1bee22a578 |  |
|  | 6bb57508e1 |  |
|  | 2d34bf1c54 |  |
|  | 7e3856b4f5 |  |
|  | 189e105c68 |  |
|  | 378459d64f |  |
|  | ab72531889 |  |
|  | 51deb8f75d |  |
|  | 68f6e9b5e5 |  |
|  | fbfa76f4d6 |  |
|  | 28e8e049eb |  |
|  | f1f9597998 |  |
|  | 0b389d51aa |  |
|  | 46f3e82571 |  |
|  | 5b64918379 |  |
|  | 7549f32d9a |  |
|  | 6f51776cbb |  |
|  | ad0afd8f3e |  |
|  | 8863282e58 |  |
|  | 9c1fda488c |  |
|  | 995659ee0d |  |
|  | ad2642e9e5 |  |
|  | 740b305910 |  |
|  | ca8cca0a8c |  |
|  | 7c4410ac63 |  |
|  | 91a209ae82 |  |
|  | 60cdb85cc4 |  |
|  | becb4decf1 |  |
|  | 5f33e7ea18 |  |
|  | 7675de4ec7 |  |
|  | fe2aa71349 |  |
|  | b7720f7001 |  |
|  | 3b24f56eba |  |
|  | 52b8e98b1a |  |
|  | 5fe9c2fcf0 |  |
|  | 816142aa54 |  |
|  | f737be272f |  |
|  | 0343fa7980 |  |
|  | 0f9f9a24a0 |  |
|  | 5b9b18639b |  |
|  | ce46295dd3 |  |
|  | 3781b0758e |  |
|  | 8d20180d40 |  |
|  | a7b41327c6 |  |
|  | 4d415c0246 |  |
|  | 5331008e78 |  |
|  | 80783feda6 |  |
|  | 2f308c3fa6 |  |
|  | a63055f7f0 |  |
|  | ce884ba6d3 |  |
|  | 63765281fe |  |
|  | 47e79003e5 |  |
|  | 541060c62e |  |
|  | 3ba19fa80f |  |
|  | f3ec0448f5 |  |
|  | 654349a7ae |  |
|  | 2b32de184e |  |
|  | 1fb57edd1f |  |
|  | f6c65d139a |  |
|  | 4e59472238 |  |
|  | feabc46da4 |  |
|  | 51a10e5a20 |  |
|  | 5bf370d0f0 |  |
|  | 5beec581d8 |  |
|  | 70080df534 |  |
|  | 0d4c3c329e |  |
|  | 76dfbad971 |  |
|  | 45a85c110f |  |
|  | f77c0aeb1d |  |
|  | b23e328f69 |  |
|  | 165d782b98 |  |
|  | 1bdc1bef73 |  |
|  | e3b41b15d7 |  |
|  | 7a95dec33b |  |
|  | a3d059041c |  |
|  | 3a6c1599f3 |  |
|  | f92aa7b15f |  |
|  | d823506e5b |  |
|  | fc93de7aa2 |  |
|  | a0cc25d174 |  |
|  | df24bc3aae |  |
|  | 60c2cb0a75 |  |
|  | ad19f2d304 |  |
|  | 3aa59a8152 |  |
|  | 32638aebed |  |
|  | 346ea66c9d |  |
|  | d14b74b683 |  |
|  | 5d879ce358 |  |
|  | b4da4359a8 |  |
|  | 7e08518a31 |  |
|  | bea0e9aad0 |  |
|  | a87179b68b |  |
|  | 91806eda44 |  |
|  | d1fe3d63fd |  |
|  | 8408409ce2 |  |
|  | 6bbdd5eb44 |  |
|  | 34ba54397d |  |
|  | ec79ce74d0 |  |
|  | f324f1bf6f |  |
|  | 47cfb7d620 |  |
|  | dab1a21b40 |  |
|  | aa04a6e4a5 |  |
|  | e0a43a32ab |  |
|  | 68001ae0f1 |  |
|  | 9d9501b158 |  |
|  | 67aecc0201 |  |
|  | 0bc9fc1ed5 |  |
|  | b548cb1d8f |  |
|  | eb5c4dd5f3 |  |
|  | a07a9b9390 |  |
|  | 56ade4735c |  |
|  | b8a9f1048a |  |
|  | 5b3bcff4f5 |  |
|  | b41b21c69e |  |
|  | 172d57e82c |  |
|  | f507da9df7 |  |
|  | 2e27e43357 |  |
|  | 8a0c287d05 |  |
|  | 664a1806bc |  |
|  | 9a0ccd1bb5 |  |
|  | 076fca0c5a |  |
|  | 59f099418a |  |
|  | b9a0760d7e |  |
|  | a0c26c64f0 |  |
|  | 5f47689553 |  |
|  | a5bc90c816 |  |
|  | 39b8f40ad4 |  |
|  | 070caa6976 |  |
|  | 56b51f68bc |  |
|  | 799ce3e718 |  |
|  | 9b47f0d08a |  |
|  | 4f4dc135f5 |  |
|  | 4eb490a839 |  |
|  | 410c5671f0 |  |
|  | fad8bd47e8 |  |
|  | 89f5074054 |  |
|  | 5826fbd05f |  |
|  | ddab1c9493 |  |
|  | f9d5fe235b |  |
|  | afe64fe981 |  |
|  | 99efe497ee |  |
|  | 9e183f1500 |  |
|  | 4b17b9869e |  |
|  | 872d58688f |  |
|  | 37272dc2d9 |  |
|  | 1a3df37940 |  |
|  | ddbf264020 |  |
|  | e93b71af85 |  |
|  | 13184519c3 |  |
|  | 0f8da884f9 |  |
|  | 21de1d90e3 |  |
|  | ed9eb691c1 |  |
|  | d6c229759f |  |
|  | f0b8dfb449 |  |
|  | 6f335d34b9 |  |
|  | bed63083a7 |  |
|  | 9886f5b13b |  |
|  | f0ee7a67d2 |  |
|  | 9c43e1540e |  |
|  | b0cb2d3f1c |  |
|  | b525ad0622 |  |
|  | 602b9128a7 |  |
|  | 45d3b18c0c |  |
|  | b1918743f2 |  |
|  | 716f36ef9c |  |
|  | 62aa21cdc8 |  |
|  | 4e30fc1054 |  |
|  | 5a1d38c572 |  |
|  | 360b0da159 |  |
|  | cc91981845 |  |
|  | e19962d4e3 |  |
|  | 99b6f8955e |  |
|  | cf6ce0599b |  |
|  | a699c04ee1 |  |
|  | a8d7547dc7 |  |
|  | 72804e6d80 |  |
|  | e51db087c5 |  |
|  | 0e9607205b |  |
|  | 9f799f4bfe |  |
|  | 17e0bd4cd2 |  |
|  | 102038b129 |  |
|  | c01d88cbea |  |
|  | 9d6d88ebff |  |
|  | 3f429ebcb7 |  |
|  | c854ce3c10 |  |
|  | ab6cc3f146 |  |
|  | 97d0035f4a |  |
|  | 8108bc7cb1 |  |
|  | 690cb2fccd |  |
|  | 515c45776e |  |
|  | fc44dba2ef |  |
|  | 5329fe547c |  |
|  | d6bb6d33a3 |  |
|  | 9832b7f72a |  |
|  | 2a6ed3ca52 |  |
|  | 2e78ef0128 |  |
|  | d2d52d44f7 |  |
|  | 987f4bd356 |  |
|  | 0c8c196d65 |  |
|  | 9d703b44de |  |
|  | fb00350c58 |  |
|  | 6cccd30553 |  |
|  | 0bbcb69197 |  |
|  | b0eaffdf6c |  |
|  | 407a649d17 |  |
|  | 73bd83a527 |  |
|  | 72e48a191b |  |
|  | 11682b3779 |  |
|  | a15d7964fa |  |
|  | 2feb8b81f5 |  |
|  | 6286024350 |  |
|  | 0b5dce0ebf |  |
|  | 32311c55e6 |  |
|  | 2ac795d6f7 |  |
|  | d50af7dec9 |  |
|  | 20159a1c2a |  |
|  | 06400ed840 |  |
|  | 0ddc6cf135 |  |
|  | 46a008346f |  |
|  | 21c413f699 |  |
|  | e7222944a5 |  |
|  | f49839eadf |  |
|  | aa1b72908b |  |
|  | 5dd457e5f1 |  |
|  | a471134e07 |  |
|  | 8a8f91ee8f |  |
|  | 59aa218b24 |  |
|  | 5fd8dbe523 |  |
|  | a08f3c7cd0 |  |
|  | 824d053ddd |  |
|  | b6e61deb24 |  |
|  | 4f40b28120 |  |
|  | 5d1c75df1c |  |
|  | 28ccaedfff |  |
|  | 1ee05e12fd |  |
|  | 6f91849419 |  |
|  | 65cc67d1dd |  |
|  | a8f6d9e45b |  |
|  | 2c39a2faae |  |
|  | 1052528a5f |  |
|  | 92cd2f1367 |  |
|  | 990717a43d |  |
|  | a2608d6a44 |  |
|  | dedae03c8c |  |
|  | 61f2be02b7 |  |
|  | 9eca43801a |  |
|  | bcaefda600 |  |
|  | 42b0430866 |  |
|  | 445dbb5ade |  |
|  | 40ee0d8a6e |  |
|  | a5b738a035 |  |
|  | e893ab4519 |  |
|  | 8b569379bc |  |
|  | bff3e7c3b2 |  |
|  | 3fbd0d9579 |  |
|  | 00f4ec16f8 |  |
|  | 6f24b31858 |  |
|  | 7a8844180b |  |
|  | aefaf204a3 |  |
|  | 1527ea36b1 |  |
|  | a71b83d98a |  |
|  | 7add6287dc |  |
|  | d37b5ed075 |  |
|  | 23b8b77feb |  |
|  | 46f1478e0d |  |
|  | 47e6960b83 |  |
|  | 0990d93b03 |  |
|  | bf88d8b578 |  |
|  | 384e756817 |  |
|  | d2c46c99eb |  |
|  | 9c2858191f |  |
|  | 0473de7392 |  |
|  | faece4f2c4 |  |
|  | d100c915f4 |  |
|  | ef3636145c |  |
|  | 6bd7dc9237 |  |
|  | 6210d6ab80 |  |
|  | 176fd16e95 |  |
|  | 75d3a63070 |  |
|  | 8c4a5a644e |  |
|  | 5b024a3518 |  |
|  | d474267934 |  |
.dockerignore (new file, 4 lines)

```
rd_ui/.tmp/
rd_ui/node_modules/
.git/
.vagrant/
```
.env.example (12 lines changed)

```diff
@@ -1,6 +1,6 @@
-export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
-export REDASH_LOG_LEVEL="INFO"
-export REDASH_REDIS_URL=redis://localhost:6379/1
-export REDASH_DATABASE_URL="postgresql://redash"
-export REDASH_COOKIE_SECRET=veryverysecret
-export REDASH_GOOGLE_APPS_DOMAIN=
+REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
+REDASH_LOG_LEVEL="INFO"
+REDASH_REDIS_URL=redis://localhost:6379/1
+REDASH_DATABASE_URL="postgresql://redash"
+REDASH_COOKIE_SECRET=veryverysecret
+REDASH_GOOGLE_APPS_DOMAIN=
```
.gitignore (3 lines added)

```diff
@@ -19,3 +19,6 @@ redash/dump.rdb
 venv
 
 dump.rdb
+
+# Docker related
+docker-compose.yml
```
Dockerfile (new file, 41 lines)

```dockerfile
FROM ubuntu:trusty
MAINTAINER Di Wu <diwu@yelp.com>

# Ubuntu packages
RUN apt-get update && \
    apt-get install -y python-pip python-dev curl build-essential pwgen libffi-dev sudo git-core wget \
    # Postgres client
    libpq-dev \
    # Additional packages required for data sources:
    libssl-dev libmysqlclient-dev

# Users creation
RUN useradd --system --comment " " --create-home redash

# Pip requirements for all data source types
RUN pip install -U setuptools && \
    pip install supervisor==3.1.2

COPY . /opt/redash/current

# Setting working directory
WORKDIR /opt/redash/current

# Install project specific dependencies
RUN pip install -r requirements_all_ds.txt && \
    pip install -r requirements.txt

# Setup supervisord
RUN mkdir -p /opt/redash/supervisord && \
    mkdir -p /opt/redash/logs && \
    cp /opt/redash/current/setup/docker/supervisord/supervisord.conf /opt/redash/supervisord/supervisord.conf

# Fix permissions
RUN chown -R redash /opt/redash

# Expose ports
EXPOSE 5000
EXPOSE 9001

# Startup script
CMD ["supervisord", "-c", "/opt/redash/supervisord/supervisord.conf"]
```
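For orientation, a minimal sketch of building and running the image this Dockerfile defines (the tag name and detached/port-publishing flags are illustrative assumptions, not taken from this changeset):

```bash
# Build the image from the repository root; the tag is an illustrative choice.
docker build -t redash/redash:dev .

# Run it, publishing the web server (5000) and supervisord (9001) ports.
docker run -d -p 5000:5000 -p 9001:9001 redash/redash:dev
```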
README.md (28 lines changed)

```diff
@@ -1,8 +1,16 @@
+Some of you read the news about EverythingMe closing down. While more detailed announcement will come later (once more details are clear), **I just wanted to reassure you that you shouldn't worry -- this won't affect the future of re:dash.** I will keep maintaining re:dash, and might even be able to work more on it.
+
+If you still have concerns, you're welcome to reach out to me directly -- arik@arikfr.com.
+
+Arik.
+
+---
+
 <p align="center">
-  <img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
+  <img title="re:dash" src='http://redash.io/static/old_img/redash_logo.png' width="200px"/>
 </p>
 <p align="center">
-  <img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
+  <img title="Build Status" src='https://circleci.com/gh/getredash/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
 </p>
 
 **_re:dash_** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
@@ -22,31 +30,27 @@ Presto, Google Spreadsheets, Cloudera Impala, Hive and custom scripts.
 
 ## Demo
 
-![Screenshots](https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif)
+![Screenshots](https://raw.github.com/getredash/redash/screenshots/screenshots.gif)
 
 You can try out the demo instance: http://demo.redash.io/ (login with any Google account).
 
 ## Getting Started
 
 * [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
-* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
+* [Documentation](http://docs.redash.io).
 
 
 ## Getting help
 
 * [Google Group (mailing list)](https://groups.google.com/forum/#!forum/redash-users): the best place to get updates about new releases or ask general questions.
-* Find us [on gitter](https://gitter.im/EverythingMe/redash#) (chat).
-* Contact Arik, the maintainer directly: arik@everything.me.
-
-## Roadmap
-
-TBD.
+* Find us [on gitter](https://gitter.im/getredash/redash#) (chat).
+* Contact Arik, the maintainer directly: arik@redash.io.
 
 ## Reporting Bugs and Contributing Code
 
-* Want to report a bug or request a feature? Please open [an issue](https://github.com/everythingme/redash/issues/new).
+* Want to report a bug or request a feature? Please open [an issue](https://github.com/getredash/redash/issues/new).
 * Want to help us build **_re:dash_**? Fork the project and make a pull request. We need all the help we can get!
 
 ## License
 
-See [LICENSE](https://github.com/EverythingMe/redash/blob/master/LICENSE) file.
+See [LICENSE](https://github.com/getredash/redash/blob/master/LICENSE) file.
```
```diff
@@ -7,7 +7,7 @@ import requests
 
 github_token = os.environ['GITHUB_TOKEN']
 auth = (github_token, 'x-oauth-basic')
-repo = 'EverythingMe/redash'
+repo = 'getredash/redash'
 
 def _github_request(method, path, params=None, headers={}):
     if not path.startswith('https://api.github.com'):
```
circle.yml (13 lines changed)

```diff
@@ -1,15 +1,14 @@
 machine:
+  services:
+    - docker
   node:
     version:
-      0.10.24
+      0.12.4
   python:
     version:
       2.7.3
 dependencies:
   pre:
-    - wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
-    - tar xvf optipng-0.7.5.tar.gz
-    - cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
     - make deps
     - pip install -r requirements_dev.txt
     - pip install -r requirements.txt
@@ -22,10 +21,14 @@ test:
   post:
     - make pack
 deployment:
-  github:
+  github_and_docker:
     branch: master
     commands:
       - make upload
+      - echo "rd_ui/app" >> .dockerignore
+      - docker build -t redash/redash:$(./manage.py version | sed -e "s/\+/./") .
+      - docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
+      - docker push redash/redash:$(./manage.py version | sed -e "s/\+/./")
 notify:
   webhooks:
     - url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
```
docker-compose-example.yml (new file, 28 lines)

```yaml
redash:
  image: redash
  ports:
    - "5000:5000"
  links:
    - redis
    - postgres
  environment:
    REDASH_STATIC_ASSETS_PATH:"../rd_ui/app/"
    REDASH_LOG_LEVEL:"INFO"
    REDASH_REDIS_URL:redis://localhost:6379/0
    REDASH_DATABASE_URL:"postgresql://redash"
    REDASH_COOKIE_SECRET:veryverysecret
    REDASH_GOOGLE_APPS_DOMAIN:
redis:
  image: redis:2.8
postgres:
  image: postgres:9.3
  volumes:
    - /opt/postgres-data:/var/lib/postgresql/data
redash-nginx:
  image: redash-nginx:1.0
  ports:
    - "80:80"
  volumes:
    - "../redash-nginx/nginx.conf:/etc/nginx/nginx.conf"
  links:
    - redash
```
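A minimal sketch of how this example file is meant to be used (the copy-then-edit flow is an assumption, consistent with the `.gitignore` entry for `docker-compose.yml` added above):

```bash
# Copy the example into place (the real docker-compose.yml is git-ignored),
# adjust the environment values, then bring the stack up.
cp docker-compose-example.yml docker-compose.yml
docker-compose up -d
```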
```diff
@@ -10,8 +10,8 @@ If one of the listed data source types isn't available when trying to create a n
 1. You installed required dependencies.
 2. If you've set custom value for the ``REDASH_ENABLED_QUERY_RUNNERS`` setting, it's included in the list.
 
-PostgreSQL / Redshift
----------------------
+PostgreSQL / Redshift / Greenplum
+---------------------------------
 
 - **Options**:
 
@@ -180,6 +180,13 @@ VPN and with users you trust).
    You MUST make sure these modules are installed on the machine
    running the Celery workers.
 
+Notes:
+
+- For security, the python query runner is disabled by default.
+  To enable, add ``redash.query_runner.python`` to the ``REDASH_ADDITIONAL_QUERY_RUNNERS`` environmental variable. If you used
+  the bootstrap script, or one of the provided images, add to ``/opt/redash/.env`` file the line: ``export REDASH_ADDITIONAL_QUERY_RUNNERS=redash.query_runner.python``.
+
 
 Vertica
 -----
 
@@ -194,3 +201,18 @@ Vertica
 - **Additional requirements**:
 
   - ``vertica-python`` python package
+
+Oracle
+------
+
+- **Options**
+
+  - DSN Service name
+  - User
+  - Password
+  - Host
+  - Port
+
+- **Additional requirements**
+
+  - ``cx_Oracle`` python package. This requires the installation of the Oracle `instant client <http://www.oracle.com/technetwork/database/features/instant-client/index-097480.html>`__.
```
```diff
@@ -34,7 +34,7 @@ When query execution is done, the result gets stored to
 ``query_results`` table. Also we check for all queries in the
 ``queries`` table that have the same query hash and update their
 reference to the query result we just saved
-(`code <https://github.com/EverythingMe/redash/blob/master/redash/models.py#L235>`__).
+(`code <https://github.com/getredash/redash/blob/master/redash/models.py#L235>`__).
 
 Client
 ------
@@ -69,7 +69,7 @@ Ideas on how to implement query parameters
 Client side only implementation
 -------------------------------
 
-(This was actually implemented in. See pull request `#363 <https://github.com/EverythingMe/redash/pull/363>`__ for details.)
+(This was actually implemented in. See pull request `#363 <https://github.com/getredash/redash/pull/363>`__ for details.)
 
 The basic idea of how to implement parametized queries is to treat the
 query as a template and merge it with parameters taken from query string
```
```diff
@@ -9,22 +9,22 @@ All data sources in re:dash return the following results in JSON format:
     "columns" : [
       {
         // Required: a unique identifier of the column name in this result
-        "name" : "COLUMN_NAME",
+        "name" : "COLUMN_NAME",
         // Required: friendly name of the column that will appear in the results
-        "friendly_name" : "FRIENDLY_NAME",
-        // Optional: If not specified sort might not work well.
+        "friendly_name" : "FRIENDLY_NAME",
+        // Optional: If not specified sort might not work well.
         // Supported types: integer, float, boolean, string (default), datetime (ISO-8601 text format)
-        "type" : "VALUE_TYPE"
+        "type" : "VALUE_TYPE"
       },
       ...
     ],
     "rows" : [
       {
-        // name is the column name as it appears in the columns above.
+        // name is the column name as it appears in the columns above.
         // VALUE is a valid JSON value. For dates its an ISO-8601 string.
         "name" : VALUE,
         "name2" : VALUE2
       },
       ...
-    ]
+    ]
   }
```
```diff
@@ -13,7 +13,7 @@ To get started with this box:
 1. Make sure you have recent version of
    `Vagrant <https://www.vagrantup.com/>`__ installed.
 2. Clone the re:dash repository:
-   ``git clone https://github.com/EverythingMe/redash.git``.
+   ``git clone https://github.com/getredash/redash.git``.
 3. Change dir into the repository (``cd redash``) and run run
    ``vagrant up``. This might take some time the first time you run it,
    as it downloads the Vagrant virtual box.
@@ -30,20 +30,7 @@ To get started with this box:
 
    ::
 
-       PYTHONPATH=. bin/run python migrations/0001_allow_delete_query.py
-       PYTHONPATH=. bin/run python migrations/0002_fix_timestamp_fields.py
-       PYTHONPATH=. bin/run python migrations/0003_update_data_source_config.py
-       PYTHONPATH=. bin/run python migrations/0004_allow_null_in_event_user.py
-       PYTHONPATH=. bin/run python migrations/0005_add_updated_at.py
-       PYTHONPATH=. bin/run python migrations/0006_queries_last_edit_by.py
-       PYTHONPATH=. bin/run python migrations/0007_add_schedule_to_queries.py
-       PYTHONPATH=. bin/run python migrations/0008_make_ds_name_unique.py
-       PYTHONPATH=. bin/run python migrations/0009_add_api_key_to_user.py
-       PYTHONPATH=. bin/run python migrations/0010_create_alerts.py
-       PYTHONPATH=. bin/run python migrations/0010_allow_deleting_datasources.py
-       PYTHONPATH=. bin/run python migrations/0011_migrate_bigquery_to_json.py
-       PYTHONPATH=. bin/run python migrations/0012_add_list_users_permission.py
-       PYTHONPATH=. bin/run python migrations/0013_update_counter_options.py
+       export PYTHONPATH=. && find migrations/ -type f | grep 00 --null | xargs -I file bin/run python file
 
 9. Start the server and background workers with
    ``bin/run honcho start -f Procfile.dev``.
```
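The one-liner that replaces the explicit list above is roughly equivalent to the following loop (a sketch, not from the changeset; the `sort` is added here for a deterministic order, which bare `find` does not guarantee):

```bash
# Run every migration script whose name contains "00", one at a time.
export PYTHONPATH=.
for f in $(find migrations/ -type f | grep 00 | sort); do
  bin/run python "$f"
done
```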
```diff
@@ -1,4 +1,4 @@
-.. image:: http://redash.io/static/img/redash_logo.png
+.. image:: http://redash.io/static/old_img/redash_logo.png
    :width: 200px
 
 Open Source Data Collaboration and Visualization Platform
@@ -21,7 +21,7 @@ Features
 Demo
 ####
 
-.. figure:: https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif
+.. figure:: https://raw.github.com/getredash/redash/screenshots/screenshots.gif
    :alt: Screenshots
 
 You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).
@@ -37,11 +37,11 @@ Getting Started
 Getting Help
 ############
 
-* Source: https://github.com/everythingme/redash
-* Issues: https://github.com/everythingme/redash/issues
+* Source: https://github.com/getredash/redash
+* Issues: https://github.com/getredash/redash/issues
 * Mailing List: https://groups.google.com/forum/#!forum/redash-users
-* Gitter (chat): https://gitter.im/EverythingMe/redash
-* Contact Arik, the maintainer directly: arik@everything.me.
+* Gitter (chat): https://gitter.im/getredash/redash
+* Contact Arik, the maintainer directly: arik@redash.io.
 
 TOC
 ###
```
```diff
@@ -2,7 +2,7 @@ Setting up re:dash instance
 ###########################
 
 The `provisioning
-script <https://github.com/EverythingMe/redash/blob/master/setup/bootstrap.sh>`__
+script <https://raw.githubusercontent.com/getredash/redash/master/setup/ubuntu/bootstrap.sh>`__
 works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script
 installs all needed dependencies and creates basic setup.
 
@@ -12,6 +12,25 @@ Cloud. These images created with the same provision script using Packer.
 Create an instance
 ==================
 
+AWS
+---
+
+Launch the instance with from the pre-baked AMI (for small deployments
+t2.micro should be enough):
+
+- us-east-1: `ami-752c7f10 <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-752c7f10>`__
+- us-west-1: `ami-b36babf7 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-b36babf7>`__
+- us-west-2: `ami-a0a04393 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-a0a04393>`__
+- eu-west-1: `ami-198cb16e <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-198cb16e>`__
+- eu-central-1: `ami-a81418b5 <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-a81418b5>`__
+- sa-east-1: `ami-2b52c336 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-2b52c336>`__
+- ap-northeast-1: `ami-4898fb48 <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-4898fb48>`__
+- ap-southeast-2: `ami-7559134f <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-7559134f>`__
+- ap-southeast-1: `ami-a0786bf2 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-a0786bf2>`__
+
+
+Now proceed to `"Setup" <#setup>`__.
+
 Google Compute Engine
 ---------------------
 
@@ -19,7 +38,7 @@ First, you need to add the images to your account:
 
 .. code:: bash
 
-   $ gcloud compute images create "redash-071-b1015" --source-uri gs://redash-images/redash.0.7.1.b1015.tar.gz
+   $ gcloud compute images create "redash-081-b1110" --source-uri gs://redash-images/redash.0.8.1.b1110.tar.gz
 
 Next you need to launch an instance using this image (n1-standard-1
 instance type is recommended). If you plan using re:dash with BigQuery,
@@ -28,36 +47,19 @@ you can use a dedicated image which comes with BigQuery preconfigured
 
 .. code:: bash
 
-   $ gcloud compute images create "redash-071-b1015-bq" --source-uri gs://redash-images/redash.0.7.1.b1015-bq.tar.gz
+   $ gcloud compute images create "redash-081-b1110-bq" --source-uri gs://redash-images/redash.0.8.1.b1110-bq.tar.gz
 
 Note that you need to launch this instance with BigQuery access:
 
 .. code:: bash
 
-   $ gcloud compute instances create <your_instance_name> --image redash-071-b1015-bq --scopes storage-ro,bigquery
+   $ gcloud compute instances create <your_instance_name> --image redash-081-b1110-bq --scopes storage-ro,bigquery
 
 (the same can be done from the web interface, just make sure to enable
 BigQuery access)
 
 Now proceed to `"Setup" <#setup>`__.
 
-AWS
----
-
-Launch the instance with from the pre-baked AMI (for small deployments
-t2.micro should be enough):
-
-- us-east-1: `ami-95e04efe <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-95e04efe>`__
-- us-west-2: `ami-01d8d331 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-01d8d331>`__
-- us-west-1: `ami-b35ea1f7 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-b35ea1f7>`__
-- eu-west-1: `ami-d46734a3 <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-d46734a3>`__
-- eu-central-1: `ami-7e494e63 <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-7e494e63>`__
-- ap-southeast-1: `ami-30343b62 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-30343b62>`__
-- ap-southeast-2: `ami-53357669 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-53357669>`__
-- ap-northeast-1: `ami-4253ea42 <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-4253ea42>`__
-- sa-east-1: `ami-b170f9ac <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-b170f9ac>`__
-
-Now proceed to `"Setup" <#setup>`__.
-
 Other
 -----
@@ -128,6 +130,32 @@ to create new data source connection.
 See :doc:`documentation </datasources>` for the different options.
 Your instance comes ready with dependencies needed to setup supported sources.
 
+Mail Configuration
+------------------
+
+For the system to be able to send emails (for example when alerts trigger), you need to set the mail server to use and the
+host name of your re:dash server. If you're using one of our images, you can do this by editing the `.env` file:
+
+.. code::
+
+   # Note that not all values are required, as they have default values.
+
+   export REDASH_MAIL_SERVER="" # default: localhost
+   export REDASH_MAIL_PORT="" # default: 25
+   export REDASH_MAIL_USE_TLS="" # default: False
+   export REDASH_MAIL_USE_SSL="" # default: False
+   export REDASH_MAIL_USERNAME="" # default: None
+   export REDASH_MAIL_PASSWORD="" # default: None
+   export REDASH_MAIL_DEFAULT_SENDER="" # Email address to send from
+
+   export REDASH_HOST="" # base address of your re:dash instance, for example: "https://demo.redash.io"
+
+- Note that not all values are required, as there are default values.
+- It's recommended to use some mail service, like `Amazon SES <https://aws.amazon.com/ses/>`__, `Mailgun <http://www.mailgun.com/>`__
+  or `Mandrill <http://mandrillapp.com>`__ to send emails to ensure deliverability.
+
+To test email configuration, you can run `bin/run ./manage.py send_test_mail` (from `/opt/redash/current`).
+
 How to upgrade?
 ---------------
```
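As a worked example of the mail setup documented above, a minimal sketch (server, sender and host values are hypothetical placeholders, not from the changeset):

```bash
# Hypothetical values -- substitute your own mail server and host name.
cat >> /opt/redash/.env <<'EOF'
export REDASH_MAIL_SERVER="smtp.example.com"
export REDASH_MAIL_DEFAULT_SENDER="redash@example.com"
export REDASH_HOST="https://redash.example.com"
EOF

# Send a test message to verify the configuration.
cd /opt/redash/current && bin/run ./manage.py send_test_mail
```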
```diff
@@ -14,8 +14,8 @@ How to run the Fabric script
 1. Install Fabric: ``pip install fabric requests`` (needed only once)
 2. Download the ``fabfile.py`` from the gist.
 3. Run the script:
-   ``fab -H{your re:dash host} -u{the ssh user for this host} -i{path to key file for passwordless login} deploy_latest_release``
+   ``fab -H{your re:dash host} -u{the ssh user for this host} -i{path to key file for passwordless login} deploy_latest_release``
 
 ``-i`` is optional and it is only needed in case you're using private-key based authentication (and didn't add the key file to your authentication agent or set its path in your SSH config).
 
 What the Fabric script does
@@ -25,7 +25,7 @@ Even if you didn't use the image, it's very likely you can reuse most of
 this script with small modifications. What this script does is:
 
 1. Find the URL of the latest release tarball (from `GitHub releases
-   page <github.com/everythingme/redash/releases>`__).
+   page <github.com/getredash/redash/releases>`__).
 2. Download it.
 3. Create new directory for this version (for example:
    ``/opt/redash/redash.0.5.0.b685``).
```
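A concrete invocation of step 3 above might look like this (host, user and key path are hypothetical placeholders):

```bash
# Hypothetical host, user and key file -- replace with your own.
fab -Hredash.example.com -uubuntu -i~/.ssh/redash.pem deploy_latest_release
```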
```diff
@@ -46,3 +46,27 @@ Simple query on a logstash ElasticSearch instance:
        "size" : 250,
        "sort" : "@timestamp:asc"
     }
+
+Simple query on a ElasticSearch instance:
+==================================================
+
+- Query the index named "twitter"
+- Filter by user equal "kimchy"
+- Return the fields: "@timestamp", "tweet" and "user"
+- Return up to 15 results
+- Sort by @timestamp ascending
+
+.. code:: json
+
+    {
+        "index" : "twitter",
+        "query" : {
+            "match": {
+                "user" : "kimchy"
+            }
+        },
+        "fields" : ["@timestamp", "tweet", "user"],
+        "size" : 15,
+        "sort" : "@timestamp:asc"
+    }
```
migrations/0014_migrate_existing_es_to_kibana.py (new file, 10 lines)

```python
__author__ = 'lior'

from redash.models import DataSource

if __name__ == '__main__':

    for ds in DataSource.all():
        if ds.type == 'elasticsearch':
            ds.type = 'kibana'
            ds.save()
```
migrations/0015_add_schedule_query_permission.py (new file, 6 lines)

```python
from redash import models

if __name__ == '__main__':
    default_group = models.Group.get(models.Group.name=='default')
    default_group.permissions.append('schedule_query')
    default_group.save()
```
migrations/0016_add_alert_subscriber.py (new file, 10 lines)

```python
from redash.models import db, Alert, AlertSubscription

if __name__ == '__main__':
    with db.database.transaction():
        # There was an AWS/GCE image created without this table, to make sure this exists we run this migration.
        if not AlertSubscription.table_exists():
            AlertSubscription.create_table()

    db.close_db(None)
```
```diff
@@ -236,17 +236,6 @@ module.exports = function (grunt) {
     //   dist: {}
     // },
 
-    imagemin: {
-      dist: {
-        files: [{
-          expand: true,
-          cwd: '<%= yeoman.app %>/images',
-          src: '{,*/}*.{png,jpg,jpeg,gif}',
-          dest: '<%= yeoman.dist %>/images'
-        }]
-      }
-    },
-
     svgmin: {
       dist: {
         files: [{
@@ -313,6 +302,11 @@ module.exports = function (grunt) {
           'images/{,*/}*.{webp}',
           'fonts/*'
         ]
+      }, {
+        expand: true,
+        cwd: '<%= yeoman.app %>/images',
+        dest: '<%= yeoman.dist %>/images',
+        src: ['*']
       }, {
         expand: true,
         cwd: '.tmp/images',
@@ -348,7 +342,6 @@ module.exports = function (grunt) {
     ],
     dist: [
       'copy:styles',
-      'imagemin',
       'svgmin'
     ]
   },
```
```diff
@@ -44,7 +44,6 @@
 {% raw %}
 <div class="collapse navbar-collapse navbar-ex1-collapse">
   <ul class="nav navbar-nav">
-    <li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
     <li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
       <a href="#" class="dropdown-toggle" dropdown-toggle><span class="fa fa-tachometer"></span> <b class="caret"></b></a>
       <ul class="dropdown-menu" dropdown-menu>
@@ -96,7 +95,7 @@
 <a ng-href="/users/{{currentUser.id}}">
   <div class="row">
     <div class="col-sm-2">
-      <img src="{{currentUser.gravatar_url}}" size="40px" class="img-circle"/>
+      <img ng-src="{{currentUser.gravatar_url}}" size="40px" class="img-circle"/>
     </div>
     <div class="col-sm-10">
       <p><strong>{{currentUser.name}}</strong></p>
@@ -121,6 +120,22 @@
 <edit-dashboard-form dashboard="newDashboard" id="new_dashboard_dialog"></edit-dashboard-form>
 <div ng-view></div>
 
+{% raw %}
+<div class="container-fluid footer">
+  <hr/>
+  <div class="container">
+    <div class="row">
+      <a href="http://redash.io">re:dash</a> <span ng-bind="version"></span>
+      <small ng-if="newVersionAvailable" ng-cloak class="ng-cloak"><a href="http://version.redash.io/">(new re:dash version available)</a></small>
+      <div class="pull-right">
+        <a href="http://docs.redash.io/">Docs</a>
+        <a href="http://github.com/getredash/redash">Contribute</a>
+      </div>
+    </div>
+  </div>
+</div>
+{% endraw %}
+
 <script src="/bower_components/jquery/jquery.js"></script>
 
 <!-- build:js /scripts/plugins.js -->
@@ -160,11 +175,13 @@
 <script src="/bower_components/bucky/bucky.js"></script>
 <script src="/bower_components/pace/pace.js"></script>
 <script src="/bower_components/mustache/mustache.js"></script>
 <script src="/bower_components/canvg/rgbcolor.js"></script>
 <script src="/bower_components/canvg/StackBlur.js"></script>
 <script src="/bower_components/canvg/canvg.js"></script>
 <script src="/bower_components/leaflet/dist/leaflet.js"></script>
 <script src="/bower_components/angular-bootstrap-show-errors/src/showErrors.js"></script>
+<script src="/bower_components/d3/d3.min.js"></script>
+<script src="/bower_components/angular-ui-sortable/sortable.js"></script>
 <!-- endbuild -->
 
 <!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -185,8 +202,11 @@
 <script src="/scripts/visualizations/cohort.js"></script>
 <script src="/scripts/visualizations/map.js"></script>
 <script src="/scripts/visualizations/counter.js"></script>
+<script src="/scripts/visualizations/boxplot.js"></script>
+<script src="/scripts/visualizations/box.js"></script>
 <script src="/scripts/visualizations/table.js"></script>
 <script src="/scripts/visualizations/pivot.js"></script>
+<script src="/scripts/visualizations/date_range_selector.js"></script>
 <script src="/scripts/directives/directives.js"></script>
 <script src="/scripts/directives/query_directives.js"></script>
 <script src="/scripts/directives/data_source_directives.js"></script>
@@ -197,7 +217,7 @@
 <script>
   // TODO: move currentUser & features to be an Angular service
-  var featureFlags = {{ features|safe }};
+  var clientConfig = {{ client_config|safe }};
   var currentUser = {{ user|safe }};
 
   currentUser.canEdit = function(object) {
```
```diff
@@ -10,6 +10,7 @@ angular.module('redash', [
   'angular-growl',
   'angularMoment',
   'ui.bootstrap',
+  'ui.sortable',
   'smartTable.table',
   'ngResource',
   'ngRoute',
@@ -19,7 +20,7 @@ angular.module('redash', [
   'ngSanitize'
 ]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider', 'uiSelectConfig',
   function ($routeProvider, $locationProvider, $compileProvider, growlProvider, uiSelectConfig) {
-    if (featureFlags.clientSideMetrics) {
+    if (clientConfig.clientSideMetrics) {
       Bucky.setOptions({
         host: '/api/metrics'
       });
```
```diff
@@ -3,7 +3,8 @@
     if (!value) {
       return "-";
     }
-    return value.toDate().toLocaleString();
+
+    return value.format(clientConfig.dateTimeFormat);
   };
 
   var QuerySearchCtrl = function($scope, $location, $filter, Events, Query) {
@@ -150,14 +151,16 @@
   }
 
   var MainCtrl = function ($scope, $location, Dashboard, notifications) {
-    if (featureFlags.clientSideMetrics) {
+    $scope.version = clientConfig.version;
+    $scope.newVersionAvailable = clientConfig.newVersionAvailable && currentUser.hasPermission("admin");
+
+    if (clientConfig.clientSideMetrics) {
       $scope.$on('$locationChangeSuccess', function(event, newLocation, oldLocation) {
         // This will be called once per actual page load.
         Bucky.sendPagePerformance();
       });
     }
 
     $scope.dashboards = [];
     $scope.reloadDashboards = function () {
       Dashboard.query(function (dashboards) {
```
```diff
@@ -16,7 +16,9 @@
       var w = new Widget(widget);
 
       if (w.visualization) {
-        promises.push(w.getQuery().getQueryResult().toPromise());
+        var queryResult = w.getQuery().getQueryResult();
+        if (angular.isDefined(queryResult))
+          promises.push(queryResult.toPromise());
       }
 
       return w;
```
```diff
@@ -17,7 +17,7 @@
     saveQuery = $scope.saveQuery;
 
     $scope.sourceMode = true;
-    $scope.canEdit = currentUser.canEdit($scope.query) || featureFlags.allowAllToEditQueries;
+    $scope.canEdit = currentUser.canEdit($scope.query) || clientConfig.allowAllToEditQueries;
     $scope.isDirty = false;
 
     $scope.newVisualization = undefined;
```
```diff
@@ -19,14 +19,35 @@
       $scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
     }
 
+    var getDataSourceId = function() {
+      // Try to get the query's data source id
+      var dataSourceId = $scope.query.data_source_id;
+
+      // If there is no source yet, then parse what we have in localStorage
+      // e.g. `null` -> `NaN`, malformed data -> `NaN`, "1" -> 1
+      if (dataSourceId === undefined) {
+        dataSourceId = parseInt(localStorage.lastSelectedDataSourceId, 10);
+      }
+
+      // If we had an invalid value in localStorage (e.g. nothing, deleted source), then use the first data source
+      var isValidDataSourceId = !isNaN(dataSourceId) && _.some($scope.dataSources, function(ds) {
+        return ds.id == dataSourceId;
+      });
+      if (!isValidDataSourceId) {
+        dataSourceId = $scope.dataSources[0].id;
+      }
+
+      // Return our data source id
+      return dataSourceId;
+    }
+
     $scope.dataSource = {};
     $scope.query = $route.current.locals.query;
 
     var updateSchema = function() {
       $scope.hasSchema = false;
       $scope.editorSize = "col-md-12";
-      var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
-      DataSource.getSchema({id: dataSourceId}, function(data) {
+      DataSource.getSchema({id: getDataSourceId()}, function(data) {
         if (data && data.length > 0) {
           $scope.schema = data;
           _.each(data, function(table) {
@@ -48,12 +69,14 @@
     $scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
     $scope.canViewSource = currentUser.hasPermission('view_source');
     $scope.canExecuteQuery = currentUser.hasPermission('execute_query');
+    $scope.canScheduleQuery = currentUser.hasPermission('schedule_query');
 
     $scope.dataSources = DataSource.query(function(dataSources) {
       updateSchema();
 
       if ($scope.query.isNew()) {
-        $scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
+        $scope.query.data_source_id = getDataSourceId();
         $scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
       }
     });
@@ -104,9 +127,14 @@
     };
 
     $scope.executeQuery = function() {
+      if (!$scope.canExecuteQuery) {
+        return;
+      }
+
       if (!$scope.query.query) {
         return;
       }
 
       getQueryResult(0);
       $scope.lockButton(true);
       $scope.cancelling = false;
@@ -146,6 +174,7 @@
     $scope.updateDataSource = function() {
       Events.record(currentUser, 'update_data_source', 'query', $scope.query.id);
+      localStorage.lastSelectedDataSourceId = $scope.query.data_source_id;
 
       $scope.query.latest_query_data = null;
       $scope.query.latest_query_data_id = null;
@@ -212,7 +241,7 @@
     });
 
     $scope.openScheduleForm = function() {
-      if (!$scope.isQueryOwner) {
+      if (!$scope.isQueryOwner || !$scope.canScheduleQuery) {
         return;
       };
```
```diff
@@ -142,6 +142,11 @@
   $scope.setType = function (type) {
     $scope.type = type;
+    if (type == 'textbox') {
+      $scope.widgetSizes.push({name: 'Hidden', value: 0});
+    } else if ($scope.widgetSizes.length > 2) {
+      $scope.widgetSizes.pop();
+    }
   };
 
   var reset = function() {
@@ -186,7 +191,6 @@
   $scope.saveWidget = function() {
     $scope.saveInProgress = true;
-
     var widget = new Widget({
       'visualization_id': $scope.selectedVis && $scope.selectedVis.id,
       'dashboard_id': $scope.dashboard.id,
```
```diff
@@ -49,6 +49,10 @@
       prop.type = 'file';
     }
 
+    if (prop.type == 'boolean') {
+      prop.type = 'checkbox';
+    }
+
     prop.required = _.contains(type.configuration_schema.required, name);
   });
 });
```
```diff
@@ -281,4 +281,34 @@
     }
   };
 });
 
+directives.directive('onDestroy', function () {
+  /* This directive can be used to invoke a callback when an element is destroyed,
+  A useful example is the following:
+  <div ng-if="includeText" on-destroy="form.text = null;">
+    <input type="text" ng-model="form.text">
+  </div>
+  */
+  return {
+    restrict: "A",
+    scope: {
+      onDestroy: "&",
+    },
+    link: function(scope, elem, attrs) {
+      console.log(scope.onDestroy);
+      scope.$on('$destroy', function() {
+        scope.onDestroy();
+      });
+    }
+  };
+});
+
+directives.directive('colorBox', function () {
+  return {
+    restrict: "E",
+    scope: {color: "="},
+    template: "<span style='width: 12px; height: 12px; background-color: {{color}}; display: inline-block; margin-right: 5px;'></span>"
+  };
+});
+
 })();
```
```diff
@@ -48,6 +48,9 @@ angular.module('redash.filters', []).
   .filter('colWidth', function () {
     return function (widgetWidth) {
+      if (widgetWidth == 0) {
+        return 0;
+      }
       if (widgetWidth == 1) {
         return 6;
       }
@@ -79,7 +82,7 @@ angular.module('redash.filters', []).
     }
 
     var html = marked(text);
-    if (featureFlags.allowScriptsInUserInput) {
+    if (clientConfig.allowScriptsInUserInput) {
       html = $sce.trustAsHtml(html);
     }
 
@@ -94,4 +97,21 @@ angular.module('redash.filters', []).
       }
       return $sce.trustAsHtml(text);
     }
-  }]);
+  }])
+
+  .filter('remove', function() {
+    return function(items, item) {
+      if (items == undefined)
+        return items;
+      if (item instanceof Array) {
+        var notEquals = function(other) { return item.indexOf(other) == -1; }
+      } else {
+        var notEquals = function(other) { return item != other; }
+      }
+      var filtered = [];
+      for (var i = 0; i < items.length; i++)
+        if (notEquals(items[i]))
+          filtered.push(items[i])
+      return filtered;
+    };
+  });
```
```diff
@@ -11,6 +11,11 @@
     'Light Blue': '#92A8CD',
     'Lilac': '#A47D7C',
     'Light Green': '#B5CA92',
+    'Brown':'#A52A2A',
+    'Black':'#000000',
+    'Gray':'#808080',
+    'Pink':'#FFC0CB',
+    'Dark Blue':'#00008b'
   };
 
   Highcharts.setOptions({
@@ -50,7 +55,7 @@
     ;
 
     if (moment.isMoment(this.x)) {
-      var s = '<b>' + this.x.toDate().toLocaleString() + '</b>',
+      var s = '<b>' + this.x.format(clientConfig.dateTimeFormat) + '</b>',
           pointsCount = this.points.length;
 
       $.each(this.points, function (i, point) {
@@ -92,19 +97,6 @@
     buttons: {
       contextButton: {
         menuItems: [
-          {
-            text: 'Toggle % Stacking',
-            onclick: function () {
-              var newStacking = "normal";
-              if (this.series[0].options.stacking == "normal") {
-                newStacking = "percent";
-              }
-
-              _.each(this.series, function (series) {
-                series.update({stacking: newStacking}, true);
-              });
-            }
-          },
           {
             text: 'Select All',
             onclick: function () {
@@ -266,6 +258,9 @@
   };
 
   var chartOptions = $.extend(true, {}, defaultOptions, chartsDefaults);
+  chartOptions.plotOptions.series = {
+    turboThreshold: clientConfig.highChartsTurboThreshold
+  }
 
   // $timeout makes sure that this function invoked after the DOM ready. When draw/init
   // invoked after the DOM is ready, we see first an empty HighCharts objects and later
```
```diff
@@ -10,7 +10,7 @@ function getNestedValue (obj, keys) {
 function getKeyFromObject(obj, key) {
   var value = obj[key];
 
-  if ((!_.include(obj, key) && _.string.include(key, '.'))) {
+  if ((!_.has(obj, key) && _.string.include(key, '.'))) {
     var keys = key.split(".");
 
     value = getNestedValue(obj, keys);
@@ -248,7 +248,12 @@ function getKeyFromObject(obj, key) {
         element.html(column.cellTemplate);
         compile(element.contents())(childScope);
       } else {
-        element.html(sanitize(scope.formatedValue));
+        if (typeof scope.formatedValue === 'string' || scope.formatedValue instanceof String) {
+          element.html(sanitize(scope.formatedValue));
+        } else {
+          element.text(scope.formatedValue);
+        }
       }
     }
```
```diff
@@ -713,7 +718,7 @@ angular.module("partials/smartTable.html", []).run(["$templateCache", function (
 "    </tbody>\n" +
 "    <tfoot ng-show=\"isPaginationEnabled\">\n" +
 "    <tr class=\"smart-table-footer-row\">\n" +
-"        <td colspan=\"{{columns.length}}\">\n" +
+"        <td class=\"text-center\" colspan=\"{{columns.length}}\">\n" +
 "            <div pagination-smart-table=\"\" num-pages=\"numberOfPages\" max-size=\"maxSize\" current-page=\"currentPage\"></div>\n" +
 "        </td>\n" +
 "    </tr>\n" +
```
```diff
@@ -43,10 +43,10 @@
         if (angular.isNumber(v)) {
           columnTypes[k] = 'float';
         } else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}T/)) {
-          row[k] = moment(v);
+          row[k] = moment.utc(v);
           columnTypes[k] = 'datetime';
-        } else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}/)) {
-          row[k] = moment(v);
+        } else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}$/)) {
+          row[k] = moment.utc(v);
           columnTypes[k] = 'date';
         } else if (typeof(v) == 'object' && v !== null) {
           row[k] = JSON.stringify(v);
@@ -186,9 +186,38 @@
       }
 
       return this.filteredData;
     };
 
+    /**
+     * Helper function to add a point into a series, also checks whether the point is within dateRange
+     */
+    QueryResult.prototype._addPointToSeriesIfInDateRange = function (point, seriesCollection, seriesName, dateRange) {
+      if (dateRange && moment.isMoment(point.x)) {
+        // if dateRange is provided and x Axis is of type datetime
+        if (point.x.isBefore(dateRange.min) || point.x.isAfter(dateRange.max)) {
+          // if the point's date isn't within dateRange, then we will not add this point to series
+          return;
+        }
+      }
+      this._addPointToSeries(point, seriesCollection, seriesName);
+    }
+
-    QueryResult.prototype.getChartData = function (mapping) {
+    /**
+     * Helper function to add a point into a series
+     */
+    QueryResult.prototype._addPointToSeries = function (point, seriesCollection, seriesName) {
+      if (seriesCollection[seriesName] == undefined) {
+        seriesCollection[seriesName] = {
+          name: seriesName,
+          type: 'column',
+          data: []
+        };
+      }
+
+      seriesCollection[seriesName]['data'].push(point);
+    };
+
+    QueryResult.prototype.getChartData = function (mapping, dateRange) {
       var series = {};
 
       _.each(this.getData(), function (row) {
@@ -199,7 +228,7 @@
       _.each(row, function (value, definition) {
         var name = definition.split("::")[0] || definition.split("__")[0];
-        var type = definition.split("::")[1] || definition.split("__")[0];
+        var type = definition.split("::")[1] || definition.split("__")[1];
         if (mapping) {
           type = mapping[definition];
         }
@@ -229,26 +258,15 @@
           }
         });
 
-        var addPointToSeries = function (seriesName, point) {
-          if (series[seriesName] == undefined) {
-            series[seriesName] = {
-              name: seriesName,
-              type: 'column',
-              data: []
-            }
-          }
-
-          series[seriesName]['data'].push(point);
-        }
-
         if (seriesName === undefined) {
           _.each(yValues, function (yValue, seriesName) {
-            addPointToSeries(seriesName, {'x': xValue, 'y': yValue});
-          });
-        } else {
-          addPointToSeries(seriesName, point);
+            this._addPointToSeriesIfInDateRange({'x': xValue, 'y': yValue}, series, seriesName, dateRange);
+          }.bind(this));
         }
-      });
+        else {
+          this._addPointToSeriesIfInDateRange(point, series, seriesName, dateRange);
+        }
+      }.bind(this));
 
       return _.values(series);
     };
```
307
rd_ui/app/scripts/visualizations/box.js
Normal file
307
rd_ui/app/scripts/visualizations/box.js
Normal file
@@ -0,0 +1,307 @@
|
||||
(function() {
|
||||
|
||||
// Inspired by http://informationandvisualization.de/blog/box-plot
|
||||
d3.box = function() {
|
||||
var width = 1,
|
||||
height = 1,
|
||||
duration = 0,
|
||||
domain = null,
|
||||
value = Number,
|
||||
whiskers = boxWhiskers,
|
||||
quartiles = boxQuartiles,
|
||||
tickFormat = null;
|
||||
|
||||
// For each small multiple…
|
||||
function box(g) {
|
||||
g.each(function(d, i) {
|
||||
d = d.map(value).sort(d3.ascending);
|
||||
var g = d3.select(this),
|
||||
n = d.length,
|
||||
min = d[0],
|
||||
max = d[n - 1];
|
||||
|
||||
// Compute quartiles. Must return exactly 3 elements.
|
||||
var quartileData = d.quartiles = quartiles(d);
|
||||
|
||||
// Compute whiskers. Must return exactly 2 elements, or null.
|
||||
var whiskerIndices = whiskers && whiskers.call(this, d, i),
|
||||
whiskerData = whiskerIndices && whiskerIndices.map(function(i) { return d[i]; });
|
||||
|
||||
// Compute outliers. If no whiskers are specified, all data are "outliers".
|
||||
// We compute the outliers as indices, so that we can join across transitions!
|
||||
var outlierIndices = whiskerIndices
|
||||
? d3.range(0, whiskerIndices[0]).concat(d3.range(whiskerIndices[1] + 1, n))
|
||||
: d3.range(n);
|
||||
|
||||
// Compute the new x-scale.
|
||||
var x1 = d3.scale.linear()
|
||||
.domain(domain && domain.call(this, d, i) || [min, max])
|
||||
.range([height, 0]);
|
||||
|
||||
// Retrieve the old x-scale, if this is an update.
|
||||
var x0 = this.__chart__ || d3.scale.linear()
|
||||
.domain([0, Infinity])
|
||||
.range(x1.range());
|
||||
|
||||
// Stash the new scale.
|
||||
this.__chart__ = x1;
|
||||
|
||||
// Note: the box, median, and box tick elements are fixed in number,
|
||||
// so we only have to handle enter and update. In contrast, the outliers
|
||||
// and other elements are variable, so we need to exit them! Variable
|
||||
// elements also fade in and out.
|
||||
|
||||
// Update center line: the vertical line spanning the whiskers.
|
||||
var center = g.selectAll("line.center")
|
||||
.data(whiskerData ? [whiskerData] : []);
|
||||
|
||||
center.enter().insert("line", "rect")
|
||||
.attr("class", "center")
|
||||
.attr("x1", width / 2)
|
||||
.attr("y1", function(d) { return x0(d[0]); })
|
||||
.attr("x2", width / 2)
|
||||
.attr("y2", function(d) { return x0(d[1]); })
|
||||
.style("opacity", 1e-6)
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.style("opacity", 1)
|
||||
.attr("y1", function(d) { return x1(d[0]); })
|
||||
.attr("y2", function(d) { return x1(d[1]); });
|
||||
|
||||
center.transition()
|
||||
.duration(duration)
|
||||
.style("opacity", 1)
|
||||
.attr("y1", function(d) { return x1(d[0]); })
|
||||
.attr("y2", function(d) { return x1(d[1]); });
|
||||
|
||||
center.exit().transition()
|
||||
.duration(duration)
|
||||
.style("opacity", 1e-6)
|
||||
.attr("y1", function(d) { return x1(d[0]); })
|
||||
.attr("y2", function(d) { return x1(d[1]); })
|
||||
.remove();
|
||||
|
||||
// Update innerquartile box.
|
||||
var box = g.selectAll("rect.box")
|
||||
.data([quartileData]);
|
||||
|
||||
box.enter().append("rect")
|
||||
.attr("class", "box")
|
||||
.attr("x", 0)
|
||||
.attr("y", function(d) { return x0(d[2]); })
|
||||
.attr("width", width)
|
||||
.attr("height", function(d) { return x0(d[0]) - x0(d[2]); })
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("y", function(d) { return x1(d[2]); })
|
||||
.attr("height", function(d) { return x1(d[0]) - x1(d[2]); });
|
||||
|
||||
box.transition()
|
||||
.duration(duration)
|
||||
.attr("y", function(d) { return x1(d[2]); })
|
||||
.attr("height", function(d) { return x1(d[0]) - x1(d[2]); });
|
||||
|
||||
box.exit().remove()
|
||||
|
||||
// Update median line.
|
||||
var medianLine = g.selectAll("line.median")
|
||||
.data([quartileData[1]]);
|
||||
|
||||
medianLine.enter().append("line")
|
||||
.attr("class", "median")
|
||||
.attr("x1", 0)
|
||||
.attr("y1", x0)
|
||||
.attr("x2", width)
|
||||
.attr("y2", x0)
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("y1", x1)
|
||||
.attr("y2", x1);
|
||||
|
||||
medianLine.transition()
|
||||
.duration(duration)
|
||||
.attr("y1", x1)
|
||||
.attr("y2", x1);
|
||||
|
||||
medianLine.exit().remove()
|
||||
|
||||
// Update whiskers.
|
||||
var whisker = g.selectAll("line.whisker")
|
||||
.data(whiskerData || []);
|
||||
|
||||
whisker.enter().insert("line", "circle, text")
|
||||
.attr("class", "whisker")
|
||||
.attr("x1", 0)
|
||||
.attr("y1", x0)
|
||||
.attr("x2", width)
|
||||
.attr("y2", x0)
|
||||
.style("opacity", 1e-6)
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("y1", x1)
|
||||
.attr("y2", x1)
|
||||
.style("opacity", 1);
|
||||
|
||||
whisker.transition()
|
||||
.duration(duration)
|
||||
.attr("y1", x1)
|
||||
.attr("y2", x1)
|
||||
.style("opacity", 1);
|
||||
|
||||
whisker.exit().transition()
|
||||
.duration(duration)
|
||||
.attr("y1", x1)
|
||||
.attr("y2", x1)
|
||||
.style("opacity", 1e-6)
|
||||
.remove();
|
||||
|
||||
// Update outliers.
|
||||
var outlier = g.selectAll("circle.outlier")
|
||||
.data(outlierIndices, Number);
|
||||
|
||||
outlier.enter().insert("circle", "text")
|
||||
.attr("class", "outlier")
|
||||
.attr("r", 5)
|
||||
.attr("cx", width / 2)
|
||||
.attr("cy", function(i) { return x0(d[i]); })
|
||||
.style("opacity", 1e-6)
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("cy", function(i) { return x1(d[i]); })
|
||||
.style("opacity", 1);
|
||||
|
||||
outlier.transition()
|
||||
.duration(duration)
|
||||
.attr("cy", function(i) { return x1(d[i]); })
|
||||
.style("opacity", 1);
|
||||
|
||||
outlier.exit().transition()
|
||||
.duration(duration)
|
||||
.attr("cy", function(i) { return x1(d[i]); })
|
||||
.style("opacity", 1e-6)
|
||||
.remove();
|
||||
|
||||
// Compute the tick format.
|
||||
var format = tickFormat || x1.tickFormat(8);
|
||||
|
||||
// Update box ticks.
|
||||
var boxTick = g.selectAll("text.box")
|
||||
.data(quartileData);
|
||||
|
||||
boxTick.enter().append("text")
|
||||
.attr("class", "box")
|
||||
.attr("dy", ".3em")
|
||||
.attr("dx", function(d, i) { return i & 1 ? 6 : -6 })
|
||||
.attr("x", function(d, i) { return i & 1 ? width : 0 })
|
||||
.attr("y", x0)
|
||||
.attr("text-anchor", function(d, i) { return i & 1 ? "start" : "end"; })
|
||||
.text(format)
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("y", x1);
|
||||
|
||||
boxTick.transition()
|
||||
.duration(duration)
|
||||
.text(format)
|
||||
.attr("y", x1);
|
||||
|
||||
boxTick.exit().remove()
|
||||
|
||||
// Update whisker ticks. These are handled separately from the box
|
||||
// ticks because they may or may not exist, and we want don't want
|
||||
// to join box ticks pre-transition with whisker ticks post-.
|
      var whiskerTick = g.selectAll("text.whisker")
          .data(whiskerData || []);

      whiskerTick.enter().append("text")
          .attr("class", "whisker")
          .attr("dy", ".3em")
          .attr("dx", 6)
          .attr("x", width)
          .attr("y", x0)
          .text(format)
          .style("opacity", 1e-6)
        .transition()
          .duration(duration)
          .attr("y", x1)
          .style("opacity", 1);

      whiskerTick.transition()
          .duration(duration)
          .text(format)
          .attr("y", x1)
          .style("opacity", 1);

      whiskerTick.exit().transition()
          .duration(duration)
          .attr("y", x1)
          .style("opacity", 1e-6)
          .remove();
    });
    d3.timer.flush();
  }

  box.width = function(x) {
    if (!arguments.length) return width;
    width = x;
    return box;
  };

  box.height = function(x) {
    if (!arguments.length) return height;
    height = x;
    return box;
  };

  box.tickFormat = function(x) {
    if (!arguments.length) return tickFormat;
    tickFormat = x;
    return box;
  };

  box.duration = function(x) {
    if (!arguments.length) return duration;
    duration = x;
    return box;
  };

  box.domain = function(x) {
    if (!arguments.length) return domain;
    domain = x == null ? x : d3.functor(x);
    return box;
  };

  box.value = function(x) {
    if (!arguments.length) return value;
    value = x;
    return box;
  };

  box.whiskers = function(x) {
    if (!arguments.length) return whiskers;
    whiskers = x;
    return box;
  };

  box.quartiles = function(x) {
    if (!arguments.length) return quartiles;
    quartiles = x;
    return box;
  };

  return box;
};

function boxWhiskers(d) {
  return [0, d.length - 1];
}

function boxQuartiles(d) {
  return [
    d3.quantile(d, .25),
    d3.quantile(d, .5),
    d3.quantile(d, .75)
  ];
}

})();
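For reference, a minimal usage sketch of the reusable-chart pattern the accessors above implement. This is a sketch only: it assumes d3 3.x and the d3.box plugin from this file are loaded, and the sample data and sizes are made up.

var sample = [1, 2, 3, 4, 5, 6, 20];

var chart = d3.box()        // each setter stores into the closure and
    .width(40)              // returns `box`, so configuration chains
    .height(200)
    .domain([0, 25])
    .duration(500);

d3.select("body").append("svg")
    .attr("width", 100)
    .attr("height", 240)
  .append("g")
    .attr("transform", "translate(30, 20)")
    .datum(sample)          // one array of numbers per box
    .call(chart);           // default boxWhiskers/boxQuartiles apply here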
rd_ui/app/scripts/visualizations/boxplot.js (new file, 173 lines)
@@ -0,0 +1,173 @@
|
||||
(function() {
|
||||
var module = angular.module('redash.visualization');
|
||||
|
||||
module.config(['VisualizationProvider', function(VisualizationProvider) {
|
||||
var renderTemplate =
|
||||
'<boxplot-renderer ' +
|
||||
'options="visualization.options" query-result="queryResult">' +
|
||||
'</boxplot-renderer>';
|
||||
|
||||
var editTemplate = '<boxplot-editor></boxplot-editor>';
|
||||
|
||||
VisualizationProvider.registerVisualization({
|
||||
type: 'BOXPLOT',
|
||||
name: 'Boxplot',
|
||||
renderTemplate: renderTemplate,
|
||||
editorTemplate: editTemplate
|
||||
});
|
||||
}
|
||||
]);
|
||||
module.directive('boxplotRenderer', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/boxplot.html',
|
||||
link: function($scope, elm, attrs) {
|
||||
|
||||
function iqr(k) {
|
||||
return function(d, i) {
|
||||
var q1 = d.quartiles[0],
|
||||
q3 = d.quartiles[2],
|
||||
iqr = (q3 - q1) * k,
|
||||
i = -1,
|
||||
j = d.length;
|
||||
while (d[++i] < q1 - iqr);
|
||||
while (d[--j] > q3 + iqr);
|
||||
return [i, j];
|
||||
};
|
||||
};
|
||||
|
||||
$scope.$watch('[queryResult && queryResult.getData(), visualization.options]', function () {
|
||||
|
||||
var data = $scope.queryResult.getData();
|
||||
var parentWidth = d3.select(elm[0].parentNode).node().getBoundingClientRect().width;
|
          var margin = {top: 10, right: 50, bottom: 40, left: 50, inner: 25},
              width = parentWidth - margin.right - margin.left,
              height = 500 - margin.top - margin.bottom;
          var min = Infinity,
              max = -Infinity;
          var mydata = [];
          var value = 0;
          var d = [];
          var xAxisLabel = $scope.visualization.options.xAxisLabel;
          var yAxisLabel = $scope.visualization.options.yAxisLabel;

          var columns = $scope.queryResult.columnNames;
          var xscale = d3.scale.ordinal()
              .domain(columns)
              .rangeBands([0, parentWidth - margin.left - margin.right]);

          var boxWidth;
          if (columns.length > 1) {
            boxWidth = Math.min(xscale(columns[1]), 120.0);
          } else {
            boxWidth = 120.0;
          }
          margin.inner = boxWidth / 3.0;

          _.each(columns, function(column, i) {
            d = mydata[i] = [];
            _.each(data, function (row) {
              value = row[column];
              d.push(value);
              if (value > max) max = Math.ceil(value);
              if (value < min) min = Math.floor(value);
            });
          });

          var yscale = d3.scale.linear()
              .domain([min * 0.99, max * 1.01])
              .range([height, 0]);

          var chart = d3.box()
              .whiskers(iqr(1.5))
              .width(boxWidth - 2 * margin.inner)
              .height(height)
              .domain([min * 0.99, max * 1.01]);
          var xAxis = d3.svg.axis()
              .scale(xscale)
              .orient("bottom");

          var yAxis = d3.svg.axis()
              .scale(yscale)
              .orient("left");

          var xLines = d3.svg.axis()
              .scale(xscale)
              .tickSize(height)
              .orient("bottom");

          var yLines = d3.svg.axis()
              .scale(yscale)
              .tickSize(width)
              .orient("right");

          var barOffset = function(i) {
            return xscale(columns[i]) + (xscale(columns[1]) - margin.inner) / 2.0;
          };

          d3.select(elm[0]).selectAll("svg").remove();

          var plot = d3.select(elm[0])
            .append("svg")
              .attr("width", parentWidth)
              .attr("height", height + margin.bottom + margin.top)
            .append("g")
              .attr("width", parentWidth - margin.left - margin.right)
              .attr("transform", "translate(" + margin.left + "," + margin.top + ")");

          d3.select("svg").append("text")
              .attr("class", "box")
              .attr("x", parentWidth / 2.0)
              .attr("text-anchor", "middle")
              .attr("y", height + margin.bottom)
              .text(xAxisLabel);

          d3.select("svg").append("text")
              .attr("class", "box")
              .attr("transform", "translate(10," + (height + margin.top + margin.bottom) / 2.0 + ")rotate(-90)")
              .attr("text-anchor", "middle")
              .text(yAxisLabel);

          plot.append("rect")
              .attr("class", "grid-background")
              .attr("width", width)
              .attr("height", height);

          plot.append("g")
              .attr("class", "grid")
              .call(yLines);

          plot.append("g")
              .attr("class", "grid")
              .call(xLines);

          plot.append("g")
              .attr("class", "x axis")
              .attr("transform", "translate(0," + height + ")")
              .call(xAxis);

          plot.append("g")
              .attr("class", "y axis")
              .call(yAxis);

          plot.selectAll(".box").data(mydata)
            .enter().append("g")
              .attr("class", "box")
              .attr("width", boxWidth)
              .attr("height", height)
              .attr("transform", function(d, i) { return "translate(" + barOffset(i) + "," + 0 + ")"; })
              .call(chart);
        }, true);
      }
    }
  });

  module.directive('boxplotEditor', function() {
    return {
      restrict: 'E',
      templateUrl: '/views/visualizations/boxplot_editor.html'
    };
  });

})();
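A worked sketch of the iqr(k) whisker rule used above (Tukey's rule with k = 1.5). The sample array is made up; d3.quantile is the same helper the d3.box defaults use.

var d = [1, 2, 4, 5, 6, 7, 8, 40];      // already sorted
var q1 = d3.quantile(d, 0.25),          // 3.5
    q3 = d3.quantile(d, 0.75),          // 7.25
    fence = (q3 - q1) * 1.5;            // 5.625
// Fences: q1 - fence = -2.125 and q3 + fence = 12.875, so the two while
// loops in iqr() return indices bracketing the values 1..8, and 40 falls
// outside the whiskers and is drawn as a circle.outlier.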
@@ -3,12 +3,16 @@

chartVisualization.config(['VisualizationProvider', function (VisualizationProvider) {
  var renderTemplate = '<chart-renderer options="visualization.options" query-result="queryResult"></chart-renderer>';
  var editTemplate = '<chart-editor></chart-editor>';
  var editTemplate = '<chart-editor options="visualization.options" query-result="queryResult"></chart-editor>';

  var defaultOptions = {
    'series': {
      // 'type': 'column',
      'stacking': null
    }
    globalSeriesType: 'column',
    sortX: true,
    yAxis: [{type: 'linear'}, {type: 'linear', opposite: true}],
    xAxis: {type: 'datetime', labels: {enabled: true}},
    series: {stacking: null},
    seriesOptions: {},
    columnMapping: {}
  };

  VisualizationProvider.registerVisualization({
@@ -27,30 +31,62 @@
      queryResult: '=',
      options: '=?'
    },
    template: "<chart options='chartOptions' series='chartSeries' class='graph'></chart>",
    templateUrl: '/views/visualizations/chart.html',
    replace: false,
    controller: ['$scope', function ($scope) {
      $scope.chartSeries = [];
      $scope.chartOptions = {};
      $scope.dateRangeEnabled = function() {
        return $scope.options.xAxis && $scope.options.xAxis.type === 'datetime';
      }
      $scope.dateRange = { min: moment('1970-01-01'), max: moment() };

      var reloadData = function(data) {
      /**
       * Update date range by finding date extremes
       *
       * ISSUE: chart.getExtreme() does not support getting a Moment object out of the box
       * TODO: Find a faster way to do this
       */
      var setDateRangeToExtreme = function (allSeries) {
        if ($scope.dateRangeEnabled() && allSeries && allSeries.length > 0) {
          $scope.dateRange = {
            min: moment.min.apply(undefined, _.map(allSeries, function (series) {
              return moment.min(_.pluck(series.data, 'x'));
            })),
            max: moment.max.apply(undefined, _.map(allSeries, function (series) {
              return moment.max(_.pluck(series.data, 'x'));
            }))
          };
        }
      };

      var reloadData = function(data, options) {
        options = options || {};
        if (!data || ($scope.queryResult && $scope.queryResult.getData()) == null) {
          $scope.chartSeries.splice(0, $scope.chartSeries.length);
        } else {
          $scope.chartSeries.splice(0, $scope.chartSeries.length);
          var allSeries = $scope.queryResult.getChartData($scope.options.columnMapping);
          if (!options.preventSetExtreme) {
            setDateRangeToExtreme(allSeries);
          }
          var allSeries = $scope.queryResult.getChartData(
            $scope.options.columnMapping,
            $scope.dateRangeEnabled() ? $scope.dateRange : null
          );

          _.each($scope.queryResult.getChartData($scope.options.columnMapping), function (s) {
          _.each(allSeries, function (series) {
            var additional = {'stacking': 'normal'};
            if ('globalSeriesType' in $scope.options) {
              additional['type'] = $scope.options.globalSeriesType;
            }
            if ($scope.options.seriesOptions && $scope.options.seriesOptions[s.name]) {
              additional = $scope.options.seriesOptions[s.name];
            if ($scope.options.seriesOptions && $scope.options.seriesOptions[series.name]) {
              additional = $scope.options.seriesOptions[series.name];
              if (!additional.name || additional.name == "") {
                additional.name = s.name;
                additional.name = series.name;
              }
            }
            $scope.chartSeries.push(_.extend(s, additional));
            $scope.chartSeries.push(_.extend(series, additional));
          });
        };
      };
@@ -73,6 +109,22 @@
      $scope.$watch('queryResult && queryResult.getData()', function (data) {
        reloadData(data);
      });

      $scope.$watch('dateRange.min', function(minDateRange, oldMinDateRange) {
        if (!minDateRange.isSame(oldMinDateRange)) {
          reloadData(true, {
            preventSetExtreme: true
          });
        }
      });

      $scope.$watch('dateRange.max', function (maxDateRange, oldMaxDateRange) {
        if (!maxDateRange.isSame(oldMaxDateRange)) {
          reloadData(true, {
            preventSetExtreme: true
          });
        }
      });
    }]
  };
});
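The preventSetExtreme flag in the two watches above breaks a feedback loop; a sketch of the flow, with hypothetical values not taken from the diff:

// User picks a new start date in the selector:
//   $scope.dateRange.min = moment('2015-06-01');
// -> the 'dateRange.min' watch fires
// -> reloadData(true, {preventSetExtreme: true}) re-filters the series to
//    the chosen range but skips setDateRangeToExtreme(), so the user's
//    range is not overwritten and the watch does not re-fire in a loop.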
@@ -81,178 +133,135 @@
  return {
    restrict: 'E',
    templateUrl: '/views/visualizations/chart_editor.html',
    scope: {
      queryResult: '=',
      options: '=?'
    },
    link: function (scope, element, attrs) {
      scope.palette = ColorPalette;

      scope.seriesTypes = {
        'Line': 'line',
        'Column': 'column',
        'Area': 'area',
        'Scatter': 'scatter',
        'Pie': 'pie'
      };

      scope.globalSeriesType = scope.visualization.options.globalSeriesType || 'column';
      scope.colors = _.extend({'Automatic': null}, ColorPalette);

      scope.stackingOptions = {
        "None": "none",
        "Normal": "normal",
        "Percent": "percent"
        'Disabled': null,
        'Enabled': 'normal',
        'Percent': 'percent'
      };

      scope.xAxisOptions = {
        "Date/Time": "datetime",
        "Linear": "linear",
        "Category": "category"
      scope.chartTypes = {
        'line': {name: 'Line', icon: 'line-chart'},
        'column': {name: 'Bar', icon: 'bar-chart'},
        'area': {name: 'Area', icon: 'area-chart'},
        'pie': {name: 'Pie', icon: 'pie-chart'},
        'scatter': {name: 'Scatter', icon: 'circle-o'}
      };

      scope.xAxisType = "datetime";
      scope.stacking = "none";
      scope.chartTypeChanged = function() {
        _.each(scope.options.seriesOptions, function(options) {
          options.type = scope.options.globalSeriesType;
        });
      }

      scope.xAxisScales = ['datetime', 'linear', 'logarithmic', 'category'];
      scope.yAxisScales = ['linear', 'logarithmic'];

      scope.columnTypes = {
        "X": "x",
        "Y": "y",
        "Series": "series",
        "Unused": "unused"
      };

      scope.series = [];

      scope.columnTypeSelection = {};

      var chartOptionsUnwatch = null,
          columnsWatch = null;

      scope.$watch('globalSeriesType', function(type, old) {
        scope.visualization.options.globalSeriesType = type;

        if (type && old && type !== old && scope.visualization.options.seriesOptions) {
          _.each(scope.visualization.options.seriesOptions, function(sOptions) {
            sOptions.type = type;
      var refreshColumns = function() {
        scope.columns = scope.queryResult.getColumns();
        scope.columnNames = _.pluck(scope.columns, 'name');
        if (scope.columnNames.length > 0)
          _.each(_.difference(_.keys(scope.options.columnMapping), scope.columnNames), function(column) {
            delete scope.options.columnMapping[column];
          });
      };
      refreshColumns();

      var refreshColumnsAndForm = function() {
        refreshColumns();
        if (!scope.queryResult.getData() || scope.queryResult.getData().length == 0 || scope.columns.length == 0)
          return;
        scope.form.yAxisColumns = _.intersection(scope.form.yAxisColumns, scope.columnNames);
        if (!_.contains(scope.columnNames, scope.form.xAxisColumn))
          scope.form.xAxisColumn = undefined;
        if (!_.contains(scope.columnNames, scope.form.groupby))
          scope.form.groupby = undefined;
      }

      var refreshSeries = function() {
        var seriesNames = _.pluck(scope.queryResult.getChartData(scope.options.columnMapping), 'name');
        var existing = _.keys(scope.options.seriesOptions);
        _.each(_.difference(seriesNames, existing), function(name) {
          scope.options.seriesOptions[name] = {
            'type': scope.options.globalSeriesType,
            'yAxis': 0,
          };
          scope.form.seriesList.push(name);
        });
        _.each(_.difference(existing, seriesNames), function(name) {
          scope.form.seriesList = _.without(scope.form.seriesList, name);
          delete scope.options.seriesOptions[name];
        });
      };

      scope.$watch('options.columnMapping', refreshSeries, true);

      scope.$watch(function() {return [scope.queryResult.getId(), scope.queryResult.status]}, function(changed) {
        if (!changed[0]) {
          return;
        }
        refreshColumnsAndForm();
        refreshSeries();
      }, true);

      scope.form = {
        yAxisColumns: [],
        seriesList: _.sortBy(_.keys(scope.options.seriesOptions), function(name) {
          return scope.options.seriesOptions[name].zIndex;
        })
      };

      scope.$watchCollection('form.seriesList', function(value, old) {
        _.each(value, function(name, index) {
          scope.options.seriesOptions[name].zIndex = index;
          scope.options.seriesOptions[name].index = 0; // is this needed?
        });
      });

      var setColumnRole = function(role, column) {
        scope.options.columnMapping[column] = role;
      }
      var unsetColumn = function(column) {
        setColumnRole('unused', column);
      }

      scope.$watchCollection('form.yAxisColumns', function(value, old) {
        _.each(old, unsetColumn);
        _.each(value, _.partial(setColumnRole, 'y'));
      });

      scope.$watch('form.xAxisColumn', function(value, old) {
        if (old !== undefined)
          unsetColumn(old);
        if (value !== undefined)
          setColumnRole('x', value);
      });

      scope.$watch('form.groupby', function(value, old) {
        if (old !== undefined)
          unsetColumn(old);
        if (value !== undefined) {
          setColumnRole('series', value);
        }
      });

      scope.$watch('visualization.type', function (visualizationType) {
        if (visualizationType == 'CHART') {
          if (scope.visualization.options.series.stacking === null) {
            scope.stacking = "none";
          } else if (scope.visualization.options.series.stacking === undefined) {
            scope.stacking = "normal";
          } else {
            scope.stacking = scope.visualization.options.series.stacking;
          }

          if (scope.visualization.options.sortX === undefined) {
            scope.visualization.options.sortX = true;
          }

          var refreshSeries = function() {
            scope.series = _.map(scope.queryResult.getChartData(scope.visualization.options.columnMapping), function (s) { return s.name; });
            // TODO: remove unneeded ones?
            if (scope.visualization.options.seriesOptions == undefined) {
              scope.visualization.options.seriesOptions = {
                type: scope.globalSeriesType
              };
            };

            _.each(scope.series, function(s, i) {
              if (scope.visualization.options.seriesOptions[s] == undefined) {
                scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
              }
              scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
              scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
            });
            scope.zIndexes = _.range(scope.series.length);
            scope.yAxes = [[0, 'left'], [1, 'right']];
          };

          var initColumnMapping = function() {
            scope.columns = scope.queryResult.getColumns();

            if (scope.visualization.options.columnMapping == undefined) {
              scope.visualization.options.columnMapping = {};
            }

            scope.columnTypeSelection = scope.visualization.options.columnMapping;

            _.each(scope.columns, function(column) {
              var definition = column.name.split("::"),
                  definedColumns = _.keys(scope.visualization.options.columnMapping);

              if (_.indexOf(definedColumns, column.name) != -1) {
                // Skip already defined columns.
                return;
              }

              if (definition.length == 1) {
                scope.columnTypeSelection[column.name] = scope.visualization.options.columnMapping[column.name] = 'unused';
              } else if (definition == 'multi-filter') {
                scope.columnTypeSelection[column.name] = scope.visualization.options.columnMapping[column.name] = 'series';
              } else if (_.indexOf(_.values(scope.columnTypes), definition[1]) != -1) {
                scope.columnTypeSelection[column.name] = scope.visualization.options.columnMapping[column.name] = definition[1];
              } else {
                scope.columnTypeSelection[column.name] = scope.visualization.options.columnMapping[column.name] = 'unused';
              }
            });
          };

          columnsWatch = scope.$watch('queryResult.getId()', function(id) {
            if (!id) {
              return;
            }

            initColumnMapping();
            refreshSeries();
          });

          scope.$watchCollection('columnTypeSelection', function(selections) {
            _.each(scope.columnTypeSelection, function(type, name) {
              scope.visualization.options.columnMapping[name] = type;
            });

            refreshSeries();
          });

          chartOptionsUnwatch = scope.$watch("stacking", function (stacking) {
            if (stacking == "none") {
              scope.visualization.options.series.stacking = null;
            } else {
              scope.visualization.options.series.stacking = stacking;
            }
          });

          scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
          scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
          if (scope.visualization.options.xAxis.labels.enabled === undefined) {
            scope.visualization.options.xAxis.labels.enabled = true;
          }

          scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;

          xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {
            scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
            scope.visualization.options.xAxis.type = xAxisType;
          });
        } else {
          if (chartOptionsUnwatch) {
            chartOptionsUnwatch();
            chartOptionsUnwatch = null;
          }
if (columnsWatch) {
|
||||
columnWatch();
|
||||
columnWatch = null;
|
||||
}
|
||||
|
||||
if (xAxisUnwatch) {
|
||||
xAxisUnwatch();
|
||||
xAxisUnwatch = null;
|
||||
}
|
||||
}
|
||||
});
|
||||
if (scope.columnNames)
|
||||
_.each(scope.options.columnMapping, function(value, key) {
|
||||
if (scope.columnNames.length > 0 && !_.contains(scope.columnNames, key))
|
||||
return;
|
||||
if (value == 'x')
|
||||
scope.form.xAxisColumn = key;
|
||||
else if (value == 'y')
|
||||
scope.form.yAxisColumns.push(key);
|
||||
else if (value == 'series')
|
||||
scope.form.groupby = key;
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
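The "::" suffix that initColumnMapping parses above lets a query pre-assign chart roles in its column aliases. A hedged illustration (the query is hypothetical, not from this diff):

// SELECT created_at AS "created_at::x", count(0) AS "count::y",
//        source     AS "source::series"
var definition = "count::y".split("::");   // ["count", "y"]
// definition[1] matches a value in scope.columnTypes ('x', 'y', 'series',
// 'unused'), so the column is mapped to the 'y' role; columns without a
// recognized suffix fall back to 'unused'.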
@@ -1,67 +1,87 @@
(function () {
  var cohortVisualization = angular.module('redash.visualization');
  var cohortVisualization = angular.module('redash.visualization');

  cohortVisualization.config(['VisualizationProvider', function(VisualizationProvider) {
    VisualizationProvider.registerVisualization({
      type: 'COHORT',
      name: 'Cohort',
      renderTemplate: '<cohort-renderer options="visualization.options" query-result="queryResult"></cohort-renderer>'
    });
  }]);
  cohortVisualization.config(['VisualizationProvider', function (VisualizationProvider) {

  cohortVisualization.directive('cohortRenderer', function() {
    return {
      restrict: 'E',
      scope: {
        queryResult: '='
      },
      template: "",
      replace: false,
      link: function($scope, element, attrs) {
        $scope.$watch('queryResult && queryResult.getData()', function (data) {
          if (!data) {
            return;
          }
    var editTemplate = '<cohort-editor></cohort-editor>';
    var defaultOptions = {
      'timeInterval': 'daily'
    };

          if ($scope.queryResult.getData() == null) {

          } else {
            var sortedData = _.sortBy($scope.queryResult.getData(), function(r) {
              return r['date'] + r['day_number'];
            });

            var grouped = _.groupBy(sortedData, "date");
            var maxColumns = _.reduce(grouped, function(memo, data) {
              return (data.length > memo) ? data.length : memo;
            }, 0);
            var data = _.map(grouped, function(values, date) {
              var row = [values[0].total];
              _.each(values, function(value) { row.push(value.value); });
              _.each(_.range(values.length, maxColumns), function() { row.push(null); });
              return row;
            });

            var initialDate = moment(sortedData[0].date).toDate(),
                container = angular.element(element)[0];

            Cornelius.draw({
              initialDate: initialDate,
              container: container,
              cohort: data,
              title: null,
              timeInterval: 'daily',
              labels: {
                time: 'Activation Day',
                people: 'Users'
              },
              formatHeaderLabel: function (i) {
                return "Day " + (i - 1);
              }
            });
          }
        });
      }
    }
    VisualizationProvider.registerVisualization({
      type: 'COHORT',
      name: 'Cohort',
      renderTemplate: '<cohort-renderer options="visualization.options" query-result="queryResult"></cohort-renderer>',
      editorTemplate: editTemplate,
      defaultOptions: defaultOptions
    });
  }]);

}());
  cohortVisualization.directive('cohortRenderer', function () {
    return {
      restrict: 'E',
      scope: {
        queryResult: '=',
        options: '='
      },
      template: "",
      replace: false,
      link: function ($scope, element, attrs) {
        $scope.options.timeInterval = $scope.options.timeInterval || 'daily';

        var updateCohort = function () {
          if ($scope.queryResult.getData() === null) {
            return;
          }

          var sortedData = _.sortBy($scope.queryResult.getData(), function (r) {
            return r['date'] + r['day_number'];
          });

          var grouped = _.groupBy(sortedData, "date");

          var maxColumns = _.reduce(grouped, function (memo, data) {
            return (data.length > memo) ? data.length : memo;
          }, 0);

          var data = _.map(grouped, function (values, date) {
            var row = [values[0].total];
            _.each(values, function (value) {
              row.push(value.value);
            });
            _.each(_.range(values.length, maxColumns), function () {
              row.push(null);
            });
            return row;
          });

          var initialDate = moment(sortedData[0].date).toDate(),
              container = angular.element(element)[0];

          Cornelius.draw({
            initialDate: initialDate,
            container: container,
            cohort: data,
            title: null,
            timeInterval: $scope.options.timeInterval,
            labels: {
              time: 'Time',
              people: 'Users',
              weekOf: 'Week of'
            }
          });
        }

        $scope.$watch('queryResult && queryResult.getData()', updateCohort);
        $scope.$watch('options.timeInterval', updateCohort);
      }
    }
  });
  cohortVisualization.directive('cohortEditor', function () {
    return {
      restrict: 'E',
      templateUrl: '/views/visualizations/cohort_editor.html'
    }
  });

}());
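For orientation, the shape updateCohort hands to Cornelius.draw as `cohort` (the numbers below are made up): one row per cohort date, the row's first cell being the cohort total, followed by one value per period, right-padded with nulls to the longest row.

var cohort = [
  [120, 120, 80, 41],     // initialDate's cohort: total, then 3 periods
  [95,  95,  60, null],   // next cohort, one period shorter
  [70,  70, null, null]
];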
rd_ui/app/scripts/visualizations/date_range_selector.js (new file, 43 lines)
@@ -0,0 +1,43 @@
(function (window) {
  var module = angular.module('redash.visualization');

  module.directive('dateRangeSelector', [function () {
    return {
      restrict: 'E',
      scope: {
        dateRange: "="
      },
      templateUrl: '/views/visualizations/date_range_selector.html',
      replace: true,
      controller: ['$scope', function ($scope) {
        $scope.dateRangeHuman = {
          min: null,
          max: null
        };

        $scope.$watch('dateRange', function (dateRange, oldDateRange, scope) {
          scope.dateRangeHuman.min = dateRange.min.format('YYYY-MM-DD');
          scope.dateRangeHuman.max = dateRange.max.format('YYYY-MM-DD');
        });

        $scope.$watch('dateRangeHuman', function (dateRangeHuman, oldDateRangeHuman, scope) {
          var newDateRangeMin = moment.utc(dateRangeHuman.min);
          var newDateRangeMax = moment.utc(dateRangeHuman.max);
          if (!newDateRangeMin ||
              !newDateRangeMax ||
              !newDateRangeMin.isValid() ||
              !newDateRangeMax.isValid() ||
              newDateRangeMin.isAfter(newDateRangeMax)) {
            // Prevent invalid date input.
            // No need to show the user a notification here; it would be too
            // noisy. Instead, silently prevent the change to the scope.
            scope.dateRangeHuman = oldDateRangeHuman;
            return;
          }
          scope.dateRange.min = newDateRangeMin;
          scope.dateRange.max = newDateRangeMax;
        }, true);
      }]
    }
  }]);
})(window);
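A minimal sketch of the guard the controller above applies to hand-typed dates (assumes moment.js is loaded; the input values are hypothetical):

var min = moment.utc('2015-03-01'),
    max = moment.utc('2015-02-01');
min.isValid();        // true - both parse, but...
min.isAfter(max);     // true - inverted range, so the watch restores the
                      // previous dateRangeHuman instead of propagating it.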
@@ -79,14 +79,14 @@
    } else if (columnType === 'date') {
      columnDefinition.formatFunction = function (value) {
        if (value && moment.isMoment(value)) {
          return value.toDate().toLocaleDateString();
          return value.format(clientConfig.dateFormat);
        }
        return value;
      };
    } else if (columnType === 'datetime') {
      columnDefinition.formatFunction = function (value) {
        if (value && moment.isMoment(value)) {
          return value.toDate().toLocaleString();
          return value.format(clientConfig.dateTimeFormat);
        }
        return value;
      };
@@ -356,10 +356,56 @@ counter-renderer counter-name {
  display: block;
}

.box {
  font: 10px sans-serif;
}

.box line,
.box rect,
.box circle {
  fill: #fff;
  stroke: #000;
  stroke-width: 1.5px;
}

.box .center {
  stroke-dasharray: 3,3;
}

.box .outlier {
  fill: none;
  stroke: #000;
}
.axis text {
  font: 10px sans-serif;
}

.axis path,
.axis line {
  fill: none;
  stroke: #000;
  shape-rendering: crispEdges;
}

.grid-background {
  fill: #ddd;
}
.grid path,
.grid line {
  fill: none;
  stroke: #fff;
  shape-rendering: crispEdges;
}
.grid .minor line {
  stroke-opacity: .5;
}
.grid text {
  display: none;
}

.rd-widget-textbox p {
  margin-bottom: 0;
}

.iframe-container {
  height: 100%;
}
@@ -386,16 +432,54 @@ div.table-name {
  padding: 30px;
}

/*
  bootstrap's hidden-xs class adds display:block when not hidden
  use this class when you need to keep the original display value
*/
@media (max-width: 767px) {
  .rd-hidden-xs {
    display: none !important;
  }
}

.log-container {
  margin-bottom: 50px;
}

/* Footer */

.footer {
  color: #818d9f;
  padding-bottom: 30px;
}

.footer a {
  color: #818d9f;
  margin-left: 20px;
}

.col-table .missing-value {
  color: #b94a48;
}

.col-table .super-small-input {
  padding-left: 3px;
  height: 24px;
}

.col-table .ui-select-toggle, .col-table .ui-select-search {
  padding: 2px;
  padding-left: 5px;
  height: 24px;
}

.clearable button {
  border-top-right-radius: 0;
  border-bottom-right-radius: 0;
}

/* Immediately apply ng-cloak, instead of waiting for angular.js to load: */
[ng\:cloak], [ng-cloak], [data-ng-cloak], [x-ng-cloak], .ng-cloak, .x-ng-cloak {
  display: none !important;
}

/* Smart Table */

.smart-table {
  margin-bottom: 0px;
}

.smart-table .pagination {
  margin-bottom: 5px;
  margin-top: 10px;
}
@@ -58,7 +58,7 @@
  </div>
</div>

<div class="panel panel-default rd-widget-textbox" ng-if="type=='textbox'" ng-mouseenter="showControls = true" ng-mouseleave="showControls = false">
<div class="panel panel-default rd-widget-textbox" ng-hide="widget.width == 0" ng-if="type=='textbox'" ng-mouseenter="showControls = true" ng-mouseleave="showControls = false">
  <div class="panel-body">
    <div class="row">
      <div class="col-lg-11">

@@ -10,9 +10,9 @@
<div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
  <label>{{input.title || name | capitalize}}</label>
  <input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
         ng-if="input.type !== 'file'" accesskey="tab">
         ng-if="input.type !== 'file'" accesskey="tab" placeholder="{{input.default}}">

  <input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
  <input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required && !dataSource.options[name]"
         base-sixty-four-input
         ng-if="input.type === 'file'">
</div>

@@ -17,4 +17,5 @@
  <a href="/admin/status" class="list-group-item">Status</a>
</div>
</div>
</div>
</div>

@@ -26,7 +26,7 @@
</div>

<div class="col-lg-2">
  <div class="rd-hidden-xs pull-right">
  <div class="pull-right">
    <query-source-link></query-source-link>
  </div>
</div>
@@ -63,12 +63,12 @@
<div ng-class="editorSize">
  <div>
    <p>
      <button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
      <button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()" ng-if="canExecuteQuery">
        <span class="glyphicon glyphicon-play"></span> Execute
      </button>
      <query-formatter></query-formatter>
      <span class="pull-right">
        <button class="btn btn-xs btn-default rd-hidden-xs" ng-click="duplicateQuery()">
        <button class="btn btn-xs btn-default" ng-click="duplicateQuery()">
          <span class="glyphicon glyphicon-share-alt"></span> Fork
        </button>

@@ -103,7 +103,7 @@
</div>
<hr ng-if="sourceMode">
<div class="row">
  <div class="col-lg-3 rd-hidden-xs">
  <div class="col-lg-3">
    <p>
      <span class="glyphicon glyphicon-user"></span>
      <span class="text-muted">Created By </span>
@@ -148,7 +148,7 @@
<p>
  <a class="btn btn-primary btn-sm" ng-disabled="queryExecuting || !queryResult.getData()" query-result-link target="_self">
    <span class="glyphicon glyphicon-cloud-download"></span>
    <span class="rd-hidden-xs">Download Dataset</span>
    <span>Download Dataset</span>
  </a>

  <a class="btn btn-warning btn-sm" ng-disabled="queryExecuting" data-toggle="modal" data-target="#archive-confirmation-modal"
@@ -213,7 +213,7 @@
  <span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> ×</span>
</rd-tab>
<rd-tab tab-id="add" name="+ New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
<li ng-if="!sourceMode && canExecuteQuery" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>
</div>

@@ -26,7 +26,7 @@
  <span class="help-block error" ng-if="userForm.passwordRepeat.$error.compareTo">Passwords don't match.</span>
</div>
<div class="form-group">
  <button class="btn btn-primary">Save</button>
  <button class="btn btn-primary">Create</button>
</div>
</form>
</div>
rd_ui/app/views/visualizations/boxplot.html (new file, 2 lines)
@@ -0,0 +1,2 @@
<boxplot>
</boxplot>

rd_ui/app/views/visualizations/boxplot_editor.html (new file, 15 lines)
@@ -0,0 +1,15 @@
<div class="form-horizontal">
  <div class="form-group">
    <label class="col-lg-6">X Axis Label</label>
    <div class="col-lg-6">
      <input type="text" ng-model="visualization.options.xAxisLabel" class="form-control">
    </div>
  </div>

  <div class="form-group">
    <label class="col-lg-6">Y Axis Label</label>
    <div class="col-lg-6">
      <input type="text" ng-model="visualization.options.yAxisLabel" class="form-control">
    </div>
  </div>
</div>

rd_ui/app/views/visualizations/chart.html (new file, 8 lines)
@@ -0,0 +1,8 @@
<div>
  <section class="clearfix">
    <date-range-selector ng-if="dateRangeEnabled()" date-range='dateRange' class='pull-right'></date-range-selector>
  </section>
  <section>
    <chart options='chartOptions' series='chartSeries' class='graph'></chart>
  </section>
</div>
@@ -1,151 +1,236 @@
<div class="form-horizontal">
<div class="panel panel-default">
<form class="form-horizontal" name="chartEditor">

  <div class="row">
    <div class="col-md-6">
      <div class="form-group row">

        <label class="control-label col-sm-5">Chart Type</label>
        <div class="col-sm-7" ng-if="chartTypes"><!--the if is a weird workaround-->
          <ui-select ng-model="options.globalSeriesType" on-select="chartTypeChanged()">
            <ui-select-match placeholder="Choose chart type..."><i class="fa fa-{{$select.selected.value.icon}}"></i> {{$select.selected.value.name}}</ui-select-match>
            <ui-select-choices repeat="info.chartType as (chartType, info) in chartTypes">
              <div><i class="fa fa-{{info.value.icon}}"></i><span> </span><span ng-bind-html="info.value.name | highlight: $select.search"></span></div>
            </ui-select-choices>
          </ui-select>
        </div>

      </div>
    </div>

    <div class="col-md-6">
      <div class="form-group row">

        <label class="control-label col-sm-5">Stacking</label>

        <div class="col-sm-7" ng-if="stackingOptions"><!--the if is a weird workaround-->
          <ui-select ng-model="options.series.stacking" ng-disabled="['line', 'area', 'column'].indexOf(options.globalSeriesType) == -1">
            <ui-select-match placeholder="Choose Stacking...">{{$select.selected.key | capitalize}}</ui-select-match>
            <ui-select-choices repeat="value.value as (key, value) in stackingOptions">
              <div ng-bind-html="value.key | highlight: $select.search"></div>
            </ui-select-choices>
          </ui-select>
        </div>

      </div>
    </div>
  </div>

  <div class="row">
    <div class="col-md-6">
      <div class="form-group row" ng-class="{'has-error': chartEditor.xAxisColumn.$invalid}">

        <label class="control-label col-sm-5">X Column</label>

        <div class="col-sm-7">
          <ui-select name="xAxisColumn" required ng-model="form.xAxisColumn">
            <ui-select-match placeholder="Choose column...">{{$select.selected}}</ui-select-match>
            <ui-select-choices repeat="column in columnNames | remove:form.yAxisColumns | remove:form.groupby">
              <span ng-bind-html="column | highlight: $select.search"></span><span> </span><small class="text-muted" ng-bind="columns[column].type"></small>
            </ui-select-choices>
          </ui-select>
        </div>
      </div>
    </div>
    <div class="col-md-6">
      <div class="form-group row">

        <label class="control-label col-sm-5">Group by</label>

        <div class="col-sm-7">

          <ui-select name="groupby" ng-model="form.groupby" class="clearable">
            <ui-select-match allow-clear="true" placeholder="Choose column...">{{$select.selected}}</ui-select-match>
            <ui-select-choices repeat="column in columnNames | remove:form.yAxisColumns | remove:form.xAxisColumn">
              <span ng-bind-html="column | highlight: $select.search"></span><span> </span><small class="text-muted" ng-bind="columns[column].type"></small>
            </ui-select-choices>
          </ui-select>

        </div>
      </div>
    </div>
  </div>

  <div class="row">
    <div class="col-md-6">
      <!-- not using regular validation (chartEditor.yAxisColumns.$invalid) due to a bug in ui-select with multiple choices-->
      <div class="form-group row" ng-class="{'has-error': !form.yAxisColumns || form.yAxisColumns.length == 0}">

        <label class="control-label col-sm-5">Y Columns</label>

        <div class="col-sm-7">

          <ui-select multiple name="yAxisColumns" required ng-model="form.yAxisColumns">
            <ui-select-match placeholder="Choose columns...">{{$item}}</ui-select-match>
            <ui-select-choices repeat="column in columnNames | remove:form.groupby | remove:form.xAxisColumn">
              <span ng-bind-html="column | highlight: $select.search"></span><span> </span><small class="text-muted" ng-bind="columns[column].type"></small>
            </ui-select-choices>
          </ui-select>

        </div>
      </div>
    </div>
  </div>

  <div class="row">
    <div class="col-md-6">
      <div class="panel panel-default">

        <div class="panel-heading">
          <h3 class="panel-title">X Axis</h3>
        </div>

        <div class="panel-body">
          <div class="form-group">
            <label class="control-label col-sm-2">Stacking</label>
          <div class="row">

            <div class="col-sm-10">
              <select required ng-model="stacking"
                      ng-options="value as key for (key, value) in stackingOptions"
                      class="form-control"></select>
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-2">X Axis Type</label>

            <div class="col-sm-10">
              <select required ng-model="xAxisType" ng-options="value as key for (key, value) in xAxisOptions"
                      class="form-control"></select>
            <div class="col-md-6">
              <div class="form-group row">
                <label class="control-label col-sm-3">Scale</label>
                <div class="col-sm-9">
                  <ui-select ng-model="options.xAxis.type">
                    <ui-select-match placeholder="Choose Scale...">{{$select.selected | capitalize}}</ui-select-match>
                    <ui-select-choices repeat="scaleType in xAxisScales">
                      <div ng-bind-html="scaleType | capitalize | highlight: $select.search"></div>
                    </ui-select-choices>
                  </ui-select>
                </div>
              </div>
            </div>

          <div class="form-group">
            <label class="control-label col-sm-2">Series Type</label>

            <div class="col-sm-10">
              <select required ng-options="value as key for (key, value) in seriesTypes"
                      ng-model="globalSeriesType" class="form-control"></select>
            <div class="col-md-6">
              <div class="form-group row">
                <label class="control-label col-sm-8">Sort Values</label>
                <div class="col-sm-4">
                  <input type="checkbox" ng-model="options.sortX">
                </div>
              </div>
            </div>

          <div class="form-group">
            <label class="control-label col-sm-2">y Axis min</label>
          </div>
          <div class="row">

            <div class="col-sm-10">
              <input name="yAxisMin" type="number" class="form-control"
                     ng-model="visualization.options.yAxis.min"
                     placeholder="Auto">
            <div class="col-md-6">
              <div class="form-group row">
                <label class="control-label col-sm-3">Name</label>
                <div class="col-sm-9">
                  <input ng-model="options.xAxis.title.text" type="text" class="form-control">
                </div>
              </div>
            </div>
          <div class="form-group">
            <label class="control-label col-sm-2">y Axis max</label>

            <div class="col-sm-10">
              <input name="yAxisMin" type="number" class="form-control"
                     ng-model="visualization.options.yAxis.max"
                     placeholder="Auto">
            <div class="col-md-6">
              <div class="form-group row">
                <label class="control-label col-sm-8">Show Labels</label>
                <div class="col-sm-4">
                  <input type="checkbox" ng-model="options.xAxis.labels.enabled">
                </div>
              </div>
            </div>
          <div class="form-group">
            <label class="control-label col-sm-2">Sort X Values</label>

            <div class="col-sm-10">
              <input name="sortX" type="checkbox" class="form-control"
                     ng-model="visualization.options.sortX">
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-2">Show X Axis Labels</label>

            <div class="col-sm-10">
              <input name="sortX" type="checkbox" class="form-control"
                     ng-model="visualization.options.xAxis.labels.enabled">
            </div>
          </div>
        </div>
      </div>
    </div>
  </div>
  <div ng-repeat="yAxis in options.yAxis" class="col-md-3">
    <div class="panel panel-default">

  <div class="row">
    <div class="col-lg-6">
      <div class="list-group">
        <div class="list-group-item active">
          Columns Mapping
        </div>
        <div class="list-group-item">
          <div class="form-group" ng-repeat="column in columns">
            <label class="control-label col-sm-4">{{column.name}}</label>

            <div class="col-sm-8">
              <select ng-options="value as key for (key, value) in columnTypes" class="form-control"
                      ng-model="columnTypeSelection[column.name]"></select>
            </div>
          </div>
        </div>
      </div>
      <div class="panel-heading">
        <h3 class="panel-title">{{$index == 0 ? 'Left' : 'Right'}} Y Axis</h3>
      </div>

    <div class="col-lg-6" ng-if="series.length > 0">
      <div class="list-group" ng-repeat="seriesName in series">
        <div class="list-group-item active">
          {{seriesName}}
        </div>
        <div class="list-group-item">
          <div class="form-group">
            <label class="control-label col-sm-3">Type</label>

            <div class="col-sm-9">
              <select required ng-model="visualization.options.seriesOptions[seriesName].type"
                      ng-options="value as key for (key, value) in seriesTypes"
                      class="form-control"></select>
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-3">zIndex</label>

            <div class="col-sm-9">
              <select required ng-model="visualization.options.seriesOptions[seriesName].zIndex"
                      ng-options="o as o for o in zIndexes"
                      class="form-control"></select>
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-3">Index</label>

            <div class="col-sm-9">
              <select required ng-model="visualization.options.seriesOptions[seriesName].index"
                      ng-options="o as o for o in zIndexes"
                      class="form-control"></select>
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-3">y Axis</label>

            <div class="col-sm-9">
              <select required ng-model="visualization.options.seriesOptions[seriesName].yAxis"
                      ng-options="o[0] as o[1] for o in yAxes"
                      class="form-control"></select>
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-3">Name</label>

            <div class="col-sm-9">
              <input name="seriesName" type="text" class="form-control"
                     ng-model="visualization.options.seriesOptions[seriesName].name"
                     placeholder="{{seriesName}}">
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-sm-3">Color</label>

            <div class="col-sm-9">
              <select class="form-control" ng-model="visualization.options.seriesOptions[seriesName].color" ng-options="val as key for (key,val) in palette"></select>
            </div>
          </div>
        </div>
      <div class="panel-body">
        <div class="form-group row">
          <label class="control-label col-sm-3">Scale</label>
          <div class="col-sm-9">
            <ui-select ng-model="yAxis.type">
              <ui-select-match placeholder="Choose Scale...">{{$select.selected | capitalize}}</ui-select-match>
              <ui-select-choices repeat="scaleType in yAxisScales">
                <div ng-bind-html="scaleType | capitalize | highlight: $select.search"></div>
              </ui-select-choices>
            </ui-select>
          </div>


        </div>
        <div class="form-group row">
          <label class="control-label col-sm-3">Name</label>
          <div class="col-sm-9">
            <input ng-model="yAxis.title.text" type="text" class="form-control">
          </div>
        </div>
      </div>
    </div>
  </div>
</div>

<div class="row">
  <div class="col-md-12">

    <div class="panel panel-default">

      <div class="panel-heading"><h3 class="panel-title">Series Options</h3></div>

      <div>
        <table class="table table-condensed col-table">
          <thead>
            <th>zIndex</th>
            <th>Column</th>
            <th>Left Y Axis</th>
            <th>Right Y Axis</th>
            <th>Label</th>
            <th>Color</th>
            <th>Type</th>
          </thead>
          <tbody ui-sortable ng-model="form.seriesList">
            <tr ng-repeat="name in form.seriesList">
              <td style="cursor: move;"><i class="fa fa-arrows-v"></i> <span ng-bind="options.seriesOptions[name].zIndex + 1"></span></td>
              <td>{{name}}</td>
              <td>
                <input type="radio" ng-value="0" ng-model="options.seriesOptions[name].yAxis">
              </td>
              <td>
                <input type="radio" ng-value="1" ng-model="options.seriesOptions[name].yAxis">
              </td>
              <td style="padding: 3px; width: 140px;">
                <input placeholder="{{name}}" class="form-control input-sm super-small-input" type="text" ng-model="options.seriesOptions[name].name">
              </td>
              <td style="padding: 3px; width: 35px;">
                <ui-select ng-model="options.seriesOptions[name].color">
                  <ui-select-match><color-box color="$select.selected.value"></color-box></ui-select-match>
                  <ui-select-choices repeat="color.value as (key, color) in colors">
                    <color-box color="color.value"></color-box><span ng-bind-html="color.key | capitalize | highlight: $select.search"></span>
                  </ui-select-choices>
                </ui-select>
              </td>
              <td style="padding: 3px; width: 105px;">
                <ui-select ng-model="options.seriesOptions[name].type">
                  <ui-select-match placeholder="Chart Type"><i class="fa fa-{{$select.selected.value.icon}}"></i> {{$select.selected.value.name}}</ui-select-match>
                  <ui-select-choices repeat="info.chartType as (chartType, info) in chartTypes">
                    <div><i class="fa fa-{{info.value.icon}}"></i><span> </span><span ng-bind-html="info.value.name | highlight: $select.search"></span></div>
                  </ui-select-choices>
                </ui-select>
              </td>
            </tr>
          </tbody>
        </table>
      </div>
    </div>

</form>
@@ -1,15 +1,8 @@
<div class="form-group">
  <label class="control-label">Time Label</label>
  <input type="text" class="form-control" ng-model="cohortOptions.timeLabel">
  <label class="control-label">People Label</label>
  <input type="text" class="form-control" ng-model="cohortOptions.peopleLabel">

  <label class="control-label">Bucket Column</label>
  <select ng-model="bucket_column" ng-options="value as key for (key, value) in columns" class="form-control"></select>
  <label class="control-label">Bucket Total Value Column</label>
  <select ng-model="total_column" ng-options="value as key for (key, value) in columns" class="form-control"></select>
  <label class="control-label">Day Number Column</label>
  <select ng-model="value_column" ng-options="value as key for (key, value) in columns" class="form-control"></select>
  <label class="control-label">Day Value Column</label>
  <select ng-model="day_column" ng-options="value as key for (key, value) in columns" class="form-control"></select>
</div>
  <label class="control-label">Time Interval</label>
  <select class="form-control" ng-model="visualization.options.timeInterval">
    <option value="daily">Daily</option>
    <option value="weekly">Weekly</option>
    <option value="monthly">Monthly</option>
  </select>
</div>
rd_ui/app/views/visualizations/date_range_selector.html (new file, 8 lines)
@@ -0,0 +1,8 @@
<div>
  <span>
    From <input type="date" ng-model="dateRangeHuman.min">
  </span>
  <span>
    To <input type="date" ng-model="dateRangeHuman.max">
  </span>
</div>
@@ -1,25 +1,42 @@
<div>
  <span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>

  <form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
    <div class="form-group">
      <label class="control-label">Name</label>
      <input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">
  <form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()" class="form-horizontal">
    <div class="row">
      <div class="col-md-6">
        <div class="form-group row">
          <label class="control-label col-sm-5">Visualization Type</label>

          <div class="col-sm-7">
            <select required ng-model="visualization.type" ng-options="value as key for (key, value) in visTypes" class="form-control" ng-change="typeChanged()"></select>
          </div>
        </div>
      </div>

      <div class="col-md-6">
        <div class="form-group row">
          <label class="control-label col-sm-5">Name</label>
          <div class="col-sm-7">
            <input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">

          </div>
        </div>
      </div>

    </div>

    <div class="form-group">
      <label class="control-label">Visualization Type</label>
      <select required ng-model="visualization.type" ng-options="value as key for (key, value) in visTypes" class="form-control" ng-change="typeChanged()"></select>
    <div class="row">
      <div class="col-md-12">
        <visualization-options-editor></visualization-options-editor>
      </div>
    </div>

    <visualization-options-editor></visualization-options-editor>

    <div class="form-group" ng-if="editRawOptions">
      <label class="control-label">Advanced</label>
      <textarea json-text ng-model="visualization.options" class="form-control" rows="10"></textarea>
    </div>

    <div class="form-group">
    <div class="form-group text-center">
      <button type="submit" class="btn btn-primary">Save</button>
    </div>


@@ -2,16 +2,16 @@
<div class="filter" ng-repeat="filter in filters">
  <ui-select ng-model="filter.current" ng-if="!filter.multiple">
    <ui-select-match placeholder="Select value for {{filter.friendlyName}}...">{{filter.friendlyName}}: {{$select.selected}}</ui-select-match>
    <ui-select-choices repeat="value in filter.values | filter: $select.search track by $index">
    <ui-select-choices repeat="value in filter.values | filter: $select.search">
      {{value}}
    </ui-select-choices>
  </ui-select>

  <ui-select ng-model="filter.current" multiple ng-if="filter.multiple">
    <ui-select-match placeholder="Select value for {{filter.friendlyName}}...">{{filter.friendlyName}}: {{$item}}</ui-select-match>
    <ui-select-choices repeat="value in filter.values | filter: $select.search track by $index">
    <ui-select-choices repeat="value in filter.values | filter: $select.search">
      {{value}}
    </ui-select-choices>
  </ui-select>
</div>
</div>
</div>
@@ -10,8 +10,8 @@
|
||||
"jquery": "1.9.1",
|
||||
"bootstrap": "3.0.0",
|
||||
"es5-shim": "2.0.8",
|
||||
"angular-moment": "0.2.0",
|
||||
"moment": "2.1.0",
|
||||
"angular-moment": "0.10.3",
|
||||
"moment": "~2.8.0",
|
||||
"codemirror": "4.8.0",
|
||||
"highcharts": "3.0.10",
|
||||
"underscore": "1.5.1",
|
||||
@@ -30,9 +30,11 @@
|
||||
"angular-ui-bootstrap-bower": "~0.12.1",
|
||||
"leaflet": "~0.7.3",
|
||||
"angular-base64-upload": "~0.1.11",
|
||||
"angular-ui-select": "0.8.2",
|
||||
"angular-ui-select": "~0.13.2",
|
||||
"angular-bootstrap-show-errors": "~2.3.0",
|
||||
"angular-sanitize": "1.2.18"
|
||||
"angular-sanitize": "1.2.18",
|
||||
"d3": "3.5.6",
|
||||
"angular-ui-sortable": "~0.13.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"angular-mocks": "1.2.18",
|
||||
|
||||
@@ -12,7 +12,6 @@
|
||||
"grunt-contrib-copy": "^0.5.0",
|
||||
"grunt-contrib-cssmin": "^0.9.0",
|
||||
"grunt-contrib-htmlmin": "^0.3.0",
|
||||
"grunt-contrib-imagemin": "^0.7.0",
|
||||
"grunt-contrib-jshint": "^0.10.0",
|
||||
"grunt-contrib-uglify": "^0.4.0",
|
||||
"grunt-contrib-watch": "^0.6.1",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
featureFlags = [];
|
||||
clientConfig = {};
|
||||
currentUser = {
|
||||
id: 1,
|
||||
name: 'John Mock',
|
||||
|
||||
@@ -7,7 +7,7 @@ from flask_mail import Mail
|
||||
from redash import settings
|
||||
from redash.query_runner import import_query_runners
|
||||
|
||||
__version__ = '0.8.1'
|
||||
__version__ = '0.8.3'
|
||||
|
||||
|
||||
def setup_logging():
|
||||
@@ -38,3 +38,6 @@ mail.init_mail(settings.all_settings())
|
||||
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
|
||||
|
||||
import_query_runners(settings.QUERY_RUNNERS)
|
||||
|
||||
from redash.version_check import reset_new_version_status
|
||||
reset_new_version_status()
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import json
|
||||
from flask_admin.contrib.peewee import ModelView
|
||||
from flask.ext.admin import Admin
|
||||
from flask.ext.admin.base import MenuLink
|
||||
from flask_admin.contrib.peewee.form import CustomModelConverter
|
||||
from flask_admin.form.widgets import DateTimePickerWidget
|
||||
from playhouse.postgres_ext import ArrayField, DateTimeTZField
|
||||
@@ -82,6 +83,9 @@ def init_admin(app):
|
||||
admin.add_view(QueryModelView(models.Query))
|
||||
admin.add_view(QueryResultModelView(models.QueryResult))
|
||||
admin.add_view(DashboardModelView(models.Dashboard))
|
||||
logout_link = MenuLink('Logout', '/logout', 'logout')
|
||||
|
||||
for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
|
||||
admin.add_view(BaseModelView(m))
|
||||
|
||||
admin.add_link(logout_link)
|
||||
@@ -52,6 +52,7 @@ def hmac_load_user_from_request(request):
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_user_from_api_key(api_key, query_id):
|
||||
if not api_key:
|
||||
return None
|
||||
@@ -67,8 +68,19 @@ def get_user_from_api_key(api_key, query_id):
|
||||
|
||||
return user
|
||||
|
||||
def api_key_load_user_from_request(request):
|
||||
|
||||
def get_api_key_from_request(request):
|
||||
api_key = request.args.get('api_key', None)
|
||||
|
||||
if api_key is None and request.headers.get('Authorization'):
|
||||
auth_header = request.headers.get('Authorization')
|
||||
api_key = auth_header.replace('Key ', '', 1)
|
||||
|
||||
return api_key
|
||||
|
||||
|
||||
def api_key_load_user_from_request(request):
|
||||
api_key = get_api_key_from_request(request)
|
||||
query_id = request.view_args.get('query_id', None)
|
||||
|
||||
user = get_user_from_api_key(api_key, query_id)
|
||||
|
||||
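The new get_api_key_from_request helper accepts the key either as an api_key query parameter or as an "Authorization: Key <value>" header (the "Key " prefix is stripped server-side). A minimal sketch of both client call styles, assuming a locally running instance and a placeholder key:

import requests

API_KEY = 'xxxxx'                # placeholder; use a real query API key
BASE = 'http://localhost:5000'   # assumed local re:dash instance

# Style 1: key in the query string
r1 = requests.get(BASE + '/api/queries/1/results.json',
                  params={'api_key': API_KEY})

# Style 2: key in the Authorization header
r2 = requests.get(BASE + '/api/queries/1/results.json',
                  headers={'Authorization': 'Key ' + API_KEY})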
@@ -2,7 +2,7 @@ from flask import request
|
||||
from flask.ext.restful import abort
|
||||
from flask_login import current_user
|
||||
|
||||
from funcy import distinct
|
||||
from funcy import distinct, take
|
||||
from itertools import chain
|
||||
|
||||
from redash import models
|
||||
@@ -19,7 +19,7 @@ class DashboardRecentAPI(BaseResource):
|
||||
if len(recent) < 10:
|
||||
global_recent = [d.to_dict() for d in models.Dashboard.recent()]
|
||||
|
||||
return distinct(chain(recent, global_recent), key=lambda d: d['id'])
|
||||
return take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
|
||||
|
||||
|
||||
class DashboardListAPI(BaseResource):
|
||||
|
||||
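The take(20, distinct(...)) wrapper caps the merged recent list at 20 items after de-duplicating by id; funcy's distinct preserves order and is lazy, so take can stop the chain early. A small illustration with plain dicts:

from itertools import chain
from funcy import distinct, take

recent = [{'id': 1}, {'id': 2}]
global_recent = [{'id': 2}, {'id': 3}]

merged = take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
print(merged)  # [{'id': 1}, {'id': 2}, {'id': 3}] -- duplicate id 2 dropped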
@@ -28,6 +28,9 @@ class DataSourceAPI(BaseResource):
|
||||
def post(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
req = request.get_json(True)
|
||||
|
||||
data_source.replace_secret_placeholders(req['options'])
|
||||
|
||||
if not validate_configuration(req['type'], req['options']):
|
||||
abort(400)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ from flask.ext.restful import abort
|
||||
from flask_login import current_user, login_required
|
||||
import sqlparse
|
||||
|
||||
from funcy import distinct
|
||||
from funcy import distinct, take
|
||||
from itertools import chain
|
||||
|
||||
from redash import models
|
||||
@@ -56,7 +56,7 @@ class QueryRecentAPI(BaseResource):
|
||||
if len(recent) < 10:
|
||||
global_recent = [d.to_dict() for d in models.Query.recent()]
|
||||
|
||||
return distinct(chain(recent, global_recent), key=lambda d: d['id'])
|
||||
return take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
|
||||
|
||||
|
||||
class QueryListAPI(BaseResource):
|
||||
|
||||
@@ -62,25 +62,8 @@ class QueryResultListAPI(BaseResource):
|
||||
|
||||
ONE_YEAR = 60 * 60 * 24 * 365.25
|
||||
|
||||
cache_headers = {
|
||||
'Cache-Control': 'max-age=%d' % ONE_YEAR
|
||||
}
|
||||
|
||||
class QueryResultAPI(BaseResource):
|
||||
@staticmethod
|
||||
def csv_response(query_result):
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(query_result.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
writer.writerow(row)
|
||||
|
||||
headers = {'Content-Type': "text/csv; charset=UTF-8"}
|
||||
headers.update(cache_headers)
|
||||
return make_response(s.getvalue(), 200, headers)
|
||||
|
||||
@staticmethod
|
||||
def add_cors_headers(headers):
|
||||
@@ -106,6 +89,7 @@ class QueryResultAPI(BaseResource):
|
||||
|
||||
@require_permission('view_query')
|
||||
def get(self, query_id=None, query_result_id=None, filetype='json'):
|
||||
should_cache = query_result_id is not None
|
||||
if query_result_id is None and query_id is not None:
|
||||
query = models.Query.get(models.Query.id == query_id)
|
||||
if query:
|
||||
@@ -133,21 +117,40 @@ class QueryResultAPI(BaseResource):
|
||||
|
||||
record_event.delay(event)
|
||||
|
||||
headers = {}
|
||||
if filetype == 'json':
|
||||
response = self.make_json_response(query_result)
|
||||
else:
|
||||
response = self.make_csv_response(query_result)
|
||||
|
||||
if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
|
||||
self.add_cors_headers(headers)
|
||||
self.add_cors_headers(response.headers)
|
||||
|
||||
if filetype == 'json':
|
||||
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
|
||||
headers.update(cache_headers)
|
||||
return make_response(data, 200, headers)
|
||||
else:
|
||||
return self.csv_response(query_result)
|
||||
if should_cache:
|
||||
response.headers.add_header('Cache-Control', 'max-age=%d' % ONE_YEAR)
|
||||
|
||||
return response
|
||||
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
def make_json_response(self, query_result):
|
||||
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
|
||||
return make_response(data, 200, {})
|
||||
|
||||
@staticmethod
|
||||
def make_csv_response(query_result):
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(query_result.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
writer.writerow(row)
|
||||
|
||||
headers = {'Content-Type': "text/csv; charset=UTF-8"}
|
||||
return make_response(s.getvalue(), 200, headers)
|
||||
|
||||
|
||||
api.add_resource(QueryResultListAPI, '/api/query_results', endpoint='query_results')
|
||||
api.add_resource(QueryResultAPI,
|
||||
|
||||
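The CSV branch builds the response with csv.DictWriter keyed on the result's column names; utils.UnicodeWriter is redash's own helper swapped in underneath to handle non-ASCII values. A condensed sketch of the same pattern (Python 2, as in the codebase), using only the standard library:

import csv
import cStringIO

query_data = {'columns': [{'name': 'id'}, {'name': 'name'}],
              'rows': [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]}

s = cStringIO.StringIO()
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
# redash replaces writer.writer with utils.UnicodeWriter here for unicode output
writer.writeheader()
for row in query_data['rows']:
    writer.writerow(row)

print(s.getvalue())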
@@ -4,8 +4,9 @@ import json
|
||||
from flask import render_template, send_from_directory, current_app
|
||||
from flask_login import current_user, login_required
|
||||
|
||||
from redash import settings
|
||||
from redash import settings, __version__, redis_connection
|
||||
from redash.wsgi import app
|
||||
from redash.version_check import get_latest_version
|
||||
|
||||
|
||||
@app.route('/admin/<anything>/<whatever>')
|
||||
@@ -36,13 +37,18 @@ def index(**kwargs):
|
||||
'permissions': current_user.permissions
|
||||
}
|
||||
|
||||
features = {
|
||||
client_config = {
|
||||
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
|
||||
'allowScriptsInUserInput': settings.ALLOW_SCRIPTS_IN_USER_INPUT
|
||||
'allowScriptsInUserInput': settings.ALLOW_SCRIPTS_IN_USER_INPUT,
|
||||
'highChartsTurboThreshold': settings.HIGHCHARTS_TURBO_THRESHOLD,
|
||||
'dateFormat': settings.DATE_FORMAT,
|
||||
'dateTimeFormat': "{0} HH:mm".format(settings.DATE_FORMAT),
|
||||
'newVersionAvailable': get_latest_version(),
|
||||
'version': __version__
|
||||
}
|
||||
|
||||
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
|
||||
features=json.dumps(features),
|
||||
client_config=json.dumps(client_config),
|
||||
analytics=settings.ANALYTICS)
|
||||
|
||||
|
||||
|
||||
@@ -131,7 +131,7 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
|
||||
|
||||
class Group(BaseModel):
|
||||
DEFAULT_PERMISSIONS = ['create_dashboard', 'create_query', 'edit_dashboard', 'edit_query',
|
||||
'view_query', 'view_source', 'execute_query', 'list_users']
|
||||
'view_query', 'view_source', 'execute_query', 'list_users', 'schedule_query']
|
||||
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField(max_length=100)
|
||||
@@ -263,6 +263,8 @@ class ActivityLog(BaseModel):
|
||||
|
||||
|
||||
class DataSource(BaseModel):
|
||||
SECRET_PLACEHOLDER = '--------'
|
||||
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField(unique=True)
|
||||
type = peewee.CharField()
|
||||
@@ -283,7 +285,7 @@ class DataSource(BaseModel):
|
||||
}
|
||||
|
||||
if all:
|
||||
d['options'] = json.loads(self.options)
|
||||
d['options'] = self.configuration
|
||||
d['queue_name'] = self.queue_name
|
||||
d['scheduled_queue_name'] = self.scheduled_queue_name
|
||||
|
||||
@@ -292,6 +294,23 @@ class DataSource(BaseModel):
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
@property
|
||||
def configuration(self):
|
||||
configuration = json.loads(self.options)
|
||||
schema = self.query_runner.configuration_schema()
|
||||
for prop in schema.get('secret', []):
|
||||
if prop in configuration and configuration[prop]:
|
||||
configuration[prop] = self.SECRET_PLACEHOLDER
|
||||
|
||||
return configuration
|
||||
|
||||
def replace_secret_placeholders(self, configuration):
|
||||
current_configuration = json.loads(self.options)
|
||||
schema = self.query_runner.configuration_schema()
|
||||
for prop in schema.get('secret', []):
|
||||
if prop in configuration and configuration[prop] == self.SECRET_PLACEHOLDER:
|
||||
configuration[prop] = current_configuration[prop]
|
||||
|
||||
def get_schema(self, refresh=False):
|
||||
key = "data_source:schema:{}".format(self.id)
|
||||
|
||||
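The two methods work as a pair: the configuration property masks secret fields with '--------' before options are shown in the UI, and replace_secret_placeholders restores the stored value when the form comes back with the placeholder untouched. A standalone restatement of the round-trip, outside the model:

SECRET_PLACEHOLDER = '--------'

def mask(configuration, secret_props):
    masked = dict(configuration)
    for prop in secret_props:
        if masked.get(prop):
            masked[prop] = SECRET_PLACEHOLDER
    return masked

def unmask(submitted, stored, secret_props):
    for prop in secret_props:
        if submitted.get(prop) == SECRET_PLACEHOLDER:
            submitted[prop] = stored[prop]

stored = {'host': 'db', 'password': 's3cret'}
shown = mask(stored, ['password'])    # {'host': 'db', 'password': '--------'}
unmask(shown, stored, ['password'])   # password restored to 's3cret'
print(shown)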
@@ -350,10 +369,10 @@ class QueryResult(BaseModel):
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def unused(cls):
|
||||
week_ago = datetime.datetime.now() - datetime.timedelta(days=7)
|
||||
def unused(cls, days=7):
|
||||
age_threshold = datetime.datetime.now() - datetime.timedelta(days=days)
|
||||
|
||||
unused_results = cls.select().where(Query.id == None, cls.retrieved_at < week_ago)\
|
||||
unused_results = cls.select().where(Query.id == None, cls.retrieved_at < age_threshold)\
|
||||
.join(Query, join_type=peewee.JOIN_LEFT_OUTER)
|
||||
|
||||
return unused_results
|
||||
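Making the age threshold a parameter lets the cleanup job honor the new REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE setting instead of a hard-coded week. The threshold computation itself is plain datetime arithmetic:

import datetime

def age_threshold(days=7):
    return datetime.datetime.now() - datetime.timedelta(days=days)

# results with retrieved_at older than this (and no owning query)
# become eligible for cleanup
print(age_threshold(days=30))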
@@ -602,19 +621,26 @@ class Alert(ModelTimestampsMixin, BaseModel):
|
||||
def all(cls):
|
||||
return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'query': self.query.to_dict(),
|
||||
'user': self.user.to_dict(),
|
||||
'options': self.options,
|
||||
'state': self.state,
|
||||
'last_triggered_at': self.last_triggered_at,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
def to_dict(self, full=True):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'options': self.options,
|
||||
'state': self.state,
|
||||
'last_triggered_at': self.last_triggered_at,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
if full:
|
||||
d['query'] = self.query.to_dict()
|
||||
d['user'] = self.user.to_dict()
|
||||
else:
|
||||
d['query_id'] = self._data['query']
|
||||
d['user_id'] = self._data['user']
|
||||
|
||||
return d
|
||||
|
||||
def evaluate(self):
|
||||
data = json.loads(self.query.latest_query_data.data)
|
||||
# todo: safe guard for empty
|
||||
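to_dict(full=False) returns the foreign keys (query_id, user_id) instead of serializing the joined objects, which keeps list endpoints cheap. A toy version of the same pattern, using plain dicts in place of the peewee models:

def alert_to_dict(alert, full=True):
    d = {'id': alert['id'], 'name': alert['name'], 'state': alert['state']}
    if full:
        d['query'] = alert['query']   # full nested dict
        d['user'] = alert['user']
    else:
        d['query_id'] = alert['query']['id']
        d['user_id'] = alert['user']['id']
    return d

alert = {'id': 1, 'name': 'cpu', 'state': 'ok',
         'query': {'id': 7}, 'user': {'id': 3}}
print(alert_to_dict(alert, full=False))
# {'id': 1, 'name': 'cpu', 'state': 'ok', 'query_id': 7, 'user_id': 3}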
@@ -856,7 +882,7 @@ class Event(BaseModel):
|
||||
return event
|
||||
|
||||
|
||||
all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
|
||||
all_models = (DataSource, User, QueryResult, Query, Alert, AlertSubscription, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
|
||||
|
||||
|
||||
def init_db():
|
||||
|
||||
@@ -9,6 +9,7 @@ logger = logging.getLogger(__name__)
|
||||
__all__ = [
|
||||
'ValidationError',
|
||||
'BaseQueryRunner',
|
||||
'InterruptException',
|
||||
'TYPE_DATETIME',
|
||||
'TYPE_BOOLEAN',
|
||||
'TYPE_INTEGER',
|
||||
@@ -38,6 +39,9 @@ SUPPORTED_COLUMN_TYPES = set([
|
||||
TYPE_DATE
|
||||
])
|
||||
|
||||
class InterruptException(Exception):
|
||||
pass
|
||||
|
||||
class BaseQueryRunner(object):
|
||||
def __init__(self, configuration):
|
||||
jsonschema.validate(configuration, self.configuration_schema())
|
||||
@@ -67,6 +71,24 @@ class BaseQueryRunner(object):
|
||||
def run_query(self, query):
|
||||
raise NotImplementedError()
|
||||
|
||||
def fetch_columns(self, columns):
|
||||
column_names = []
|
||||
duplicates_counter = 1
|
||||
new_columns = []
|
||||
|
||||
for col in columns:
|
||||
column_name = col[0]
|
||||
if column_name in column_names:
|
||||
column_name = "{}{}".format(column_name, duplicates_counter)
|
||||
duplicates_counter += 1
|
||||
|
||||
column_names.append(column_name)
|
||||
new_columns.append({'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': col[1]})
|
||||
|
||||
return new_columns
|
||||
|
||||
def get_schema(self):
|
||||
return []
|
||||
|
||||
|
||||
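fetch_columns centralizes the duplicate-name handling that pg.py previously did inline (and that mysql.py now reuses): a repeated column name gets a numeric suffix. Its behavior on a cursor-description-like list of (name, type) pairs:

def fetch_columns(columns):
    column_names, new_columns, duplicates_counter = [], [], 1
    for col in columns:
        column_name = col[0]
        if column_name in column_names:
            column_name = "{}{}".format(column_name, duplicates_counter)
            duplicates_counter += 1
        column_names.append(column_name)
        new_columns.append({'name': column_name,
                            'friendly_name': column_name,
                            'type': col[1]})
    return new_columns

print([c['name'] for c in fetch_columns([('id', None), ('name', None), ('id', None)])])
# ['id', 'name', 'id1']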
@@ -8,6 +8,7 @@ import time
|
||||
|
||||
import requests
|
||||
|
||||
from redash import settings
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
@@ -22,9 +23,6 @@ try:
|
||||
|
||||
enabled = True
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install google-api-python-client and oauth2client.")
|
||||
logger.warning("You can use pip: pip install google-api-python-client oauth2client")
|
||||
|
||||
enabled = False
|
||||
|
||||
types_map = {
|
||||
@@ -99,7 +97,8 @@ class BigQuery(BaseQueryRunner):
|
||||
'title': 'JSON Key File'
|
||||
}
|
||||
},
|
||||
'required': ['jsonKeyFile', 'projectId']
|
||||
'required': ['jsonKeyFile', 'projectId'],
|
||||
'secret': ['jsonKeyFile']
|
||||
}
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
@@ -113,7 +112,7 @@ class BigQuery(BaseQueryRunner):
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
|
||||
http = httplib2.Http()
|
||||
http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
|
||||
http = credentials.authorize(http)
|
||||
|
||||
return build("bigquery", "v2", http=http)
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import urllib
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash import models
|
||||
|
||||
import requests
|
||||
import dateutil
|
||||
from dateutil.parser import parse
|
||||
|
||||
try:
|
||||
import http.client as http_client
|
||||
@@ -27,9 +24,15 @@ ELASTICSEARCH_TYPES_MAPPING = {
|
||||
"boolean" : TYPE_BOOLEAN,
|
||||
"string" : TYPE_STRING,
|
||||
"date" : TYPE_DATE,
|
||||
"object" : TYPE_STRING,
|
||||
# "geo_point" TODO: Need to split to 2 fields somehow
|
||||
}
|
||||
|
||||
ELASTICSEARCH_BUILTIN_FIELDS_MAPPING = {
|
||||
"_id" : "Id",
|
||||
"_score" : "Score"
|
||||
}
|
||||
|
||||
PYTHON_TYPES_MAPPING = {
|
||||
str: TYPE_STRING,
|
||||
unicode: TYPE_STRING,
|
||||
@@ -39,56 +42,10 @@ PYTHON_TYPES_MAPPING = {
|
||||
float: TYPE_FLOAT
|
||||
}
|
||||
|
||||
#
|
||||
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
|
||||
# but without the aggregation).
|
||||
#
|
||||
# Full blown JSON based ElasticSearch queries (including aggregations) will be
|
||||
# added later
|
||||
#
|
||||
# Simple query example:
|
||||
#
|
||||
# - Query the index named "twitter"
|
||||
# - Filter by "user:kimchy"
|
||||
# - Return the fields: "@timestamp", "tweet" and "user"
|
||||
# - Return up to 15 results
|
||||
# - Sort by @timestamp ascending
|
||||
#
|
||||
# {
|
||||
# "index" : "twitter",
|
||||
# "query" : "user:kimchy",
|
||||
# "fields" : ["@timestamp", "tweet", "user"],
|
||||
# "size" : 15,
|
||||
# "sort" : "@timestamp:asc"
|
||||
# }
|
||||
#
|
||||
#
|
||||
# Simple query on a logstash ElasticSearch instance:
|
||||
#
|
||||
# - Query the index named "logstash-2015.04.*" (in this case it's all of April 2015)
|
||||
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
|
||||
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
|
||||
# - Return up to 250 results
|
||||
# - Sort by @timestamp ascending
|
||||
class BaseElasticSearch(BaseQueryRunner):
|
||||
|
||||
# {
|
||||
# "index" : "logstash-2015.04.*",
|
||||
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
|
||||
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
|
||||
# "size" : 250,
|
||||
# "sort" : "@timestamp:asc"
|
||||
# }
|
||||
#
|
||||
#
|
||||
DEBUG_ENABLED = True
|
||||
|
||||
class ElasticSearch(BaseQueryRunner):
|
||||
DEBUG_ENABLED = False
|
||||
|
||||
"""
|
||||
ElasticSearch query runner for querying ElasticSearch servers.
|
||||
Query can be done using the Lucene Syntax (single line) or the more complex,
|
||||
full blown ElasticSearch JSON syntax
|
||||
"""
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
@@ -97,6 +54,14 @@ class ElasticSearch(BaseQueryRunner):
|
||||
'server': {
|
||||
'type': 'string',
|
||||
'title': 'Base URL'
|
||||
},
|
||||
'basic_auth_user': {
|
||||
'type': 'string',
|
||||
'title': 'Basic Auth User'
|
||||
},
|
||||
'basic_auth_password': {
|
||||
'type': 'string',
|
||||
'title': 'Basic Auth Password'
|
||||
}
|
||||
},
|
||||
"required" : ["server"]
|
||||
@@ -104,20 +69,16 @@ class ElasticSearch(BaseQueryRunner):
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(ElasticSearch, self).__init__(configuration_json)
|
||||
super(BaseElasticSearch, self).__init__(configuration_json)
|
||||
|
||||
self.syntax = "json"
|
||||
|
||||
if self.DEBUG_ENABLED:
|
||||
http_client.HTTPConnection.debuglevel = 1
|
||||
|
||||
|
||||
# you need to initialize logging, otherwise you will not see anything from requests
|
||||
logging.basicConfig()
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
@@ -125,11 +86,26 @@ class ElasticSearch(BaseQueryRunner):
|
||||
requests_log.setLevel(logging.DEBUG)
|
||||
requests_log.propagate = True
|
||||
|
||||
def get_mappings(self, url):
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
self.server_url = self.configuration["server"]
|
||||
if self.server_url[-1] == "/":
|
||||
self.server_url = self.server_url[:-1]
|
||||
|
||||
basic_auth_user = self.configuration["basic_auth_user"]
|
||||
basic_auth_password = self.configuration["basic_auth_password"]
|
||||
self.auth = None
|
||||
if basic_auth_user and basic_auth_password:
|
||||
self.auth = HTTPBasicAuth(basic_auth_user, basic_auth_password)
|
||||
|
||||
def _get_mappings(self, url):
|
||||
mappings = {}
|
||||
|
||||
r = requests.get(url)
|
||||
r = requests.get(url, auth=self.auth)
|
||||
mappings_data = r.json()
|
||||
|
||||
logger.debug(mappings_data)
|
||||
|
||||
for index_name in mappings_data:
|
||||
index_mappings = mappings_data[index_name]
|
||||
for m in index_mappings.get("mappings", {}):
|
||||
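Both new fields are optional: auth stays None unless user and password are both set, and requests treats auth=None as "no authentication", so unauthenticated servers keep working. The relevant pattern, stripped down (server URL is an assumed local instance):

import requests
from requests.auth import HTTPBasicAuth

configuration = {'server': 'http://localhost:9200/',
                 'basic_auth_user': '', 'basic_auth_password': ''}

server_url = configuration['server'].rstrip('/')
auth = None
if configuration['basic_auth_user'] and configuration['basic_auth_password']:
    auth = HTTPBasicAuth(configuration['basic_auth_user'],
                         configuration['basic_auth_password'])

r = requests.get("{0}/twitter/_mapping".format(server_url), auth=auth)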
@@ -141,14 +117,21 @@ class ElasticSearch(BaseQueryRunner):
|
||||
if property_type in ELASTICSEARCH_TYPES_MAPPING:
|
||||
mappings[property_name] = property_type
|
||||
else:
|
||||
raise "Unknown property type: {0}".format(property_type)
|
||||
raise Exception("Unknown property type: {0}".format(property_type))
|
||||
|
||||
return mappings
|
||||
|
||||
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
|
||||
result_columns_index = {}
|
||||
for c in result_columns:
|
||||
result_columns_index[c["name"]] = c
|
||||
def _parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
|
||||
|
||||
def add_column_if_needed(mappings, column_name, friendly_name, result_columns, result_columns_index):
|
||||
if friendly_name not in result_columns_index:
|
||||
result_columns.append({
|
||||
"name" : friendly_name,
|
||||
"friendly_name" : friendly_name,
|
||||
"type" : mappings.get(column_name, "string")})
|
||||
result_columns_index[friendly_name] = result_columns[-1]
|
||||
|
||||
result_columns_index = {c["name"] : c for c in result_columns}
|
||||
|
||||
result_fields_index = {}
|
||||
if result_fields:
|
||||
@@ -157,32 +140,49 @@ class ElasticSearch(BaseQueryRunner):
|
||||
|
||||
for h in raw_result["hits"]["hits"]:
|
||||
row = {}
|
||||
for column in h["_source"]:
|
||||
|
||||
for field, column in ELASTICSEARCH_BUILTIN_FIELDS_MAPPING.iteritems():
|
||||
if field in h:
|
||||
add_column_if_needed(mappings, field, column, result_columns, result_columns_index)
|
||||
row[column] = h[field]
|
||||
|
||||
column_name = "_source" if "_source" in h else "fields"
|
||||
for column in h[column_name]:
|
||||
if result_fields and column not in result_fields_index:
|
||||
continue
|
||||
|
||||
if column not in result_columns_index:
|
||||
result_columns.append({
|
||||
"name" : column,
|
||||
"friendly_name" : column,
|
||||
"type" : mappings.get(column, "string")
|
||||
})
|
||||
result_columns_index[column] = result_columns[-1]
|
||||
add_column_if_needed(mappings, column, column, result_columns, result_columns_index)
|
||||
|
||||
value = h[column_name][column]
|
||||
row[column] = value[0] if isinstance(value, list) and len(value) == 1 else value
|
||||
|
||||
row[column] = h["_source"][column]
|
||||
|
||||
if row and len(row) > 0:
|
||||
result_rows.append(row)
|
||||
|
||||
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
|
||||
|
||||
class Kibana(BaseElasticSearch):
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(Kibana, self).__init__(configuration_json)
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
def _execute_simple_query(self, url, auth, _from, mappings, result_fields, result_columns, result_rows):
|
||||
url += "&from={0}".format(_from)
|
||||
r = requests.get(url)
|
||||
r = requests.get(url, auth=self.auth)
|
||||
if r.status_code != 200:
|
||||
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
|
||||
|
||||
raw_result = r.json()
|
||||
|
||||
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
|
||||
self._parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
|
||||
|
||||
total = raw_result["hits"]["total"]
|
||||
result_size = len(raw_result["hits"]["hits"])
|
||||
@@ -203,19 +203,14 @@ class ElasticSearch(BaseQueryRunner):
|
||||
result_fields = query_params.get("fields", None)
|
||||
sort = query_params.get("sort", None)
|
||||
|
||||
server_url = self.configuration["server"]
|
||||
if not server_url:
|
||||
if not self.server_url:
|
||||
error = "Missing configuration key 'server'"
|
||||
return None, error
|
||||
|
||||
url = "{0}/{1}/_search?".format(self.server_url, index_name)
|
||||
mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)
|
||||
|
||||
if server_url[-1] == "/":
|
||||
server_url = server_url[:-1]
|
||||
|
||||
url = "{0}/{1}/_search?".format(server_url, index_name)
|
||||
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
|
||||
|
||||
mappings = self.get_mappings(mapping_url)
|
||||
mappings = self._get_mappings(mapping_url)
|
||||
|
||||
logger.debug(json.dumps(mappings, indent=4))
|
||||
|
||||
@@ -235,7 +230,7 @@ class ElasticSearch(BaseQueryRunner):
|
||||
if isinstance(query_data, str) or isinstance(query_data, unicode):
|
||||
_from = 0
|
||||
while True:
|
||||
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
|
||||
total = self._execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
|
||||
_from += size
|
||||
if _from >= total:
|
||||
break
|
||||
@@ -256,4 +251,61 @@ class ElasticSearch(BaseQueryRunner):
|
||||
return json_data, error
|
||||
|
||||
|
||||
class ElasticSearch(BaseElasticSearch):
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(ElasticSearch, self).__init__(configuration_json)
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
def run_query(self, query):
|
||||
try:
|
||||
error = None
|
||||
|
||||
logger.debug(query)
|
||||
query_dict = json.loads(query)
|
||||
|
||||
index_name = query_dict.pop("index", "")
|
||||
|
||||
if not self.server_url:
|
||||
error = "Missing configuration key 'server'"
|
||||
return None, error
|
||||
|
||||
url = "{0}/{1}/_search".format(self.server_url, index_name)
|
||||
mapping_url = "{0}/{1}/_mapping".format(self.server_url, index_name)
|
||||
|
||||
mappings = self._get_mappings(mapping_url)
|
||||
|
||||
logger.debug(json.dumps(mappings, indent=4))
|
||||
|
||||
params = {"source": json.dumps(query_dict)}
|
||||
logger.debug("Using URL: %s", url)
|
||||
logger.debug("Using params : %s", params)
|
||||
r = requests.get(url, params=params, auth=self.auth)
|
||||
logger.debug("Result: %s", r.json())
|
||||
|
||||
result_columns = []
|
||||
result_rows = []
|
||||
self._parse_results(mappings, None, r.json(), result_columns, result_rows)
|
||||
|
||||
json_data = json.dumps({
|
||||
"columns" : result_columns,
|
||||
"rows" : result_rows
|
||||
})
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(Kibana)
|
||||
register(ElasticSearch)
|
||||
|
||||
@@ -13,9 +13,6 @@ try:
|
||||
from dateutil import parser
|
||||
enabled = True
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
|
||||
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
|
||||
|
||||
enabled = False
|
||||
|
||||
|
||||
@@ -27,22 +24,51 @@ def _load_key(filename):
|
||||
def _guess_type(value):
|
||||
try:
|
||||
val = int(value)
|
||||
return TYPE_INTEGER, val
|
||||
return TYPE_INTEGER
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
val = float(value)
|
||||
return TYPE_FLOAT, val
|
||||
return TYPE_FLOAT
|
||||
except ValueError:
|
||||
pass
|
||||
if str(value).lower() in ('true', 'false'):
|
||||
return TYPE_BOOLEAN, bool(value)
|
||||
return TYPE_BOOLEAN
|
||||
try:
|
||||
val = parser.parse(value)
|
||||
return TYPE_DATETIME, val
|
||||
return TYPE_DATETIME
|
||||
except ValueError:
|
||||
pass
|
||||
return TYPE_STRING, value
|
||||
return TYPE_STRING
|
||||
|
||||
|
||||
def _value_eval_list(value):
|
||||
value_list = []
|
||||
for member in value:
|
||||
try:
|
||||
val = int(member)
|
||||
value_list.append(val)
|
||||
continue
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
val = float(member)
|
||||
value_list.append(val)
|
||||
continue
|
||||
except ValueError:
|
||||
pass
|
||||
if str(member).lower() in ('true', 'false'):
|
||||
val = bool(member)
|
||||
value_list.append(val)
|
||||
continue
|
||||
try:
|
||||
val = parser.parse(member)
|
||||
value_list.append(val)
|
||||
continue
|
||||
except ValueError:
|
||||
pass
|
||||
value_list.append(member)
|
||||
return value_list
|
||||
|
||||
|
||||
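The split separates concerns: _guess_type now only classifies the header row (returning a type constant rather than a converted value), while the new _value_eval_list converts every cell of a data row. A condensed, single-value version of the conversion pass; note it deliberately mirrors the source's quirk that bool('false') evaluates to True:

from dateutil import parser

def value_eval(member):
    for cast in (int, float):
        try:
            return cast(member)
        except ValueError:
            pass
    if str(member).lower() in ('true', 'false'):
        return bool(member)  # mirrors the source: bool('false') is True
    try:
        return parser.parse(member)
    except ValueError:
        return member

print([value_eval(v) for v in ['3', '2.5', 'true', '2015-04-01', 'hello']])
# [3, 2.5, True, datetime.datetime(2015, 4, 1, 0, 0), 'hello']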
class GoogleSpreadsheet(BaseQueryRunner):
|
||||
@@ -70,7 +96,8 @@ class GoogleSpreadsheet(BaseQueryRunner):
|
||||
'title': 'JSON Key File'
|
||||
}
|
||||
},
|
||||
'required': ['jsonKeyFile']
|
||||
'required': ['jsonKeyFile'],
|
||||
'secret': ['jsonKeyFile']
|
||||
}
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
@@ -105,7 +132,7 @@ class GoogleSpreadsheet(BaseQueryRunner):
|
||||
'friendly_name': column_name,
|
||||
'type': _guess_type(all_data[self.HEADER_INDEX+1][j])
|
||||
})
|
||||
rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX+1:]]
|
||||
rows = [dict(zip(column_names, _value_eval_list(row))) for row in all_data[self.HEADER_INDEX+1:]]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
|
||||
@@ -44,7 +44,8 @@ class Graphite(BaseQueryRunner):
|
||||
'title': 'Verify SSL certificate'
|
||||
}
|
||||
},
|
||||
'required': ['url']
|
||||
'required': ['url'],
|
||||
'secret': ['password']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -59,7 +60,7 @@ class Graphite(BaseQueryRunner):
|
||||
else:
|
||||
self.auth = None
|
||||
|
||||
self.verify = self.configuration["verify"]
|
||||
self.verify = self.configuration.get("verify", True)
|
||||
self.base_url = "%s/render?format=json&" % self.configuration['url']
|
||||
|
||||
def run_query(self, query):
|
||||
@@ -80,4 +81,4 @@ class Graphite(BaseQueryRunner):
|
||||
|
||||
return data, error
|
||||
|
||||
register(Graphite)
|
||||
register(Graphite)
|
||||
|
||||
@@ -11,8 +11,6 @@ try:
|
||||
from pyhive import hive
|
||||
enabled = True
|
||||
except ImportError, e:
|
||||
logger.warning("Missing dependencies. Please install pyhive.")
|
||||
logger.warning("You can use pip: pip install pyhive")
|
||||
enabled = False
|
||||
|
||||
COLUMN_NAME = 0
|
||||
|
||||
@@ -12,8 +12,6 @@ try:
|
||||
from impala.error import DatabaseError, RPCError
|
||||
enabled = True
|
||||
except ImportError, e:
|
||||
logger.warning("Missing dependencies. Please install impyla.")
|
||||
logger.warning("You can use pip: pip install impyla")
|
||||
enabled = False
|
||||
|
||||
COLUMN_NAME = 0
|
||||
@@ -68,7 +66,8 @@ class Impala(BaseQueryRunner):
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": ["host"]
|
||||
"required": ["host"],
|
||||
"secret": ["ldap_password"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -11,8 +11,6 @@ try:
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install influxdb.")
|
||||
logger.warning("You can use pip: pip install influxdb")
|
||||
enabled = False
|
||||
|
||||
def _transform_result(results):
|
||||
|
||||
@@ -2,7 +2,6 @@ import json
|
||||
import datetime
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from dateutil.parser import parse
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
@@ -17,8 +16,6 @@ try:
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install pymongo.")
|
||||
logger.warning("You can use pip: pip install pymongo")
|
||||
enabled = False
|
||||
|
||||
|
||||
@@ -32,7 +29,6 @@ TYPES_MAP = {
|
||||
datetime.datetime: TYPE_DATETIME,
|
||||
}
|
||||
|
||||
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
|
||||
|
||||
class MongoDBJSONEncoder(JSONEncoder):
|
||||
def default(self, o):
|
||||
@@ -41,66 +37,25 @@ class MongoDBJSONEncoder(JSONEncoder):
|
||||
|
||||
return super(MongoDBJSONEncoder, self).default(o)
|
||||
|
||||
# Simple query example:
|
||||
#
|
||||
# {
|
||||
# "collection" : "my_collection",
|
||||
# "query" : {
|
||||
# "date" : {
|
||||
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
|
||||
# },
|
||||
# "type" : 1
|
||||
# },
|
||||
# "fields" : {
|
||||
# "_id" : 1,
|
||||
# "name" : 2
|
||||
# },
|
||||
# "sort" : [
|
||||
# {
|
||||
# "name" : "date",
|
||||
# "direction" : -1
|
||||
# }
|
||||
# ]
|
||||
#
|
||||
# }
|
||||
#
|
||||
#
|
||||
# Aggregation
|
||||
# ===========
|
||||
# Uses a syntax similar to the one used in PyMongo, however to support the
|
||||
# correct order of sorting, it uses a regular list for the "$sort" operation
|
||||
# that converts into a SON (sorted dictionary) object before execution.
|
||||
#
|
||||
# Aggregation query example:
|
||||
#
|
||||
# {
|
||||
# "collection" : "things",
|
||||
# "aggregate" : [
|
||||
# {
|
||||
# "$unwind" : "$tags"
|
||||
# },
|
||||
# {
|
||||
# "$group" : {
|
||||
# "_id" : "$tags",
|
||||
# "count" : { "$sum" : 1 }
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# "$sort" : [
|
||||
# {
|
||||
# "name" : "count",
|
||||
# "direction" : -1
|
||||
# },
|
||||
# {
|
||||
# "name" : "_id",
|
||||
# "direction" : -1
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
#
|
||||
#
|
||||
|
||||
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
|
||||
|
||||
|
||||
def datetime_parser(dct):
|
||||
for k, v in dct.iteritems():
|
||||
if isinstance(v, basestring):
|
||||
m = date_regex.findall(v)
|
||||
if len(m) > 0:
|
||||
dct[k] = parse(m[0], yearfirst=True)
|
||||
|
||||
return dct
|
||||
|
||||
|
||||
def parse_query_json(query):
|
||||
query_data = json.loads(query, object_hook=datetime_parser)
|
||||
return query_data
|
||||
|
||||
|
||||
class MongoDB(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
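Replacing the recursive _fix_dates walk with a json.loads object_hook is the key simplification: datetime_parser runs once per decoded object, so nesting is handled by the JSON decoder itself. Its effect on an ISODate literal (Python 2, matching the codebase's use of basestring):

import json
import re
from dateutil.parser import parse

date_regex = re.compile('ISODate\\("(.*)"\\)', re.IGNORECASE)

def datetime_parser(dct):
    for k, v in dct.items():
        if isinstance(v, basestring):
            m = date_regex.findall(v)
            if m:
                dct[k] = parse(m[0], yearfirst=True)
    return dct

q = '{"query": {"date": {"$gt": "ISODate(\\"2015-01-15 11:41\\")"}}}'
print(json.loads(q, object_hook=datetime_parser))
# {u'query': {u'date': {u'$gt': datetime.datetime(2015, 1, 15, 11, 41)}}}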
@@ -147,25 +102,6 @@ class MongoDB(BaseQueryRunner):
|
||||
|
||||
return None
|
||||
|
||||
def _fix_dates(self, data):
|
||||
for k in data:
|
||||
if isinstance(data[k], list):
|
||||
for i in range(0, len(data[k])):
|
||||
if isinstance(data[k][i], (str, unicode)):
|
||||
self._convert_date(data[k], i)
|
||||
elif not isinstance(data[k][i], (int)):
|
||||
self._fix_dates(data[k][i])
|
||||
|
||||
elif isinstance(data[k], dict):
|
||||
self._fix_dates(data[k])
|
||||
else:
|
||||
if isinstance(data[k], (str, unicode)):
|
||||
self._convert_date(data, k)
|
||||
|
||||
def _convert_date(self, q, field_name):
|
||||
m = date_regex.findall(q[field_name])
|
||||
if len(m) > 0:
|
||||
q[field_name] = parse(m[0], yearfirst=True)
|
||||
|
||||
def run_query(self, query):
|
||||
if self.is_replica_set:
|
||||
@@ -179,8 +115,7 @@ class MongoDB(BaseQueryRunner):
|
||||
logger.debug("mongodb got query: %s", query)
|
||||
|
||||
try:
|
||||
query_data = json.loads(query)
|
||||
self._fix_dates(query_data)
|
||||
query_data = parse_query_json(query)
|
||||
except ValueError:
|
||||
return None, "Invalid query format. The query is not a valid JSON."
|
||||
|
||||
|
||||
redash/query_runner/mql.py (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
import json
|
||||
|
||||
from . import BaseQueryRunner, register
|
||||
from .mongodb import TYPES_MAP, TYPE_STRING
|
||||
|
||||
try:
|
||||
import pymongo
|
||||
from ognom import query_to_plan
|
||||
from website.server.utils import simplify
|
||||
enabled = True
|
||||
except ImportError:
|
||||
enabled = False
|
||||
|
||||
def deduce_columns(rows):
|
||||
column_to_type = {}
|
||||
for row in rows:
|
||||
for column, value in row.iteritems():
|
||||
column_to_type[column] = TYPES_MAP.get(value.__class__, TYPE_STRING)
|
||||
return [{'name': column, 'friendly_name': column, 'type': type}
|
||||
for column, type in column_to_type.iteritems()]
|
||||
|
||||
class MQL(BaseQueryRunner):
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(MQL, self).__init__(configuration_json)
|
||||
self.syntax = 'sql'
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'uri': {
|
||||
'type': 'string',
|
||||
'title': 'Connection String'
|
||||
}
|
||||
},
|
||||
'required': ['uri']
|
||||
}
|
||||
|
||||
def run_query(self, query):
|
||||
conn = pymongo.MongoClient(self.configuration['uri'])
|
||||
# execute() returns a generator (that wraps a cursor)
|
||||
gen = query_to_plan(query).execute(conn)
|
||||
# simplify converts special MongoDB data types (ObjectId, Date, etc.) to strings
|
||||
result = simplify(list(gen))
|
||||
return json.dumps({'columns': deduce_columns(result), 'rows': result}), None
|
||||
|
||||
register(MQL)
|
||||
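deduce_columns infers the column list by scanning every row, so a column that first appears (or first carries a non-null value) in a later row still gets typed. A standalone restatement against two sample documents, with a toy types map standing in for the one imported from the mongodb runner:

def deduce_columns(rows, types_map, default_type='string'):
    column_to_type = {}
    for row in rows:
        for column, value in row.items():
            column_to_type[column] = types_map.get(value.__class__, default_type)
    return [{'name': c, 'friendly_name': c, 'type': t}
            for c, t in column_to_type.items()]

rows = [{'_id': 'abc', 'count': 1}, {'_id': 'def', 'count': 2, 'tag': 'x'}]
print(deduce_columns(rows, {int: 'integer', str: 'string'}))
# three columns: _id (string), count (integer), tag (string)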
@@ -33,7 +33,8 @@ class Mysql(BaseQueryRunner):
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'host': {
|
||||
'type': 'string'
|
||||
'type': 'string',
|
||||
'default': '127.0.0.1'
|
||||
},
|
||||
'user': {
|
||||
'type': 'string'
|
||||
@@ -46,11 +47,29 @@ class Mysql(BaseQueryRunner):
|
||||
'type': 'string',
|
||||
'title': 'Database name'
|
||||
},
|
||||
"port": {
|
||||
"type": "number"
|
||||
'port': {
|
||||
'type': 'number',
|
||||
'default': 3306,
|
||||
},
|
||||
'use_ssl': {
|
||||
'type': 'boolean',
|
||||
'title': 'Use SSL'
|
||||
},
|
||||
'ssl_cacert': {
|
||||
'type': 'string',
|
||||
'title': 'Path to CA certificate file to verify peer against (SSL)'
|
||||
},
|
||||
'ssl_cert': {
|
||||
'type': 'string',
|
||||
'title': 'Path to client certificate file (SSL)'
|
||||
},
|
||||
'ssl_key': {
|
||||
'type': 'string',
|
||||
'title': 'Path to private key file (SSL)'
|
||||
}
|
||||
},
|
||||
'required': ['db']
|
||||
'required': ['db'],
|
||||
'secret': ['passwd']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -110,7 +129,8 @@ class Mysql(BaseQueryRunner):
|
||||
passwd=self.configuration.get('passwd', ''),
|
||||
db=self.configuration['db'],
|
||||
port=self.configuration.get('port', 3306),
|
||||
charset='utf8', use_unicode=True)
|
||||
charset='utf8', use_unicode=True,
|
||||
ssl=self._get_ssl_parameters())
|
||||
cursor = connection.cursor()
|
||||
logger.debug("MySQL running query: %s", query)
|
||||
cursor.execute(query)
|
||||
@@ -119,13 +139,8 @@ class Mysql(BaseQueryRunner):
|
||||
|
||||
# TODO - very similar to pg.py
|
||||
if cursor.description is not None:
|
||||
columns_data = [(i[0], i[1]) for i in cursor.description]
|
||||
|
||||
rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]
|
||||
|
||||
columns = [{'name': col[0],
|
||||
'friendly_name': col[0],
|
||||
'type': types_map.get(col[1], None)} for col in columns_data]
|
||||
columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
@@ -149,4 +164,19 @@ class Mysql(BaseQueryRunner):
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(Mysql)
|
||||
def _get_ssl_parameters(self):
|
||||
ssl_params = {}
|
||||
|
||||
if self.configuration.get('use_ssl'):
|
||||
config_map = dict(ssl_cacert='ca',
|
||||
ssl_cert='cert',
|
||||
ssl_key='key')
|
||||
for key, cfg in config_map.items():
|
||||
val = self.configuration.get(key)
|
||||
if val:
|
||||
ssl_params[cfg] = val
|
||||
|
||||
return ssl_params
|
||||
|
||||
|
||||
register(Mysql)
|
||||
|
||||
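_get_ssl_parameters maps the three path fields onto the ca/cert/key keys that MySQLdb's ssl connection argument expects, and returns an empty dict when use_ssl is off, leaving the connection non-SSL. The same logic as a standalone function:

def get_ssl_parameters(configuration):
    ssl_params = {}
    if configuration.get('use_ssl'):
        config_map = {'ssl_cacert': 'ca', 'ssl_cert': 'cert', 'ssl_key': 'key'}
        for key, cfg in config_map.items():
            if configuration.get(key):
                ssl_params[cfg] = configuration[key]
    return ssl_params

print(get_ssl_parameters({'use_ssl': True, 'ssl_cacert': '/etc/ssl/ca.pem'}))
# {'ca': '/etc/ssl/ca.pem'}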
redash/query_runner/oracle.py (new file, 175 lines)
@@ -0,0 +1,175 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
try:
|
||||
import cx_Oracle
|
||||
|
||||
TYPES_MAP = {
|
||||
cx_Oracle.DATETIME: TYPE_DATETIME,
|
||||
cx_Oracle.CLOB: TYPE_STRING,
|
||||
cx_Oracle.LOB: TYPE_STRING,
|
||||
cx_Oracle.FIXED_CHAR: TYPE_STRING,
|
||||
cx_Oracle.FIXED_NCHAR: TYPE_STRING,
|
||||
cx_Oracle.FIXED_UNICODE: TYPE_STRING,
|
||||
cx_Oracle.INTERVAL: TYPE_DATETIME,
|
||||
cx_Oracle.LONG_NCHAR: TYPE_STRING,
|
||||
cx_Oracle.LONG_STRING: TYPE_STRING,
|
||||
cx_Oracle.LONG_UNICODE: TYPE_STRING,
|
||||
cx_Oracle.NATIVE_FLOAT: TYPE_FLOAT,
|
||||
cx_Oracle.NCHAR: TYPE_STRING,
|
||||
cx_Oracle.NUMBER: TYPE_FLOAT,
|
||||
cx_Oracle.ROWID: TYPE_INTEGER,
|
||||
cx_Oracle.STRING: TYPE_STRING,
|
||||
cx_Oracle.TIMESTAMP: TYPE_DATETIME,
|
||||
cx_Oracle.UNICODE: TYPE_STRING,
|
||||
}
|
||||
|
||||
|
||||
ENABLED = True
|
||||
except ImportError:
|
||||
ENABLED = False
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Oracle(BaseQueryRunner):
|
||||
|
||||
@classmethod
|
||||
def get_col_type(cls, col_type, scale):
|
||||
if col_type == cx_Oracle.NUMBER:
|
||||
return TYPE_FLOAT if scale > 0 else TYPE_INTEGER
|
||||
else:
|
||||
return TYPES_MAP.get(col_type, None)
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return ENABLED
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"user": {
|
||||
"type": "string"
|
||||
},
|
||||
"password": {
|
||||
"type": "string"
|
||||
},
|
||||
"host": {
|
||||
"type": "string"
|
||||
},
|
||||
"port": {
|
||||
"type": "number"
|
||||
},
|
||||
"servicename": {
|
||||
"type": "string",
|
||||
"title": "DSN Service Name"
|
||||
}
|
||||
},
|
||||
"required": ["servicename"],
|
||||
"secret": ["password"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "oracle"
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(Oracle, self).__init__(configuration_json)
|
||||
|
||||
dsn = cx_Oracle.makedsn(
|
||||
self.configuration["host"],
|
||||
self.configuration["port"],
|
||||
service_name=self.configuration["servicename"])
|
||||
|
||||
self.connection_string = "{}/{}@{}".format(self.configuration["user"], self.configuration["password"], dsn)
|
||||
|
||||
def get_schema(self):
|
||||
query = """
|
||||
SELECT
|
||||
user_tables.TABLESPACE_NAME,
|
||||
all_tab_cols.TABLE_NAME,
|
||||
all_tab_cols.COLUMN_NAME
|
||||
FROM all_tab_cols
|
||||
JOIN user_tables ON (all_tab_cols.TABLE_NAME = user_tables.TABLE_NAME)
|
||||
"""
|
||||
|
||||
results, error = self.run_query(query)
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
|
||||
schema = {}
|
||||
for row in results['rows']:
|
||||
if row['TABLESPACE_NAME'] != None:
|
||||
table_name = '{}.{}'.format(row['TABLESPACE_NAME'], row['TABLE_NAME'])
|
||||
else:
|
||||
table_name = row['TABLE_NAME']
|
||||
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {'name': table_name, 'columns': []}
|
||||
|
||||
schema[table_name]['columns'].append(row['COLUMN_NAME'])
|
||||
|
||||
return schema.values()
|
||||
|
||||
@classmethod
|
||||
def _convert_number(cls, value):
|
||||
try:
|
||||
return int(value)
|
||||
except:
|
||||
return value
|
||||
|
||||
@classmethod
|
||||
def output_handler(cls, cursor, name, default_type, length, precision, scale):
|
||||
if default_type in (cx_Oracle.CLOB, cx_Oracle.LOB):
|
||||
return cursor.var(cx_Oracle.LONG_STRING, 80000, cursor.arraysize)
|
||||
|
||||
if default_type in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
|
||||
return cursor.var(unicode, length, cursor.arraysize)
|
||||
|
||||
if default_type == cx_Oracle.NUMBER:
|
||||
if scale <= 0:
|
||||
return cursor.var(cx_Oracle.STRING, 255, outconverter=Oracle._convert_number, arraysize=cursor.arraysize)
|
||||
|
||||
def run_query(self, query):
|
||||
connection = cx_Oracle.connect(self.connection_string)
|
||||
connection.outputtypehandler = Oracle.output_handler
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
|
||||
if cursor.description is not None:
|
||||
columns = self.fetch_columns([(i[0], Oracle.get_col_type(i[1], i[5])) for i in cursor.description])
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
else:
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
except cx_Oracle.DatabaseError as err:
|
||||
logging.exception(err.message)
|
||||
error = "Query failed. {}.".format(err.message)
|
||||
json_data = None
|
||||
except KeyboardInterrupt:
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as err:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(Oracle)
|
||||
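NUMBER needs the scale to disambiguate: Oracle reports both integers and decimals as NUMBER, and cursor.description exposes the scale at index 5, which is why fetch_columns is fed (i[0], get_col_type(i[1], i[5])). The decision in isolation (the real code compares against cx_Oracle.NUMBER; a string stands in here so the sketch runs without the driver):

def get_col_type(col_type, scale, types_map, number_type='NUMBER'):
    if col_type == number_type:
        return 'float' if scale > 0 else 'integer'
    return types_map.get(col_type)

print(get_col_type('NUMBER', 0, {}))  # 'integer' -- e.g. NUMBER(10,0)
print(get_col_type('NUMBER', 2, {}))  # 'float'   -- e.g. NUMBER(10,2)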
@@ -57,17 +57,20 @@ class PostgreSQL(BaseQueryRunner):
|
||||
"type": "string"
|
||||
},
|
||||
"host": {
|
||||
"type": "string"
|
||||
"type": "string",
|
||||
"default": "127.0.0.1"
|
||||
},
|
||||
"port": {
|
||||
"type": "number"
|
||||
"type": "number",
|
||||
"default": 5432
|
||||
},
|
||||
"dbname": {
|
||||
"type": "string",
|
||||
"title": "Database Name"
|
||||
}
|
||||
},
|
||||
"required": ["dbname"]
|
||||
"required": ["dbname"],
|
||||
"secret": ["password"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -121,29 +124,9 @@ class PostgreSQL(BaseQueryRunner):
|
||||
cursor.execute(query)
|
||||
_wait(connection)
|
||||
|
||||
# While set would be more efficient here, it sorts the data which is not what we want, but due to the small
|
||||
# size of the data we can assume it's ok.
|
||||
column_names = []
|
||||
columns = []
|
||||
duplicates_counter = 1
|
||||
|
||||
if cursor.description is not None:
|
||||
for column in cursor.description:
|
||||
# TODO: this deduplication needs to be generalized and reused in all query runners.
|
||||
column_name = column.name
|
||||
if column_name in column_names:
|
||||
column_name += str(duplicates_counter)
|
||||
duplicates_counter += 1
|
||||
|
||||
column_names.append(column_name)
|
||||
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': types_map.get(column.type_code, None)
|
||||
})
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
@@ -159,7 +142,7 @@ class PostgreSQL(BaseQueryRunner):
|
||||
logging.exception(e)
|
||||
error = e.message
|
||||
json_data = None
|
||||
except KeyboardInterrupt:
|
||||
except (KeyboardInterrupt, InterruptException):
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
@@ -170,4 +153,4 @@ class PostgreSQL(BaseQueryRunner):
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(PostgreSQL)
|
||||
register(PostgreSQL)
|
||||
|
||||
@@ -11,8 +11,6 @@ try:
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install PyHive.")
|
||||
logger.warning("You can use pip: pip install pyhive")
|
||||
enabled = False
|
||||
|
||||
PRESTO_TYPES_MAPPING = {
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import sys
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import weakref
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash import models
|
||||
@@ -99,7 +97,14 @@ class Python(BaseQueryRunner):
|
||||
return iter(obj)
|
||||
|
||||
def add_result_column(self, result, column_name, friendly_name, column_type):
|
||||
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
|
||||
"""Helper function to add columns inside a Python script running in re:dash in an easier way
|
||||
|
||||
Parameters:
|
||||
:result dict: The result dict
|
||||
:column_name string: Name of the column, which should consist of lowercase Latin letters or underscores.
|
||||
:friendly_name string: Name of the column for display
|
||||
:column_type string: Type of the column. Check supported data types for details.
|
||||
"""
|
||||
if column_type not in SUPPORTED_COLUMN_TYPES:
|
||||
raise Exception("'{0}' is not a supported column type".format(column_type))
|
||||
|
||||
@@ -113,12 +118,24 @@ class Python(BaseQueryRunner):
|
||||
})
|
||||
|
||||
def add_result_row(self, result, values):
|
||||
"""Helper function to add one row to results set
|
||||
|
||||
Parameters:
|
||||
:result dict: The result dict
|
||||
:values dict: One row of result in dict. The key should be one of the column names. The value is the value of the column in this row.
|
||||
"""
|
||||
if not "rows" in result:
|
||||
result["rows"] = []
|
||||
|
||||
result["rows"].append(values)
|
||||
|
||||
def execute_query(self, data_source_name_or_id, query):
|
||||
"""Run query from specific data source.
|
||||
|
||||
Parameters:
|
||||
:data_source_name_or_id string|integer: Name or ID of the data source
|
||||
:query string: Query to run
|
||||
"""
|
||||
try:
|
||||
if type(data_source_name_or_id) == int:
|
||||
data_source = models.DataSource.get_by_id(data_source_name_or_id)
|
||||
@@ -137,6 +154,11 @@ class Python(BaseQueryRunner):
|
||||
return json.loads(data)
|
||||
|
||||
def get_query_result(self, query_id):
|
||||
"""Get result of an existing query.
|
||||
|
||||
Parameters:
|
||||
:query_id integer: ID of existing query
|
||||
"""
|
||||
try:
|
||||
query = models.Query.get_by_id(query_id)
|
||||
except models.Query.DoesNotExist:
|
||||
@@ -173,7 +195,8 @@ class Python(BaseQueryRunner):
|
||||
restricted_globals["add_result_row"] = self.add_result_row
|
||||
restricted_globals["disable_print_log"] = self._custom_print.disable
|
||||
restricted_globals["enable_print_log"] = self._custom_print.enable
|
||||
|
||||
|
||||
# Supported data types
|
||||
restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
|
||||
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
|
||||
restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
|
||||
|
||||
redash/query_runner/sqlite.py (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
import json
|
||||
import logging
|
||||
import sqlite3
|
||||
import sys
|
||||
|
||||
from redash.query_runner import BaseQueryRunner
|
||||
from redash.query_runner import TYPE_STRING
|
||||
from redash.query_runner import register
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Sqlite(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"dbpath": {
|
||||
"type": "string",
|
||||
"title": "Database Path"
|
||||
}
|
||||
},
|
||||
"required": ["dbpath"],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "sqlite"
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(Sqlite, self).__init__(configuration_json)
|
||||
|
||||
self._dbpath = self.configuration['dbpath']
|
||||
|
||||
def get_schema(self):
|
||||
query_table = "select tbl_name from sqlite_master where type='table'"
|
||||
query_columns = "PRAGMA table_info(%s)"
|
||||
|
||||
results, error = self.run_query(query_table)
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
|
||||
schema = {}
|
||||
for row in results['rows']:
|
||||
table_name = row['tbl_name']
|
||||
schema[table_name] = {'name': table_name, 'columns': []}
|
||||
results_table, error = self.run_query(query_columns % (table_name,))
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results_table = json.loads(results_table)
|
||||
for row_column in results_table['rows']:
|
||||
schema[table_name]['columns'].append(row_column['name'])
|
||||
|
||||
return schema.values()
|
||||
|
||||
def run_query(self, query):
|
||||
connection = sqlite3.connect(self._dbpath)
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
|
||||
if cursor.description is not None:
|
||||
columns = self.fetch_columns([(i[0], None) for i in cursor.description])
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
else:
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
except KeyboardInterrupt:
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
return json_data, error
|
||||
|
||||
register(Sqlite)
|
||||
redash/query_runner/treasuredata.py (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
import json
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import tdclient
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
enabled = False
|
||||
|
||||
TD_TYPES_MAPPING = {
|
||||
'bigint': TYPE_INTEGER,
|
||||
'tinyint': TYPE_INTEGER,
|
||||
'smallint': TYPE_INTEGER,
|
||||
'int': TYPE_INTEGER,
|
||||
'integer': TYPE_INTEGER,
|
||||
'long': TYPE_INTEGER,
|
||||
'double': TYPE_FLOAT,
|
||||
'decimal': TYPE_FLOAT,
|
||||
'float': TYPE_FLOAT,
|
||||
'real': TYPE_FLOAT,
|
||||
'boolean': TYPE_BOOLEAN,
|
||||
'timestamp': TYPE_DATETIME,
|
||||
'date': TYPE_DATETIME,
|
||||
'char': TYPE_STRING,
|
||||
'string': TYPE_STRING,
|
||||
'varchar': TYPE_STRING,
|
||||
}
|
||||
|
||||
|
||||
class TreasureData(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'endpoint': {
|
||||
'type': 'string'
|
||||
},
|
||||
'apikey': {
|
||||
'type': 'string'
|
||||
},
|
||||
'type': {
|
||||
'type': 'string'
|
||||
},
|
||||
"db": {
|
||||
"type": "string",
|
||||
"title": "Database Name"
|
||||
}
|
||||
},
|
||||
'required': ['apikey','db']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "treasuredata"
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(TreasureData, self).__init__(configuration_json)
|
||||
|
||||
def get_schema(self):
|
||||
schema = {}
|
||||
try:
|
||||
with tdclient.Client(self.configuration.get('apikey')) as client:
|
||||
for table in client.tables(self.configuration.get('db')):
|
||||
table_name = '{}.{}'.format(self.configuration.get('db'), table.name)
|
||||
for table_schema in table.schema:
|
||||
schema[table_name] = {'name': table_name, 'columns': table.schema}
|
||||
except Exception, ex:
|
||||
raise Exception("Failed getting schema")
|
||||
return schema.values()
|
||||
|
||||
    def run_query(self, query):
        connection = tdclient.connect(
            endpoint=self.configuration.get('endpoint', 'https://api.treasuredata.com'),
            apikey=self.configuration.get('apikey'),
            type=self.configuration.get('type', 'hive'),
            db=self.configuration.get('db'))

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            # Fetch the result schema once instead of calling show_job() per column.
            result_schema = cursor.show_job()['hive_result_schema']
            columns_data = [(row[0], result_schema[i][1]) for i, row in enumerate(cursor.description)]

            columns = [{'name': col[0],
                        'friendly_name': col[0],
                        'type': TD_TYPES_MAPPING.get(col[1], None)} for col in columns_data]

            rows = [dict(zip([c[0] for c in columns_data], r)) for r in cursor.fetchall()]
            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
        except Exception, ex:
            json_data = None
            error = ex.message

        return json_data, error


register(TreasureData)
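A rough usage sketch for the new runner (the API key and query are placeholders; 'endpoint' and 'type' fall back to the defaults shown in run_query() above):

    import json
    from redash.query_runner.treasuredata import TreasureData

    # Hypothetical configuration; substitute a real Treasure Data API key.
    runner = TreasureData(json.dumps({'apikey': 'YOUR_TD_API_KEY', 'db': 'sample_datasets'}))
    json_data, error = runner.run_query('SELECT COUNT(1) AS cnt FROM www_access')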
@@ -51,7 +51,8 @@ class Vertica(BaseQueryRunner):
                 "type": "number"
             },
         },
-        'required': ['database']
+        'required': ['database'],
+        'secret': ['password']
     }

     @classmethod
@@ -1,6 +1,7 @@
 import json
 import os
 import urlparse
+from funcy import distinct


 def parse_db_url(url):
@@ -66,9 +67,10 @@ DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", "postgresql
 CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
 CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER)

-# The following enables periodic job (every 5 minutes) of removing unused query results. Behind this "feature flag" until
-# proved to be "safe".
+# The following enables periodic job (every 5 minutes) of removing unused query results.
 QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
+QUERY_RESULTS_CLEANUP_COUNT = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_COUNT", "100"))
+QUERY_RESULTS_CLEANUP_MAX_AGE = int(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE", "7"))

 AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
 PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
@@ -104,6 +106,13 @@ MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHM

 HOST = os.environ.get('REDASH_HOST', '')

+HIPCHAT_API_TOKEN = os.environ.get('REDASH_HIPCHAT_API_TOKEN', None)
+HIPCHAT_ROOM_ID = os.environ.get('REDASH_HIPCHAT_ROOM_ID', None)
+
+WEBHOOK_ENDPOINT = os.environ.get('REDASH_WEBHOOK_ENDPOINT', None)
+WEBHOOK_USERNAME = os.environ.get('REDASH_WEBHOOK_USERNAME', None)
+WEBHOOK_PASSWORD = os.environ.get('REDASH_WEBHOOK_PASSWORD', None)
+
 # CORS settings for the Query Result API (and possibly future external APIs).
 # In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
 # to the calling domain (or domains in a comma separated list).
@@ -113,11 +122,12 @@ ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUE
 ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")

 # Query Runners
-QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
+default_query_runners = [
     'redash.query_runner.big_query',
     'redash.query_runner.google_spreadsheets',
     'redash.query_runner.graphite',
     'redash.query_runner.mongodb',
     'redash.query_runner.mql',
     'redash.query_runner.mysql',
     'redash.query_runner.pg',
     'redash.query_runner.url',
@@ -126,8 +136,16 @@ QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS",
     'redash.query_runner.presto',
     'redash.query_runner.hive_ds',
     'redash.query_runner.impala_ds',
-    'redash.query_runner.vertica'
-])))
+    'redash.query_runner.vertica',
+    'redash.query_runner.treasuredata',
+    'redash.query_runner.oracle',
+    'redash.query_runner.sqlite',
+]
+
+enabled_query_runners = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join(default_query_runners)))
+additional_query_runners = array_from_string(os.environ.get("REDASH_ADDITIONAL_QUERY_RUNNERS", ""))
+
+QUERY_RUNNERS = distinct(enabled_query_runners + additional_query_runners)

 # Support for Sentry (http://getsentry.com/). Just set your Sentry DSN to enable it:
 SENTRY_DSN = os.environ.get("REDASH_SENTRY_DSN", "")
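The net effect: REDASH_ENABLED_QUERY_RUNNERS still overrides the default list, while the new REDASH_ADDITIONAL_QUERY_RUNNERS appends to it, and funcy's distinct() drops duplicates while preserving order. A small sketch of that composition (module names here are illustrative):

    from funcy import distinct

    enabled = ['redash.query_runner.pg', 'redash.query_runner.mysql']
    additional = ['redash.query_runner.mysql', 'my_company.query_runner.foo']

    # distinct() keeps first occurrences, so the merged list has no repeats.
    print(list(distinct(enabled + additional)))
    # ['redash.query_runner.pg', 'redash.query_runner.mysql', 'my_company.query_runner.foo']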
@@ -135,7 +153,14 @@ SENTRY_DSN = os.environ.get("REDASH_SENTRY_DSN", "")
# Client side toggles:
ALLOW_SCRIPTS_IN_USER_INPUT = parse_boolean(os.environ.get("REDASH_ALLOW_SCRIPTS_IN_USER_INPUT", "false"))
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
# http://api.highcharts.com/highcharts#plotOptions.series.turboThreshold
HIGHCHARTS_TURBO_THRESHOLD = int(os.environ.get("REDASH_HIGHCHARTS_TURBO_THRESHOLD", "1000"))
DATE_FORMAT = os.environ.get("REDASH_DATE_FORMAT", "DD/MM/YY")

# Features:
FEATURE_ALLOW_ALL_TO_EDIT_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_ALL_TO_EDIT", "true"))
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))
VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))

# BigQuery
BIGQUERY_HTTP_TIMEOUT = int(os.environ.get("REDASH_BIGQUERY_HTTP_TIMEOUT", "600"))
@@ -1,14 +1,22 @@
 import time
 import logging
+import signal
+import traceback
 from flask.ext.mail import Message
 import redis
+import hipchat
+import requests
+import json
+from redash.utils import json_dumps
+from requests.auth import HTTPBasicAuth
 from celery import Task
 from celery.result import AsyncResult
 from celery.utils.log import get_task_logger
 from redash import redis_connection, models, statsd_client, settings, utils, mail
 from redash.utils import gen_query_hash
 from redash.worker import celery
-from redash.query_runner import get_query_runner
+from redash.query_runner import get_query_runner, InterruptException
+from version_check import run_version_check

 logger = get_task_logger(__name__)
@@ -132,7 +140,7 @@ class QueryTask(object):
         return self._async_result.ready()

     def cancel(self):
-        return self._async_result.revoke(terminate=True)
+        return self._async_result.revoke(terminate=True, signal='SIGINT')

     @staticmethod
     def _job_lock_id(query_hash, data_source_id):
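Revoking with SIGINT (instead of the default SIGTERM) lets the worker convert the signal into a Python exception and unwind through the query runner's cleanup code. A minimal sketch of that pattern, independent of Celery:

    import signal
    import time

    class InterruptException(Exception):
        pass

    def signal_handler(*args):
        raise InterruptException

    signal.signal(signal.SIGINT, signal_handler)

    try:
        time.sleep(60)  # stands in for a long-running query
    except InterruptException:
        print("query cancelled, running cleanup")  # except/finally blocks still run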
@@ -213,7 +221,10 @@ def cleanup_query_results():
     Each time the job deletes only a limited number of query results so it won't choke the database in case of many such results.
     """

-    unused_query_results = models.QueryResult.unused().limit(100)
+    logging.info("Running query results clean up (removing maximum of %d unused results, that are %d days old or more)",
+                 settings.QUERY_RESULTS_CLEANUP_COUNT, settings.QUERY_RESULTS_CLEANUP_MAX_AGE)
+
+    unused_query_results = models.QueryResult.unused(settings.QUERY_RESULTS_CLEANUP_MAX_AGE).limit(settings.QUERY_RESULTS_CLEANUP_COUNT)
     total_unused_query_results = models.QueryResult.unused().count()
     deleted_count = models.QueryResult.delete().where(models.QueryResult.id << unused_query_results).execute()
@@ -250,22 +261,24 @@ def check_alerts_for_query(self, query_id):
             continue

-        # message = Message
-        recipients = [s.email for s in alert.subscribers()]
-        logger.debug("Notifying: %s", recipients)
         html = """
         Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
         """.format(host=settings.HOST, alert_id=alert.id, query_id=query.id)

-        with app.app_context():
-            message = Message(recipients=recipients,
-                              subject="[{1}] {0}".format(alert.name, new_state.upper()),
-                              html=html)
-
-            mail.send(message)
+        notify_mail(alert, html, new_state, app)
+
+        if settings.HIPCHAT_API_TOKEN:
+            notify_hipchat(alert, html, new_state)
+
+        if settings.WEBHOOK_ENDPOINT:
+            notify_webhook(alert, query, html, new_state)
+
+
+def signal_handler(*args):
+    raise InterruptException
+

 @celery.task(bind=True, base=BaseTask, track_started=True)
 def execute_query(self, query, data_source_id, metadata):
+    signal.signal(signal.SIGINT, signal_handler)
     start_time = time.time()

     logger.info("Loading data source (%d)...", data_source_id)
@@ -317,3 +330,42 @@ def execute_query(self, query, data_source_id, metadata):
 @celery.task(base=BaseTask)
 def record_event(event):
     models.Event.record(event)
+
+
+@celery.task(base=BaseTask)
+def version_check():
+    run_version_check()
+
+
+def notify_hipchat(alert, html, new_state):
+    try:
+        hipchat_client = hipchat.HipChat(token=settings.HIPCHAT_API_TOKEN)
+        message = '[' + new_state.upper() + '] ' + alert.name + '<br />' + html
+        hipchat_client.message_room(settings.HIPCHAT_ROOM_ID, settings.NAME, message, message_format='html')
+    except:
+        logger.exception("hipchat send ERROR.")
+
+
+def notify_mail(alert, html, new_state, app):
+    recipients = [s.email for s in alert.subscribers()]
+    logger.debug("Notifying: %s", recipients)
+    try:
+        with app.app_context():
+            message = Message(recipients=recipients,
+                              subject="[{1}] {0}".format(alert.name, new_state.upper()),
+                              html=html)
+            mail.send(message)
+    except:
+        logger.exception("mail send ERROR.")
+
+
+def notify_webhook(alert, query, html, new_state):
+    try:
+        data = {
+            'event': 'alert_state_change',
+            'alert': alert.to_dict(full=False),
+            'url_base': settings.HOST
+        }
+        headers = {'Content-Type': 'application/json'}
+        auth = HTTPBasicAuth(settings.WEBHOOK_USERNAME, settings.WEBHOOK_PASSWORD) if settings.WEBHOOK_USERNAME else None
+        resp = requests.post(settings.WEBHOOK_ENDPOINT, data=json_dumps(data), auth=auth, headers=headers)
+        if resp.status_code != 200:
+            logger.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
+    except:
+        logger.exception("webhook send ERROR.")
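For anyone wiring up the webhook, a rough sketch of a receiver (Flask is used here purely for illustration; the payload keys mirror the data dict built in notify_webhook above):

    from flask import Flask, request, jsonify

    app = Flask(__name__)

    @app.route('/redash-alerts', methods=['POST'])
    def redash_alert():
        payload = request.get_json()
        # payload['event'] is 'alert_state_change'; payload['alert'] is the
        # serialized alert; payload['url_base'] is the configured REDASH_HOST.
        print(payload['event'], payload['alert'].get('name'))
        return jsonify(ok=True)

    if __name__ == '__main__':
        app.run(port=5001)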
51
redash/version_check.py
Normal file
@@ -0,0 +1,51 @@
import logging
import requests
import semver

from redash import __version__ as current_version
from redash import redis_connection
from redash.utils import json_dumps

REDIS_KEY = "new_version_available"


def run_version_check():
    logging.info("Performing version check.")
    logging.info("Current version: %s", current_version)

    data = json_dumps({
        'current_version': current_version
    })
    headers = {'content-type': 'application/json'}

    try:
        response = requests.post('https://version.redash.io/api/report?channel=stable',
                                 data=data, headers=headers, timeout=3.0)
        latest_version = response.json()['release']['version']

        _compare_and_update(latest_version)
    except requests.RequestException:
        logging.exception("Failed checking for new version.")
    except (ValueError, KeyError):
        logging.exception("Failed checking for new version (probably bad/non-JSON response).")


def reset_new_version_status():
    latest_version = get_latest_version()
    if latest_version:
        _compare_and_update(latest_version)


def get_latest_version():
    return redis_connection.get(REDIS_KEY)


def _compare_and_update(latest_version):
    # TODO: support alpha channel (allow setting which channel to check & parse build number)
    is_newer = semver.compare(current_version, latest_version) == -1
    logging.info("Latest version: %s (newer: %s)", latest_version, is_newer)

    if is_newer:
        redis_connection.set(REDIS_KEY, latest_version)
    else:
        redis_connection.delete(REDIS_KEY)
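semver.compare returns -1, 0, or 1, so the == -1 test above means "the running version is older than the reported release". A quick illustration (version strings are made up):

    import semver

    print(semver.compare('0.8.1', '0.8.3'))  # -1: an update is available
    print(semver.compare('0.8.3', '0.8.3'))  # 0: up to date
    print(semver.compare('0.9.0', '0.8.3'))  # 1: running ahead of the latest release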
@@ -1,5 +1,7 @@
+from random import randint
 from celery import Celery
 from datetime import timedelta
+from celery.schedules import crontab
 from redash import settings, __version__

@@ -22,6 +24,14 @@ celery_schedule = {
     }
 }

+if settings.VERSION_CHECK:
+    celery_schedule['version_check'] = {
+        'task': 'redash.tasks.version_check',
+        # We need to schedule the version check to run at a random hour/minute, to spread the requests from all users
+        # evenly.
+        'schedule': crontab(minute=randint(0, 59), hour=randint(0, 23))
+    }
+
 if settings.QUERY_RESULTS_CLEANUP_ENABLED:
     celery_schedule['cleanup_query_results'] = {
         'task': 'redash.tasks.cleanup_query_results',
@@ -37,5 +47,4 @@ if settings.SENTRY_DSN:
     from raven.contrib.celery import register_signal, register_logger_signal

     client = Client(settings.SENTRY_DSN, release=__version__)
     register_logger_signal(client)
     register_signal(client)
@@ -33,3 +33,5 @@ pysaml2==2.4.0
 pycrypto==2.6.1
 funcy==1.5
 raven==5.6.0
+semver==2.2.1
+python-simple-hipchat==0.4.0
@@ -8,3 +8,4 @@ pyhive==0.1.5
 pymongo==2.7.2
 pyOpenSSL==0.14
 vertica-python==0.5.1
+td-client==0.3.2
4
requirements_oracle_ds.txt
Normal file
@@ -0,0 +1,4 @@
# Requires installation of, or similar versions of:
# oracle-instantclient12.1-basic_12.1.0.2.0-2_amd64.deb
# oracle-instantclient12.1-devel_12.1.0.2.0-2_amd64.deb
cx_Oracle==5.2
@@ -1,12 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
-VAGRANTFILE_API_VERSION = "2"
-
-Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
-  # Every Vagrant virtual environment requires a box to build off of.
-  config.vm.box = "box-cutter/debian76"
-  config.vm.provision "shell", path: "setup.sh"
-  config.vm.network "forwarded_port", guest: 80, host: 9001
-end
1
setup/amazon_linux/README.md
Normal file
@@ -0,0 +1 @@
Bootstrap script for Amazon Linux AMI. *Not supported*; we recommend using the Docker images instead.
@@ -2,8 +2,7 @@
 set -eu

 REDASH_BASE_PATH=/opt/redash
-FILES_BASE_URL=https://raw.githubusercontent.com/EverythingMe/redash/docs_setup/setup/files/
-FILE_BASE_URL_FOR_AMAZON_LINUX=https://raw.githubusercontent.com/EverythingMe/redash/master/setup/files/
+FILES_BASE_URL=https://raw.githubusercontent.com/getredash/redash/master/setup/amazon_linux/files/
 # Verify running as root:
 if [ "$(id -u)" != "0" ]; then
   if [ $# -ne 0 ]; then
@@ -106,7 +105,7 @@ fi

 # Install latest version
 REDASH_VERSION=${REDASH_VERSION-0.6.3.b906}
-LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
+LATEST_URL="https://github.com/getredash/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
 VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
 REDASH_TARBALL=/tmp/redash.tar.gz
 REDASH_TARBALL=/tmp/redash.tar.gz
@@ -178,7 +177,7 @@ fi


 # Get supervisord startup script
-sudo -u redash wget -O /opt/redash/supervisord/supervisord.conf $FILE_BASE_URL_FOR_AMAZON_LINUX"supervisord_for_amazon_linux.conf"
+sudo -u redash wget -O /opt/redash/supervisord/supervisord.conf $FILES_BASE_URL"supervisord.conf"

 # install start-stop-daemon
 wget http://developer.axis.com/download/distribution/apps-sys-utils-start-stop-daemon-IR1_9_18-2.tar.gz
@@ -187,7 +186,7 @@ cd apps/sys-utils/start-stop-daemon-IR1_9_18-2/
 gcc start-stop-daemon.c -o start-stop-daemon
 cp start-stop-daemon /sbin/

-wget -O /etc/init.d/redash_supervisord $FILE_BASE_URL_FOR_AMAZON_LINUX"redash_supervisord_init_for_amazon_linux"
+wget -O /etc/init.d/redash_supervisord $FILES_BASE_URL"redash_supervisord_init"
 add_service "redash_supervisord"

 # Nginx setup
Some files were not shown because too many files have changed in this diff.