mirror of
https://github.com/getredash/redash.git
synced 2025-12-25 01:03:20 -05:00
Compare commits
222 Commits
v0.1.31
...
v0.3.5+b16
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cc957cc3e8 | ||
|
|
dd5fd72bd2 | ||
|
|
9d4655cc00 | ||
|
|
3320de07f2 | ||
|
|
68482afa5c | ||
|
|
bfeded207a | ||
|
|
a5971b0c69 | ||
|
|
6d93ccc0d0 | ||
|
|
69f5de6478 | ||
|
|
4630a8d18d | ||
|
|
79e40a667b | ||
|
|
2c904641a5 | ||
|
|
1303163aee | ||
|
|
14ecfd2cc8 | ||
|
|
a91eb9435b | ||
|
|
b5d2285b99 | ||
|
|
fece24a50a | ||
|
|
7d77da8339 | ||
|
|
e43366f422 | ||
|
|
c7af5bdce9 | ||
|
|
3f302ee4a3 | ||
|
|
53ef0f3f2d | ||
|
|
c6dbb8d7c8 | ||
|
|
f4088e0b38 | ||
|
|
d3d46aa023 | ||
|
|
55cc3dd90e | ||
|
|
0822789002 | ||
|
|
ffb2ec9bd1 | ||
|
|
2bcb56d249 | ||
|
|
8ccbe9c069 | ||
|
|
85f98f7405 | ||
|
|
ac946fd014 | ||
|
|
3680d0c65d | ||
|
|
8130d28442 | ||
|
|
9cac38d5da | ||
|
|
81122c9865 | ||
|
|
b8a0077b1d | ||
|
|
62108e3dac | ||
|
|
0c9fa8b51b | ||
|
|
aa2bf4fe22 | ||
|
|
e82f561c03 | ||
|
|
d348fe9012 | ||
|
|
7271b7a5f0 | ||
|
|
522536cfe0 | ||
|
|
f557b53ce2 | ||
|
|
1277da7e92 | ||
|
|
f334122e41 | ||
|
|
269cbe839b | ||
|
|
2a3bcc2ecb | ||
|
|
5babab85c8 | ||
|
|
8debd01a36 | ||
|
|
51a37cae3d | ||
|
|
3c24e76eb4 | ||
|
|
6dc9f8ea2b | ||
|
|
157b1ca0b4 | ||
|
|
8be95262d4 | ||
|
|
3cbdae6e5c | ||
|
|
edcf0661a6 | ||
|
|
6d14c5c555 | ||
|
|
a0662d5323 | ||
|
|
cbd1cf7c25 | ||
|
|
a55225b5e8 | ||
|
|
b81c3ba614 | ||
|
|
2d0998a995 | ||
|
|
766840de68 | ||
|
|
791f2e0b34 | ||
|
|
9241a7c35d | ||
|
|
dda92477cf | ||
|
|
07455e5821 | ||
|
|
1b9aae0137 | ||
|
|
30b86ea781 | ||
|
|
a186d44d8f | ||
|
|
574f75b293 | ||
|
|
252ae7455a | ||
|
|
d73dbdeee0 | ||
|
|
72065c0ee2 | ||
|
|
07caee1d12 | ||
|
|
4c3904760c | ||
|
|
8ad2c2a59e | ||
|
|
e5a365ba41 | ||
|
|
fc0b118188 | ||
|
|
a207b93d0d | ||
|
|
b1d588b1f2 | ||
|
|
95a6bab8b5 | ||
|
|
c82433e6b4 | ||
|
|
2e84852519 | ||
|
|
da746d15a0 | ||
|
|
1b519269d8 | ||
|
|
5ffaf1aead | ||
|
|
b704406164 | ||
|
|
5c9fe40702 | ||
|
|
fe7c4f96aa | ||
|
|
83909a07fa | ||
|
|
cd99927881 | ||
|
|
8bbb485d5b | ||
|
|
b2ec77668e | ||
|
|
f8302ab65a | ||
|
|
e632cf1c42 | ||
|
|
640557df4f | ||
|
|
9b7227a88b | ||
|
|
aabc912862 | ||
|
|
02d6567347 | ||
|
|
6f8767d1fc | ||
|
|
bc787efc86 | ||
|
|
e0d46c3942 | ||
|
|
5a2bed29aa | ||
|
|
8fbcd0c34d | ||
|
|
97df37536c | ||
|
|
373b9c6a97 | ||
|
|
009726c62d | ||
|
|
69c07a41e9 | ||
|
|
64afd62a1f | ||
|
|
4318468957 | ||
|
|
1af3fc1c96 | ||
|
|
1e11f8032a | ||
|
|
a1a7ca8a0a | ||
|
|
52758fa66e | ||
|
|
fa43ff1365 | ||
|
|
bd15162fb7 | ||
|
|
cc980edc66 | ||
|
|
7fd094ba39 | ||
|
|
68ef489d8c | ||
|
|
21ff1d7482 | ||
|
|
669b1d9a63 | ||
|
|
29531a361c | ||
|
|
c40cf2e7e8 | ||
|
|
7bf391e772 | ||
|
|
fbb84af955 | ||
|
|
d954eb63ef | ||
|
|
1b14161535 | ||
|
|
bcf854604b | ||
|
|
f265d9174a | ||
|
|
970e0e2d04 | ||
|
|
9055865e1c | ||
|
|
f9b6aca8e8 | ||
|
|
d084b5a03c | ||
|
|
a6ab0ff2aa | ||
|
|
1bce924d83 | ||
|
|
f571e8ac6e | ||
|
|
27bf2e642b | ||
|
|
d4ca903a07 | ||
|
|
0f8bbdc9f2 | ||
|
|
fb9f814b00 | ||
|
|
b4f88196dc | ||
|
|
78e748548c | ||
|
|
199cddfbdb | ||
|
|
c0ca602017 | ||
|
|
3471b9853e | ||
|
|
6765d7d89f | ||
|
|
250aa17e63 | ||
|
|
2942d20ac3 | ||
|
|
d32799b2dc | ||
|
|
ff62fbbcf4 | ||
|
|
69ec362a8d | ||
|
|
41d00543d0 | ||
|
|
f890e590e1 | ||
|
|
2aec982577 | ||
|
|
b66d5daad0 | ||
|
|
6ff07b99dc | ||
|
|
1586860e15 | ||
|
|
99dac8f6fd | ||
|
|
5fb910b886 | ||
|
|
fb826ec838 | ||
|
|
5198cc17d3 | ||
|
|
261ecfcb11 | ||
|
|
6582bce0d3 | ||
|
|
db91ca82c1 | ||
|
|
cb7fbc16b0 | ||
|
|
c6c639f16f | ||
|
|
cb5968bc5f | ||
|
|
693b25efc5 | ||
|
|
6eddaeda61 | ||
|
|
349bfa9139 | ||
|
|
b0f75678ee | ||
|
|
0a0f7d7365 | ||
|
|
6d1ff98bda | ||
|
|
065324d256 | ||
|
|
69f7c3417e | ||
|
|
806f57c627 | ||
|
|
e4c7844cae | ||
|
|
6ebfa16740 | ||
|
|
43cfdb8727 | ||
|
|
b31c5be70e | ||
|
|
d84d047470 | ||
|
|
42a0659012 | ||
|
|
6386f0f9aa | ||
|
|
9aaf17d478 | ||
|
|
1f908f9040 | ||
|
|
b51ef059f5 | ||
|
|
a9e135c94f | ||
|
|
212ade2da7 | ||
|
|
f939bf6108 | ||
|
|
3360cd934b | ||
|
|
f35a0970ac | ||
|
|
97ca722a11 | ||
|
|
e554c9bdd7 | ||
|
|
567a732e1e | ||
|
|
5b532d03a0 | ||
|
|
cd838e5a7e | ||
|
|
bb096be00c | ||
|
|
7b78bfe191 | ||
|
|
a45ba0bf30 | ||
|
|
5ce3699a58 | ||
|
|
1cd836ac8d | ||
|
|
c83705119d | ||
|
|
fdd2cfe1d1 | ||
|
|
8327baa2f6 | ||
|
|
84df2fb85c | ||
|
|
cab6f9e58d | ||
|
|
d2ace5c1cf | ||
|
|
5eddddb7b5 | ||
|
|
6408b9e5e1 | ||
|
|
b0159c8246 | ||
|
|
b056e49ec5 | ||
|
|
fef5c287d7 | ||
|
|
09c65ee9dc | ||
|
|
a2385a1779 | ||
|
|
95529ce8f0 | ||
|
|
1a6e5b425a | ||
|
|
87e0962c5a | ||
|
|
1625149221 | ||
|
|
2b13ef1063 |
5
.coveragerc
Normal file
5
.coveragerc
Normal file
@@ -0,0 +1,5 @@
|
||||
[report]
|
||||
omit =
|
||||
*/settings.py
|
||||
*/python?.?/*
|
||||
*/site-packages/nose/*
|
||||
9
.env.example
Normal file
9
.env.example
Normal file
@@ -0,0 +1,9 @@
|
||||
REDASH_CONNECTION_ADAPTER=pg
|
||||
REDASH_CONNECTION_STRING="dbname=data"
|
||||
REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
|
||||
REDASH_GOOGLE_APPS_DOMAIN=
|
||||
REDASH_ADMINS=
|
||||
REDASH_WORKERS_COUNT=2
|
||||
REDASH_COOKIE_SECRET=
|
||||
REDASH_DATABASE_URL='postgresql://rd'
|
||||
REDASH_LOG_LEVEL = "INFO"
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -1,10 +1,13 @@
|
||||
.coveralls.yml
|
||||
.idea
|
||||
*.pyc
|
||||
rd_service/settings.py
|
||||
.coverage
|
||||
rd_ui/dist
|
||||
.DS_Store
|
||||
|
||||
# Vagrant related
|
||||
.vagrant
|
||||
Berksfile.lock
|
||||
rd_service/dump.rdb
|
||||
redash/dump.rdb
|
||||
.env
|
||||
.ruby-version
|
||||
@@ -1,3 +0,0 @@
|
||||
cookbook 'apt'
|
||||
cookbook 'postgresql'
|
||||
cookbook 'redash', git: 'git@github.com:EverythingMe/chef-redash.git'
|
||||
9
Makefile
9
Makefile
@@ -1,6 +1,6 @@
|
||||
NAME=redash
|
||||
VERSION=0.1
|
||||
FULL_VERSION=$(VERSION).$(CIRCLE_BUILD_NUM)
|
||||
VERSION=`python ./manage.py version`
|
||||
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
|
||||
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(FULL_VERSION).tar.gz
|
||||
|
||||
deps:
|
||||
@@ -10,7 +10,10 @@ deps:
|
||||
cd rd_ui && grunt build
|
||||
|
||||
pack:
|
||||
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
|
||||
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
|
||||
|
||||
upload:
|
||||
python bin/upload_version.py $(FULL_VERSION) $(FILENAME)
|
||||
|
||||
test:
|
||||
nosetests --with-coverage --cover-package=redash tests/*.py
|
||||
|
||||
2
Procfile.dev
Normal file
2
Procfile.dev
Normal file
@@ -0,0 +1,2 @@
|
||||
web: ./manage.py runserver -p $PORT
|
||||
worker: ./manage.py runworkers
|
||||
2
Procfile.heroku
Normal file
2
Procfile.heroku
Normal file
@@ -0,0 +1,2 @@
|
||||
web: ./manage.py runserver -p $PORT --host 0.0.0.0 -d -r
|
||||
worker: ./manage.py runworkers
|
||||
61
README.md
61
README.md
@@ -1,4 +1,5 @@
|
||||
# [_re:dash_](https://github.com/everythingme/redash)
|
||||

|
||||
|
||||
**_re:dash_** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
|
||||
|
||||
@@ -21,10 +22,15 @@ You can try out the demo instance: http://rd-demo.herokuapp.com/ (login with any
|
||||
|
||||
Due to Heroku dev plan limits, it has a small database of flights (see schema [here](http://rd-demo.herokuapp.com/dashboard/schema)). Also due to another Heroku limitation, it is running with the regular user, hence you can DELETE or INSERT data/tables. Please be nice and don't do this.
|
||||
|
||||
## Getting help
|
||||
|
||||
* [Google Group (mailing list)](https://groups.google.com/forum/#!forum/redash-users): the best place to get updates about new releases or ask general questions.
|
||||
* #redash IRC channel on [Freenode](http://www.freenode.net/).
|
||||
|
||||
## Technology
|
||||
|
||||
* Python
|
||||
* [AngularJS](http://angularjs.org/)
|
||||
* [Tornado](http://tornadoweb.org)
|
||||
* [PostgreSQL](http://www.postgresql.org/) / [AWS Redshift](http://aws.amazon.com/redshift/)
|
||||
* [Redis](http://redis.io)
|
||||
|
||||
@@ -40,62 +46,21 @@ It's very likely that in the future we will switch to [D3.js](http://d3js.org/)
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. Clone the repo:
|
||||
```bash
|
||||
git clone git@github.com:EverythingMe/redash.git
|
||||
```
|
||||
2. Create settings file from the example one (& update relevant settings):
|
||||
```bash
|
||||
cp rd_service/settings_example.py rd_service/settings.py
|
||||
```
|
||||
It's highly recommended that the user you use to connect to the data database (the one you query) is read-only.
|
||||
3. Create the operational databases from rd_service/data/tables.sql
|
||||
3. Install `npm` packages (mainly: Bower & Grunt):
|
||||
```bash
|
||||
cd rd_ui
|
||||
npm install
|
||||
```
|
||||
4. Install `bower` packages:
|
||||
```bash
|
||||
bower install
|
||||
```
|
||||
5. Build the UI:
|
||||
```bash
|
||||
grunt build
|
||||
```
|
||||
6. Install PIP packages:
|
||||
```bash
|
||||
pip install -r ../rd_service/requirements.txt
|
||||
```
|
||||
6. Start the API server:
|
||||
```bash
|
||||
cd ../rd_service
|
||||
python server.py
|
||||
```
|
||||
7. Start the workers:
|
||||
```bash
|
||||
python cli.py worker
|
||||
```
|
||||
8. Open `http://localhost:8888/` and query away.
|
||||
* [Setting up re:dash on Heroku in 5 minutes](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-on-Heroku-in-5-minutes)
|
||||
* [Setting re:dash on your own server (Ubuntu)](https://github.com/EverythingMe/redash/wiki/Setting-re:dash-on-your-own-server-(Ubuntu))
|
||||
|
||||
**Need help setting re:dash or one of the dependencies up?** Ping @arikfr on the IRC #redash channel or send a message to the [mailing list](https://groups.google.com/forum/#!forum/redash-users), and he will gladly help.
|
||||
|
||||
## Roadmap
|
||||
|
||||
We plan to release new minor version every 2-3 weeks. Of course, if we get additional help from contributors it will help speed things up.
|
||||
|
||||
Below you can see the "big" features of the next 3 releases (for full list, click on the link):
|
||||
|
||||
### [v0.2](https://github.com/EverythingMe/redash/issues?milestone=1&state=open)
|
||||
|
||||
- Ability to generate multiple visualizations for a single query (dataset) in a more flexible way than today. Also easier extensbility points to add additional visualizations.
|
||||
- Dashboard filters: ability to filter/slice the data you see in a single dashboard using filters (date or selectors).
|
||||
- UI Improvements (better notifications & flows, improved queries page)
|
||||
- Comments on queries.
|
||||
|
||||
### [v0.3](https://github.com/EverythingMe/redash/issues?milestone=2&state=open)
|
||||
|
||||
- Support for API access using API keys, instead of Google Login.
|
||||
- Dashboard filters: ability to filter/slice the data you see in a single dashboard using filters (date or selectors).
|
||||
- Multiple databases support (including other database type than PostgreSQL).
|
||||
- Scheduled reports by email.
|
||||
- Comments on queries.
|
||||
|
||||
### [v0.4](https://github.com/EverythingMe/redash/issues?milestone=3&state=open)
|
||||
|
||||
|
||||
60
Vagrantfile
vendored
60
Vagrantfile
vendored
@@ -1,60 +0,0 @@
|
||||
# -*- mode: ruby -*-
|
||||
# vi: set ft=ruby :
|
||||
|
||||
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
|
||||
VAGRANTFILE_API_VERSION = '2'
|
||||
|
||||
POSTGRES_PASSWORD = 'securepass'
|
||||
|
||||
# Currently, chef postgress cookbook works with cleartext paswords,
|
||||
# unless the password begins with 'md5'
|
||||
# See https://github.com/hw-cookbooks/postgresql/issues/95
|
||||
require "digest/md5"
|
||||
postgres_password_md5 = 'md5'+Digest::MD5.hexdigest(POSTGRES_PASSWORD+'postgres')
|
||||
|
||||
# After starting the vagrant machine, the application is accessible via the URL
|
||||
# http://localhost:9999
|
||||
HOST_PORT_TO_FORWARD_TO_REDASH = 9999
|
||||
|
||||
# Deploy direcly the code in parent dir; Don't download a release tarball
|
||||
live_testing_deployment = true
|
||||
|
||||
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
||||
config.vm.box = 'ubuntu-precise-cloudimg-amd64'
|
||||
config.vm.box_url = 'http://cloud-images.ubuntu.com/vagrant/precise/current/precise-server-cloudimg-amd64-vagrant-disk1.box'
|
||||
|
||||
if config.respond_to? :cache
|
||||
config.cache.auto_detect = true
|
||||
end
|
||||
|
||||
config.berkshelf.enabled = true
|
||||
config.omnibus.chef_version = :latest
|
||||
|
||||
config.vm.network 'forwarded_port', guest: 8888, host: HOST_PORT_TO_FORWARD_TO_REDASH
|
||||
|
||||
if live_testing_deployment
|
||||
config.vm.synced_folder "..", "/opt/redash"
|
||||
end
|
||||
|
||||
config.vm.provision :chef_solo do |chef|
|
||||
# run apt-get update before anything else (specifically postgresql)..
|
||||
chef.add_recipe 'apt'
|
||||
chef.add_recipe 'redash::redis_for_redash'
|
||||
chef.add_recipe 'postgresql::client'
|
||||
chef.add_recipe 'postgresql::server'
|
||||
chef.add_recipe 'redash::redash_pg_schema'
|
||||
chef.add_recipe 'redash::redash'
|
||||
# chef.log_level = :debug
|
||||
chef.json = {
|
||||
'apt' => { 'compiletime' => true },
|
||||
'postgresql' => { 'password' => {'postgres' => postgres_password_md5 } },
|
||||
'redash' => { 'db' => {'host' => 'localhost',
|
||||
'user' => 'postgres',
|
||||
'password' => POSTGRES_PASSWORD },
|
||||
'allow' => {'google_app_domain' => 'gmail.com',
|
||||
'admins' => ['joe@egmail.com','jack@gmail.com']},
|
||||
'install_tarball' => !live_testing_deployment,
|
||||
'user' => live_testing_deployment ? 'vagrant' : 'redash'}
|
||||
}
|
||||
end
|
||||
end
|
||||
@@ -15,7 +15,8 @@ if __name__ == '__main__':
|
||||
params = json.dumps({
|
||||
'tag_name': 'v{0}'.format(version),
|
||||
'name': 're:dash v{0}'.format(version),
|
||||
'target_commitish': commit_sha
|
||||
'target_commitish': commit_sha,
|
||||
'prerelease': True
|
||||
})
|
||||
|
||||
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
|
||||
|
||||
@@ -8,8 +8,14 @@ machine:
|
||||
dependencies:
|
||||
pre:
|
||||
- make deps
|
||||
- pip install requests
|
||||
- pip install -r dev_requirements.txt
|
||||
- pip install -r requirements.txt
|
||||
cache_directories:
|
||||
- rd_ui/node_modules/
|
||||
- rd_ui/app/bower_components/
|
||||
test:
|
||||
override:
|
||||
- make test
|
||||
post:
|
||||
- make pack
|
||||
deployment:
|
||||
|
||||
3
dev_requirements.txt
Normal file
3
dev_requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
nose==1.3.0
|
||||
coverage==3.7.1
|
||||
mock==1.0.1
|
||||
102
manage.py
Executable file
102
manage.py
Executable file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
CLI to manage redash.
|
||||
"""
|
||||
import atfork
|
||||
atfork.monkeypatch_os_fork_functions()
|
||||
import atfork.stdlib_fixer
|
||||
atfork.stdlib_fixer.fix_logging_module()
|
||||
|
||||
import logging
|
||||
import time
|
||||
from redash import settings, app, db, models, data_manager, __version__
|
||||
from flask.ext.script import Manager, prompt_pass
|
||||
|
||||
manager = Manager(app)
|
||||
database_manager = Manager(help="Manages the database (create/drop tables).")
|
||||
users_manager = Manager(help="Users management commands.")
|
||||
|
||||
@manager.command
|
||||
def version():
|
||||
"""Displays re:dash version."""
|
||||
print __version__
|
||||
|
||||
|
||||
@manager.command
|
||||
def runworkers():
|
||||
"""Starts the re:dash query executors/workers."""
|
||||
|
||||
try:
|
||||
old_workers = data_manager.redis_connection.smembers('workers')
|
||||
data_manager.redis_connection.delete('workers')
|
||||
|
||||
logging.info("Cleaning old workers: %s", old_workers)
|
||||
|
||||
data_manager.start_workers(settings.WORKERS_COUNT, settings.CONNECTION_ADAPTER, settings.CONNECTION_STRING)
|
||||
logging.info("Workers started.")
|
||||
|
||||
while True:
|
||||
try:
|
||||
data_manager.refresh_queries()
|
||||
data_manager.report_status()
|
||||
except Exception as e:
|
||||
logging.error("Something went wrong with refreshing queries...")
|
||||
logging.exception(e)
|
||||
time.sleep(60)
|
||||
except KeyboardInterrupt:
|
||||
logging.warning("Exiting; waiting for threads")
|
||||
data_manager.stop_workers()
|
||||
|
||||
@manager.shell
|
||||
def make_shell_context():
|
||||
return dict(app=app, db=db, models=models)
|
||||
|
||||
@database_manager.command
|
||||
def create_tables():
|
||||
"""Creates the database tables."""
|
||||
from redash.models import create_db
|
||||
|
||||
create_db(True, False)
|
||||
|
||||
@database_manager.command
|
||||
def drop_tables():
|
||||
"""Drop the database tables."""
|
||||
from redash.models import create_db
|
||||
|
||||
create_db(False, True)
|
||||
|
||||
|
||||
@users_manager.option('email', help="User's email")
|
||||
@users_manager.option('name', help="User's full name")
|
||||
@users_manager.option('--admin', dest='is_admin', default=False, help="set user as admin")
|
||||
@users_manager.option('--google', dest='google_auth', default=False, help="user uses Google Auth to login")
|
||||
def create(email, name, is_admin=False, google_auth=False):
|
||||
print "Creating user (%s, %s)..." % (email, name)
|
||||
print "Admin: %r" % is_admin
|
||||
print "Login with Google Auth: %r\n" % google_auth
|
||||
|
||||
user = models.User(email=email, name=name, is_admin=is_admin)
|
||||
if not google_auth:
|
||||
password = prompt_pass("Password")
|
||||
user.hash_password(password)
|
||||
|
||||
try:
|
||||
user.save()
|
||||
except Exception, e:
|
||||
print "Failed creating user: %s" % e.message
|
||||
|
||||
|
||||
@users_manager.option('email', help="email address of user to delete")
|
||||
def delete(email):
|
||||
deleted_count = models.User.delete().where(models.User.email == email).execute()
|
||||
print "Deleted %d users." % deleted_count
|
||||
|
||||
manager.add_command("database", database_manager)
|
||||
manager.add_command("users", users_manager)
|
||||
|
||||
if __name__ == '__main__':
|
||||
channel = logging.StreamHandler()
|
||||
logging.getLogger().addHandler(channel)
|
||||
logging.getLogger().setLevel(settings.LOG_LEVEL)
|
||||
|
||||
manager.run()
|
||||
13
migrations/add_created_at_field.py
Normal file
13
migrations/add_created_at_field.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.Dashboard, models.Dashboard.created_at, 'created_at')
|
||||
migrator.add_column(models.Widget, models.Widget.created_at, 'created_at')
|
||||
|
||||
db.close_db(None)
|
||||
12
migrations/add_password_to_users.py
Normal file
12
migrations/add_password_to_users.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.User, models.User.password_hash, 'password_hash')
|
||||
|
||||
db.close_db(None)
|
||||
12
migrations/change_queries_description_to_nullable.py
Normal file
12
migrations/change_queries_description_to_nullable.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Query, models.Query.description, True)
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/change_query_id_on_widgets_to_null.py
Normal file
13
migrations/change_query_id_on_widgets_to_null.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Widget, models.Widget.query_id, True)
|
||||
migrator.set_nullable(models.Widget, models.Widget.type, True)
|
||||
|
||||
db.close_db(None)
|
||||
56
migrations/create_users.py
Normal file
56
migrations/create_users.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import json
|
||||
import itertools
|
||||
import peewee
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db, settings
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
if not models.User.table_exists():
|
||||
print "Creating user table..."
|
||||
models.User.create_table()
|
||||
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
print "Creating user field on dashboard and queries..."
|
||||
try:
|
||||
migrator.rename_column(models.Query, '"user"', "user_email")
|
||||
migrator.rename_column(models.Dashboard, '"user"', "user_email")
|
||||
except peewee.ProgrammingError:
|
||||
print "Failed to rename user column -- assuming it already exists"
|
||||
|
||||
with db.database.transaction():
|
||||
models.Query.user.null = True
|
||||
models.Dashboard.user.null = True
|
||||
|
||||
try:
|
||||
migrator.add_column(models.Query, models.Query.user, "user_id")
|
||||
migrator.add_column(models.Dashboard, models.Dashboard.user, "user_id")
|
||||
except peewee.ProgrammingError:
|
||||
print "Failed to create user_id column -- assuming it already exists"
|
||||
|
||||
print "Creating user for all queries and dashboards..."
|
||||
for obj in itertools.chain(models.Query.select(), models.Dashboard.select()):
|
||||
# Some old databases might have queries with empty string as user email:
|
||||
email = obj.user_email or settings.ADMINS[0]
|
||||
email = email.split(',')[0]
|
||||
|
||||
print ".. {} , {}, {}".format(type(obj), obj.id, email)
|
||||
|
||||
try:
|
||||
user = models.User.get(models.User.email == email)
|
||||
except models.User.DoesNotExist:
|
||||
is_admin = email in settings.ADMINS
|
||||
user = models.User.create(email=email, name=email, is_admin=is_admin)
|
||||
|
||||
obj.user = user
|
||||
obj.save()
|
||||
|
||||
print "Set user_id to non null..."
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Query, models.Query.user, False)
|
||||
migrator.set_nullable(models.Dashboard, models.Dashboard.user, False)
|
||||
migrator.set_nullable(models.Query, models.Query.user_email, True)
|
||||
migrator.set_nullable(models.Dashboard, models.Dashboard.user_email, True)
|
||||
70
migrations/create_visualizations.py
Normal file
70
migrations/create_visualizations.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import json
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
default_options = {"series": {"type": "column"}}
|
||||
|
||||
db.connect_db()
|
||||
|
||||
if not models.Visualization.table_exists():
|
||||
print "Creating visualization table..."
|
||||
models.Visualization.create_table()
|
||||
|
||||
with db.database.transaction():
|
||||
migrator = Migrator(db.database)
|
||||
print "Adding visualization_id to widgets:"
|
||||
field = models.Widget.visualization
|
||||
field.null = True
|
||||
migrator.add_column(models.Widget, models.Widget.visualization, 'visualization_id')
|
||||
|
||||
print 'Creating TABLE visualizations for all queries...'
|
||||
for query in models.Query.select():
|
||||
vis = models.Visualization(query=query, name="Table",
|
||||
description=query.description or "",
|
||||
type="TABLE", options="{}")
|
||||
vis.save()
|
||||
|
||||
print 'Creating COHORT visualizations for all queries named like %cohort%...'
|
||||
for query in models.Query.select().where(models.Query.name ** "%cohort%"):
|
||||
vis = models.Visualization(query=query, name="Cohort",
|
||||
description=query.description or "",
|
||||
type="COHORT", options="{}")
|
||||
vis.save()
|
||||
|
||||
print 'Create visualization for all widgets (unless exists already):'
|
||||
for widget in models.Widget.select():
|
||||
print 'Processing widget id: %d:' % widget.id
|
||||
vis_type = widget.type.upper()
|
||||
if vis_type == 'GRID':
|
||||
vis_type = 'TABLE'
|
||||
|
||||
query = models.Query.get_by_id(widget.query_id)
|
||||
vis = query.visualizations.where(models.Visualization.type == vis_type).first()
|
||||
if vis:
|
||||
print '... visualization type (%s) found.' % vis_type
|
||||
widget.visualization = vis
|
||||
widget.save()
|
||||
else:
|
||||
vis_name = vis_type.title()
|
||||
|
||||
options = json.loads(widget.options)
|
||||
vis_options = {"series": options} if options else default_options
|
||||
vis_options = json.dumps(vis_options)
|
||||
|
||||
vis = models.Visualization(query=query, name=vis_name,
|
||||
description=query.description or "",
|
||||
type=vis_type, options=vis_options)
|
||||
|
||||
print '... Created visualization for type: %s' % vis_type
|
||||
vis.save()
|
||||
widget.visualization = vis
|
||||
widget.save()
|
||||
|
||||
with db.database.transaction():
|
||||
migrator = Migrator(db.database)
|
||||
print "Setting visualization_id as not null..."
|
||||
migrator.set_nullable(models.Widget, models.Widget.visualization, False)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
"""
|
||||
CLI to start the workers.
|
||||
|
||||
TODO: move API server startup here.
|
||||
"""
|
||||
import atfork
|
||||
atfork.monkeypatch_os_fork_functions()
|
||||
import atfork.stdlib_fixer
|
||||
atfork.stdlib_fixer.fix_logging_module()
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import urlparse
|
||||
import redis
|
||||
import time
|
||||
import settings
|
||||
import data
|
||||
|
||||
|
||||
def start_workers(data_manager):
|
||||
try:
|
||||
old_workers = data_manager.redis_connection.smembers('workers')
|
||||
data_manager.redis_connection.delete('workers')
|
||||
|
||||
logging.info("Cleaning old workers: %s", old_workers)
|
||||
|
||||
data_manager.start_workers(settings.WORKERS_COUNT, settings.CONNECTION_STRING)
|
||||
logging.info("Workers started.")
|
||||
|
||||
while True:
|
||||
try:
|
||||
data_manager.refresh_queries()
|
||||
except Exception as e:
|
||||
logging.error("Something went wrong with refreshing queries...")
|
||||
logging.exception(e)
|
||||
time.sleep(60)
|
||||
except KeyboardInterrupt:
|
||||
logging.warning("Exiting; waiting for threads")
|
||||
data_manager.stop_workers()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
channel = logging.StreamHandler()
|
||||
logging.getLogger().addHandler(channel)
|
||||
logging.getLogger().setLevel(settings.LOG_LEVEL)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("command")
|
||||
args = parser.parse_args()
|
||||
|
||||
url = urlparse.urlparse(settings.REDIS_URL)
|
||||
redis_connection = redis.StrictRedis(host=url.hostname, port=url.port, db=0, password=url.password)
|
||||
data_manager = data.Manager(redis_connection, settings.INTERNAL_DB_CONNECTION_STRING, settings.MAX_CONNECTIONS)
|
||||
|
||||
if args.command == "worker":
|
||||
start_workers(data_manager)
|
||||
else:
|
||||
print "Unknown command"
|
||||
|
||||
@@ -1,174 +0,0 @@
|
||||
"""
|
||||
Django ORM based models to describe the data model of re:dash.
|
||||
"""
|
||||
import hashlib
|
||||
import json
|
||||
import time
|
||||
from django.db import models
|
||||
from django.template.defaultfilters import slugify
|
||||
import utils
|
||||
|
||||
|
||||
class QueryResult(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
query_hash = models.CharField(max_length=32)
|
||||
query = models.TextField()
|
||||
data = models.TextField()
|
||||
runtime = models.FloatField()
|
||||
retrieved_at = models.DateTimeField()
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'query_results'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'query_hash': self.query_hash,
|
||||
'query': self.query,
|
||||
'data': json.loads(self.data),
|
||||
'runtime': self.runtime,
|
||||
'retrieved_at': self.retrieved_at
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
|
||||
|
||||
|
||||
class Query(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
latest_query_data = models.ForeignKey(QueryResult)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.CharField(max_length=4096)
|
||||
query = models.TextField()
|
||||
query_hash = models.CharField(max_length=32)
|
||||
api_key = models.CharField(max_length=40)
|
||||
ttl = models.IntegerField()
|
||||
user = models.CharField(max_length=360)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'queries'
|
||||
|
||||
def to_dict(self, with_result=True, with_stats=False):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'latest_query_data_id': self.latest_query_data_id,
|
||||
'name': self.name,
|
||||
'description': self.description,
|
||||
'query': self.query,
|
||||
'query_hash': self.query_hash,
|
||||
'ttl': self.ttl,
|
||||
'user': self.user,
|
||||
'api_key': self.api_key,
|
||||
'created_at': self.created_at,
|
||||
}
|
||||
|
||||
if with_stats:
|
||||
d['avg_runtime'] = self.avg_runtime
|
||||
d['min_runtime'] = self.min_runtime
|
||||
d['max_runtime'] = self.max_runtime
|
||||
d['last_retrieved_at'] = self.last_retrieved_at
|
||||
d['times_retrieved'] = self.times_retrieved
|
||||
|
||||
if with_result and self.latest_query_data_id:
|
||||
d['latest_query_data'] = self.latest_query_data.to_dict()
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def all_queries(cls):
|
||||
query = """SELECT queries.*, query_stats.*
|
||||
FROM queries
|
||||
LEFT OUTER JOIN
|
||||
(SELECT qu.query_hash,
|
||||
count(0) AS "times_retrieved",
|
||||
avg(runtime) AS "avg_runtime",
|
||||
min(runtime) AS "min_runtime",
|
||||
max(runtime) AS "max_runtime",
|
||||
max(retrieved_at) AS "last_retrieved_at"
|
||||
FROM queries qu
|
||||
JOIN query_results qr ON qu.query_hash=qr.query_hash
|
||||
GROUP BY qu.query_hash) query_stats ON query_stats.query_hash = queries.query_hash
|
||||
"""
|
||||
return cls.objects.raw(query)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.query_hash = utils.gen_query_hash(self.query)
|
||||
self._set_api_key()
|
||||
super(Query, self).save(*args, **kwargs)
|
||||
|
||||
def _set_api_key(self):
|
||||
if not self.api_key:
|
||||
self.api_key = hashlib.sha1(
|
||||
u''.join([str(time.time()), self.query, self.user, self.name])).hexdigest()
|
||||
|
||||
def __unicode__(self):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class Dashboard(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
slug = models.CharField(max_length=140)
|
||||
name = models.CharField(max_length=100)
|
||||
user = models.CharField(max_length=360)
|
||||
layout = models.TextField()
|
||||
is_archived = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'dashboards'
|
||||
|
||||
def to_dict(self, with_widgets=False):
|
||||
layout = json.loads(self.layout)
|
||||
|
||||
if with_widgets:
|
||||
widgets = {w.id: w.to_dict() for w in self.widgets.all()}
|
||||
widgets_layout = map(lambda row: map(lambda widget_id: widgets.get(widget_id, None), row), layout)
|
||||
else:
|
||||
widgets_layout = None
|
||||
|
||||
return {
|
||||
'id': self.id,
|
||||
'slug': self.slug,
|
||||
'name': self.name,
|
||||
'user': self.user,
|
||||
'layout': layout,
|
||||
'widgets': widgets_layout
|
||||
}
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# TODO: make sure slug is unique
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
super(Dashboard, self).save(*args, **kwargs)
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s=%s" % (self.id, self.name)
|
||||
|
||||
|
||||
class Widget(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
query = models.ForeignKey(Query)
|
||||
type = models.CharField(max_length=100)
|
||||
width = models.IntegerField()
|
||||
options = models.TextField()
|
||||
dashboard = models.ForeignKey(Dashboard, related_name='widgets')
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'widgets'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'query': self.query.to_dict(),
|
||||
'type': self.type,
|
||||
'width': self.width,
|
||||
'options': json.loads(self.options),
|
||||
'dashboard_id': self.dashboard_id
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s=>%s" % (self.id, self.dashboard_id)
|
||||
@@ -1,46 +0,0 @@
|
||||
BEGIN;
|
||||
CREATE TABLE "query_results" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"query_hash" varchar(32) NOT NULL,
|
||||
"query" text NOT NULL,
|
||||
"data" text NOT NULL,
|
||||
"runtime" double precision NOT NULL,
|
||||
"retrieved_at" timestamp with time zone NOT NULL
|
||||
)
|
||||
;
|
||||
CREATE TABLE "queries" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"latest_query_data_id" integer REFERENCES "query_results" ("id") DEFERRABLE INITIALLY DEFERRED,
|
||||
"name" varchar(255) NOT NULL,
|
||||
"description" varchar(4096),
|
||||
"query" text NOT NULL,
|
||||
"query_hash" varchar(32) NOT NULL,
|
||||
"api_key" varchar(40),
|
||||
"ttl" integer NOT NULL,
|
||||
"user" varchar(360) NOT NULL,
|
||||
"created_at" timestamp with time zone NOT NULL
|
||||
)
|
||||
;
|
||||
CREATE TABLE "dashboards" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"slug" varchar(140) NOT NULL,
|
||||
"name" varchar(100) NOT NULL,
|
||||
"user" varchar(360) NOT NULL,
|
||||
"layout" text NOT NULL,
|
||||
"is_archived" boolean NOT NULL
|
||||
)
|
||||
;
|
||||
CREATE TABLE "widgets" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"query_id" integer NOT NULL REFERENCES "queries" ("id") DEFERRABLE INITIALLY DEFERRED,
|
||||
"type" varchar(100) NOT NULL,
|
||||
"width" integer NOT NULL,
|
||||
"options" text NOT NULL,
|
||||
"dashboard_id" integer NOT NULL REFERENCES "dashboards" ("id") DEFERRABLE INITIALLY DEFERRED
|
||||
)
|
||||
;
|
||||
CREATE INDEX "queries_latest_query_data_id" ON "queries" ("latest_query_data_id");
|
||||
CREATE INDEX "widgets_query_id" ON "widgets" ("query_id");
|
||||
CREATE INDEX "widgets_dashboard_id" ON "widgets" ("dashboard_id");
|
||||
|
||||
COMMIT;
|
||||
@@ -1,10 +0,0 @@
|
||||
psycopg2==2.5.1
|
||||
redis==2.7.5
|
||||
tornado==3.0.2
|
||||
sqlparse==0.1.8
|
||||
Django==1.5.4
|
||||
django-db-pool==0.0.10
|
||||
qr==0.6.0
|
||||
python-dateutil==2.1
|
||||
setproctitle==1.1.8
|
||||
atfork==0.1.2
|
||||
@@ -1,355 +0,0 @@
|
||||
"""
|
||||
Tornado based API implementation for re:dash.
|
||||
|
||||
Also at the moment the Tornado server is used to serve the static assets (and the Angular.js app),
|
||||
but this is only due to configuration issues and temporary.
|
||||
|
||||
Usage:
|
||||
python server.py [--port=8888] [--debug] [--static=..]
|
||||
|
||||
port - port to listen to
|
||||
debug - enable debug mode (extensive logging, restart on code change)
|
||||
static - static assets path
|
||||
|
||||
If static option isn't specified it will be taken from settings.py.
|
||||
"""
|
||||
import csv
|
||||
import hashlib
|
||||
import json
|
||||
import numbers
|
||||
import os
|
||||
import urlparse
|
||||
import logging
|
||||
import cStringIO
|
||||
import datetime
|
||||
import dateutil.parser
|
||||
import redis
|
||||
import sqlparse
|
||||
import tornado.ioloop
|
||||
import tornado.web
|
||||
import tornado.auth
|
||||
import tornado.options
|
||||
import settings
|
||||
import time
|
||||
from data import utils
|
||||
import data
|
||||
|
||||
|
||||
class BaseHandler(tornado.web.RequestHandler):
|
||||
def initialize(self):
|
||||
self.data_manager = self.application.settings.get('data_manager', None)
|
||||
self.redis_connection = self.application.settings['redis_connection']
|
||||
|
||||
def get_current_user(self):
|
||||
user = self.get_secure_cookie("user")
|
||||
return user
|
||||
|
||||
def write_json(self, response, encode=True):
|
||||
if encode:
|
||||
response = json.dumps(response, cls=utils.JSONEncoder)
|
||||
self.set_header("Content-Type", "application/json; charset=UTF-8")
|
||||
self.write(response)
|
||||
|
||||
|
||||
class BaseAuthenticatedHandler(BaseHandler):
|
||||
@tornado.web.authenticated
|
||||
def prepare(self):
|
||||
pass
|
||||
|
||||
|
||||
class PingHandler(tornado.web.RequestHandler):
|
||||
def get(self):
|
||||
self.write("PONG")
|
||||
|
||||
|
||||
class GoogleLoginHandler(tornado.web.RequestHandler,
|
||||
tornado.auth.GoogleMixin):
|
||||
@tornado.web.asynchronous
|
||||
@tornado.gen.coroutine
|
||||
def get(self):
|
||||
if self.get_argument("openid.mode", None):
|
||||
user = yield self.get_authenticated_user()
|
||||
|
||||
if user['email'] in settings.ALLOWED_USERS or user['email'].endswith("@%s" % settings.GOOGLE_APPS_DOMAIN):
|
||||
logging.info("Authenticated: %s", user['email'])
|
||||
self.set_secure_cookie("user", user['email'])
|
||||
self.redirect("/")
|
||||
else:
|
||||
logging.error("Failed logging in with: %s", user)
|
||||
self.authenticate_redirect()
|
||||
else:
|
||||
self.authenticate_redirect()
|
||||
|
||||
|
||||
class MainHandler(BaseAuthenticatedHandler):
|
||||
def get(self, *args):
|
||||
email_md5 = hashlib.md5(self.current_user.lower()).hexdigest()
|
||||
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
user = {
|
||||
'gravatar_url': gravatar_url,
|
||||
'is_admin': self.current_user in settings.ADMINS,
|
||||
'name': self.current_user
|
||||
}
|
||||
|
||||
self.render("index.html", user=json.dumps(user), analytics=settings.ANALYTICS)
|
||||
|
||||
|
||||
class QueryFormatHandler(BaseAuthenticatedHandler):
|
||||
def post(self):
|
||||
arguments = json.loads(self.request.body)
|
||||
query = arguments.get("query", "")
|
||||
|
||||
self.write(sqlparse.format(query, reindent=True, keyword_case='upper'))
|
||||
|
||||
|
||||
class StatusHandler(BaseAuthenticatedHandler):
|
||||
def get(self):
|
||||
status = {}
|
||||
info = self.redis_connection.info()
|
||||
status['redis_used_memory'] = info['used_memory_human']
|
||||
|
||||
status['queries_count'] = data.models.Query.objects.count()
|
||||
status['query_results_count'] = data.models.QueryResult.objects.count()
|
||||
status['dashboards_count'] = data.models.Dashboard.objects.count()
|
||||
status['widgets_count'] = data.models.Widget.objects.count()
|
||||
|
||||
status['workers'] = [self.redis_connection.hgetall(w)
|
||||
for w in self.redis_connection.smembers('workers')]
|
||||
|
||||
manager_status = self.redis_connection.hgetall('manager:status')
|
||||
status['manager'] = manager_status
|
||||
status['manager']['queue_size'] = self.redis_connection.zcard('jobs')
|
||||
|
||||
self.write_json(status)
|
||||
|
||||
|
||||
class WidgetsHandler(BaseAuthenticatedHandler):
|
||||
def post(self, widget_id=None):
|
||||
widget_properties = json.loads(self.request.body)
|
||||
widget_properties['options'] = json.dumps(widget_properties['options'])
|
||||
widget = data.models.Widget(**widget_properties)
|
||||
widget.save()
|
||||
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
new_row = True
|
||||
|
||||
if len(layout) == 0 or widget.width == 2:
|
||||
layout.append([widget.id])
|
||||
elif len(layout[-1]) == 1:
|
||||
neighbour_widget = data.models.Widget.objects.get(pk=layout[-1][0])
|
||||
if neighbour_widget.width == 1:
|
||||
layout[-1].append(widget.id)
|
||||
new_row = False
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
self.write_json({'widget': widget.to_dict(), 'layout': layout, 'new_row': new_row})
|
||||
|
||||
def delete(self, widget_id):
|
||||
widget_id = int(widget_id)
|
||||
widget = data.models.Widget.objects.get(pk=widget_id)
|
||||
# TODO: reposition existing ones
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
layout = map(lambda row: filter(lambda w: w != widget_id, row), layout)
|
||||
layout = filter(lambda row: len(row) > 0, layout)
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
widget.delete()
|
||||
|
||||
|
||||
class DashboardHandler(BaseAuthenticatedHandler):
|
||||
def get(self, dashboard_slug=None):
|
||||
if dashboard_slug:
|
||||
dashboard = data.models.Dashboard.objects.prefetch_related('widgets__query__latest_query_data').get(slug=dashboard_slug)
|
||||
self.write_json(dashboard.to_dict(with_widgets=True))
|
||||
else:
|
||||
dashboards = [d.to_dict() for d in
|
||||
data.models.Dashboard.objects.filter(is_archived=False)]
|
||||
self.write_json(dashboards)
|
||||
|
||||
def post(self, dashboard_id):
|
||||
if dashboard_id:
|
||||
dashboard_properties = json.loads(self.request.body)
|
||||
dashboard = data.models.Dashboard.objects.get(pk=dashboard_id)
|
||||
dashboard.layout = dashboard_properties['layout']
|
||||
dashboard.name = dashboard_properties['name']
|
||||
dashboard.save()
|
||||
|
||||
self.write_json(dashboard.to_dict(with_widgets=True))
|
||||
else:
|
||||
dashboard_properties = json.loads(self.request.body)
|
||||
dashboard = data.models.Dashboard(name=dashboard_properties['name'],
|
||||
user=self.current_user,
|
||||
layout='[]')
|
||||
dashboard.save()
|
||||
self.write_json(dashboard.to_dict())
|
||||
|
||||
def delete(self, dashboard_slug):
|
||||
dashboard = data.models.Dashboard.objects.get(slug=dashboard_slug)
|
||||
dashboard.is_archived = True
|
||||
dashboard.save()
|
||||
|
||||
|
||||
class QueriesHandler(BaseAuthenticatedHandler):
|
||||
def post(self, id=None):
|
||||
query_def = json.loads(self.request.body)
|
||||
if 'created_at' in query_def:
|
||||
query_def['created_at'] = dateutil.parser.parse(query_def['created_at'])
|
||||
|
||||
query_def.pop('latest_query_data', None)
|
||||
|
||||
if id:
|
||||
query = data.models.Query(**query_def)
|
||||
fields = query_def.keys()
|
||||
fields.remove('id')
|
||||
query.save(update_fields=fields)
|
||||
else:
|
||||
query_def['user'] = self.current_user
|
||||
query = data.models.Query(**query_def)
|
||||
query.save()
|
||||
|
||||
self.write_json(query.to_dict(with_result=False))
|
||||
|
||||
def get(self, id=None):
|
||||
if id:
|
||||
q = data.models.Query.objects.get(pk=id)
|
||||
if q:
|
||||
self.write_json(q.to_dict())
|
||||
else:
|
||||
self.send_error(404)
|
||||
else:
|
||||
self.write_json([q.to_dict(with_result=False, with_stats=True) for q in data.models.Query.all_queries()])
|
||||
|
||||
|
||||
class QueryResultsHandler(BaseAuthenticatedHandler):
|
||||
def get(self, query_result_id):
|
||||
query_result = self.data_manager.get_query_result_by_id(query_result_id)
|
||||
if query_result:
|
||||
self.write_json({'query_result': query_result.to_dict(parse_data=True)})
|
||||
else:
|
||||
self.send_error(404)
|
||||
|
||||
def post(self, _):
|
||||
params = json.loads(self.request.body)
|
||||
|
||||
if params['ttl'] == 0:
|
||||
query_result = None
|
||||
else:
|
||||
query_result = self.data_manager.get_query_result(params['query'], int(params['ttl']))
|
||||
|
||||
if query_result:
|
||||
self.write_json({'query_result': query_result.to_dict(parse_data=True)})
|
||||
else:
|
||||
job = self.data_manager.add_job(params['query'], data.Job.HIGH_PRIORITY)
|
||||
self.write({'job': job.to_dict()})
|
||||
|
||||
|
||||
class CsvQueryResultsHandler(BaseAuthenticatedHandler):
|
||||
def get_current_user(self):
|
||||
user = super(CsvQueryResultsHandler, self).get_current_user()
|
||||
if not user:
|
||||
api_key = self.get_argument("api_key", None)
|
||||
query = data.models.Query.objects.get(pk=self.path_args[0])
|
||||
|
||||
if query.api_key and query.api_key == api_key:
|
||||
user = "API-Key=%s" % api_key
|
||||
|
||||
return user
|
||||
|
||||
def get(self, query_id, result_id=None):
|
||||
if not result_id:
|
||||
query = data.models.Query.objects.get(pk=query_id)
|
||||
if query:
|
||||
result_id = query.latest_query_data_id
|
||||
|
||||
query_result = result_id and self.data_manager.get_query_result_by_id(result_id)
|
||||
if query_result:
|
||||
self.set_header("Content-Type", "text/csv; charset=UTF-8")
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(query_result.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
for k, v in row.iteritems():
|
||||
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
|
||||
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
|
||||
|
||||
writer.writerow(row)
|
||||
|
||||
self.write(s.getvalue())
|
||||
else:
|
||||
self.send_error(404)
|
||||
|
||||
|
||||
class JobsHandler(BaseAuthenticatedHandler):
|
||||
def get(self, job_id=None):
|
||||
if job_id:
|
||||
# TODO: if finished, include the query result
|
||||
job = data.Job.load(self.data_manager.redis_connection, job_id)
|
||||
self.write({'job': job.to_dict()})
|
||||
else:
|
||||
raise NotImplemented
|
||||
|
||||
def delete(self, job_id):
|
||||
job = data.Job.load(self.data_manager.redis_connection, job_id)
|
||||
job.cancel()
|
||||
|
||||
|
||||
def get_application(static_path, is_debug, redis_connection, data_manager):
|
||||
return tornado.web.Application([(r"/", MainHandler),
|
||||
(r"/ping", PingHandler),
|
||||
(r"/api/queries/([0-9]*)/results(?:/([0-9]*))?.csv", CsvQueryResultsHandler),
|
||||
(r"/api/queries/format", QueryFormatHandler),
|
||||
(r"/api/queries(?:/([0-9]*))?", QueriesHandler),
|
||||
(r"/api/query_results(?:/([0-9]*))?", QueryResultsHandler),
|
||||
(r"/api/jobs/(.*)", JobsHandler),
|
||||
(r"/api/widgets(?:/([0-9]*))?", WidgetsHandler),
|
||||
(r"/api/dashboards(?:/(.*))?", DashboardHandler),
|
||||
(r"/admin/(.*)", MainHandler),
|
||||
(r"/dashboard/(.*)", MainHandler),
|
||||
(r"/queries(.*)", MainHandler),
|
||||
(r"/login", GoogleLoginHandler),
|
||||
(r"/status.json", StatusHandler),
|
||||
(r"/(.*)", tornado.web.StaticFileHandler,
|
||||
{"path": static_path})],
|
||||
template_path=static_path,
|
||||
static_path=static_path,
|
||||
debug=is_debug,
|
||||
login_url="/login",
|
||||
cookie_secret=settings.COOKIE_SECRET,
|
||||
redis_connection=redis_connection,
|
||||
data_manager=data_manager)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
tornado.options.define("port", default=8888, type=int)
|
||||
tornado.options.define("debug", default=False, type=bool)
|
||||
tornado.options.define("static", default=settings.STATIC_ASSETS_PATH, type=str)
|
||||
|
||||
tornado.options.parse_command_line()
|
||||
|
||||
root_path = os.path.dirname(__file__)
|
||||
static_path = os.path.abspath(os.path.join(root_path, tornado.options.options.static))
|
||||
|
||||
url = urlparse.urlparse(settings.REDIS_URL)
|
||||
redis_connection = redis.StrictRedis(host=url.hostname, port=url.port, db=0, password=url.password)
|
||||
data_manager = data.Manager(redis_connection, settings.INTERNAL_DB_CONNECTION_STRING,
|
||||
settings.MAX_CONNECTIONS)
|
||||
|
||||
logging.info("re:dash web server stating on port: %d...", tornado.options.options.port)
|
||||
logging.info("UI assets path: %s...", static_path)
|
||||
|
||||
application = get_application(static_path, tornado.options.options.debug,
|
||||
redis_connection, data_manager)
|
||||
|
||||
application.listen(tornado.options.options.port)
|
||||
tornado.ioloop.IOLoop.instance().start()
|
||||
@@ -1,36 +0,0 @@
|
||||
"""
|
||||
Example settings module. You should make your own copy as settings.py and enter the real settings.
|
||||
"""
|
||||
|
||||
import django.conf
|
||||
|
||||
REDIS_URL = "redis://localhost:6379"
|
||||
# Connection string for the database that is used to run queries against
|
||||
CONNECTION_STRING = "user= password= host= port=5439 dbname="
|
||||
# Connection string for the operational databases (where we store the queries, results, etc)
|
||||
INTERNAL_DB_CONNECTION_STRING = "dbname=postgres"
|
||||
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
|
||||
# access
|
||||
GOOGLE_APPS_DOMAIN = ""
|
||||
# Email addresses of specific users not from the above set Google Apps Domain, that you want to
|
||||
# allow access to re:dash
|
||||
ALLOWED_USERS = []
|
||||
# Email addresses of admin users
|
||||
ADMINS = []
|
||||
STATIC_ASSETS_PATH = "../rd_ui/dist/"
|
||||
WORKERS_COUNT = 2
|
||||
MAX_CONNECTIONS = 3
|
||||
COOKIE_SECRET = "c292a0a3aa32397cdb050e233733900f"
|
||||
LOG_LEVEL = "INFO"
|
||||
ANALYTICS = ""
|
||||
|
||||
# Configuration of the operational database for the Django models
|
||||
django.conf.settings.configure(DATABASES = { 'default': {
|
||||
'ENGINE': 'dbpool.db.backends.postgresql_psycopg2',
|
||||
'OPTIONS': {'MAX_CONNS': 10, 'MIN_CONNS': 1},
|
||||
'NAME': 'postgres',
|
||||
'USER': '',
|
||||
'PASSWORD': '',
|
||||
'HOST': '',
|
||||
'PORT': '',
|
||||
},}, TIME_ZONE = 'UTC')
|
||||
@@ -170,7 +170,7 @@ module.exports = function (grunt) {
|
||||
}
|
||||
},
|
||||
useminPrepare: {
|
||||
html: '<%= yeoman.app %>/index.html',
|
||||
html: ['<%= yeoman.app %>/index.html', '<%= yeoman.app %>/login.html'],
|
||||
options: {
|
||||
dest: '<%= yeoman.dist %>'
|
||||
}
|
||||
|
||||
@@ -31,6 +31,7 @@
|
||||
</button>
|
||||
<a class="navbar-brand" href="/"><strong>re:dash</strong></a>
|
||||
</div>
|
||||
{% raw %}
|
||||
<div class="collapse navbar-collapse navbar-ex1-collapse">
|
||||
<ul class="nav navbar-nav">
|
||||
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
|
||||
@@ -42,13 +43,13 @@
|
||||
<a href="#" ng-bind="name"></a>
|
||||
<ul class="dropdown-menu">
|
||||
<li ng-repeat="dashboard in group" role="presentation">
|
||||
<a role="menu-item" ng-href="/dashboard/{{!dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
<a role="menu-item" ng-href="/dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</span>
|
||||
<li ng-repeat="dashboard in otherDashboards">
|
||||
<a role="menu-item" ng-href="/dashboard/{{!dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
<a role="menu-item" ng-href="/dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
</li>
|
||||
<li class="divider"></li>
|
||||
<li><a data-toggle="modal" href="#new_dashboard_dialog">New Dashboard</a></li>
|
||||
@@ -64,10 +65,11 @@
|
||||
</ul>
|
||||
<ul class="nav navbar-nav navbar-right">
|
||||
<p class="navbar-text avatar">
|
||||
<img ng-src="{{!currentUser.gravatar_url}}" class="img-circle" alt="{{!currentUser.name}}" width="40" height="40"/>
|
||||
<img ng-src="{{currentUser.gravatar_url}}" class="img-circle" alt="{{currentUser.name}}" width="40" height="40"/>
|
||||
</p>
|
||||
</ul>
|
||||
</div>
|
||||
{% endraw %}
|
||||
|
||||
</div>
|
||||
</nav>
|
||||
@@ -109,6 +111,10 @@
|
||||
<!-- build:js({.tmp,app}) /scripts/scripts.js -->
|
||||
<script src="/scripts/app.js"></script>
|
||||
<script src="/scripts/controllers.js"></script>
|
||||
<script src="/scripts/visualizations/base.js"></script>
|
||||
<script src="/scripts/visualizations/chart.js"></script>
|
||||
<script src="/scripts/visualizations/cohort.js"></script>
|
||||
<script src="/scripts/visualizations/table.js"></script>
|
||||
<script src="/scripts/admin_controllers.js"></script>
|
||||
<script src="/scripts/directives.js"></script>
|
||||
<script src="/scripts/services.js"></script>
|
||||
@@ -119,13 +125,14 @@
|
||||
<!-- endbuild -->
|
||||
|
||||
<script>
|
||||
var currentUser = {% raw user %};
|
||||
var currentUser = {{ user|safe }};
|
||||
|
||||
currentUser.canEdit = function(object) {
|
||||
return object.user && (object.user.indexOf(currentUser.name) != -1);
|
||||
var user_id = object.user_id || (object.user && object.user.id);
|
||||
return user_id && (user_id == currentUser.id);
|
||||
};
|
||||
|
||||
{% raw analytics %}
|
||||
{{ analytics|safe }}
|
||||
</script>
|
||||
|
||||
</body>
|
||||
|
||||
85
rd_ui/app/login.html
Normal file
85
rd_ui/app/login.html
Normal file
@@ -0,0 +1,85 @@
|
||||
<!DOCTYPE html>
|
||||
<!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
|
||||
<!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8"> <![endif]-->
|
||||
<!--[if IE 8]> <html class="no-js lt-ie9"> <![endif]-->
|
||||
<!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]-->
|
||||
<head>
|
||||
<title>re:dash Login</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
|
||||
<!-- build:css /styles/main_login.css -->
|
||||
<link rel="stylesheet" href="/bower_components/bootstrap/dist/css/bootstrap.css">
|
||||
<link rel="stylesheet" href="/styles/redash.css">
|
||||
<link rel="stylesheet" href="/styles/login.css">
|
||||
<!-- endbuild -->
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<nav class="navbar navbar-inverse navbar-fixed-top" role="navigation">
|
||||
<div class="container">
|
||||
<div class="navbar-header">
|
||||
<button type="button" class="navbar-toggle" data-toggle="collapse"
|
||||
data-target=".navbar-ex1-collapse">
|
||||
<span class="sr-only">Toggle navigation</span>
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
</button>
|
||||
<a class="navbar-brand" href="/"><strong>re:dash</strong></a>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
|
||||
<div class="main">
|
||||
<form role="form" method="post" name="login">
|
||||
<div class="form-group">
|
||||
<label for="inputUsernameEmail">Username or email</label>
|
||||
<input type="text" class="form-control" id="inputUsernameEmail" name="username" value="{{username}}">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<!--<a class="pull-right" href="#">Forgot password?</a>-->
|
||||
<label for="inputPassword">Password</label>
|
||||
<input type="password" class="form-control" id="inputPassword" name="password">
|
||||
</div>
|
||||
<div class="checkbox pull-right">
|
||||
<label>
|
||||
<input type="checkbox" name="remember">
|
||||
Remember me </label>
|
||||
</div>
|
||||
|
||||
<button type="submit" class="btn btn btn-primary">
|
||||
Log In
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{% if show_google_openid %}
|
||||
|
||||
<div class="login-or">
|
||||
<hr class="hr-or">
|
||||
<span class="span-or">or</span>
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<div class="col-xs-6 col-sm-6 col-md-6">
|
||||
<a href="/google_auth/login?next={{next}}" class="btn btn-lg btn-info btn-block">Google</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="/bower_components/jquery/jquery.js"></script>
|
||||
|
||||
<script>
|
||||
{{ analytics|safe }}
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
@@ -5,6 +5,7 @@ angular.module('redash', [
|
||||
'redash.filters',
|
||||
'redash.services',
|
||||
'redash.renderers',
|
||||
'redash.visualization',
|
||||
'ui.codemirror',
|
||||
'highchart',
|
||||
'angular-growl',
|
||||
|
||||
@@ -1,13 +1,51 @@
(function () {
var DashboardCtrl = function ($scope, $routeParams, $http, Dashboard) {
var DashboardCtrl = function ($scope, $routeParams, $http, $timeout, Dashboard) {
$scope.refreshEnabled = false;
$scope.refreshRate = 60;
$scope.dashboard = Dashboard.get({slug: $routeParams.dashboardSlug}, function(dashboard) {
$scope.$parent.pageTitle = dashboard.name;
});

var autoRefresh = function() {
if ($scope.refreshEnabled) {
$timeout(function() {
Dashboard.get({slug: $routeParams.dashboardSlug}, function(dashboard) {
var newWidgets = _.groupBy(_.flatten(dashboard.widgets), 'id');

_.each($scope.dashboard.widgets, function(row) {
_.each(row, function(widget, i) {
var newWidget = newWidgets[widget.id];
if (newWidget && newWidget[0].visualization.query.latest_query_data_id != widget.visualization.query.latest_query_data_id ) {
row[i] = newWidget[0];
}
});
});

autoRefresh();
});

}, $scope.refreshRate);
};
}

$scope.triggerRefresh = function(){
$scope.refreshEnabled = !$scope.refreshEnabled;

if ($scope.refreshEnabled) {
var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
return widget.visualization.query.ttl;
}).visualization.query.ttl;

$scope.refreshRate = _.max([120, refreshRate * 2])*1000;

autoRefresh();
}
};
};

var WidgetCtrl = function ($scope, $http, $location, Query) {
|
||||
$scope.deleteWidget = function() {
|
||||
if (!confirm('Are you sure you want to remove "' + $scope.widget.query.name + '" from the dashboard?')) {
|
||||
if (!confirm('Are you sure you want to remove "' + $scope.widget.visualization.name + '" from the dashboard?')) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -20,11 +58,12 @@
|
||||
});
|
||||
};
|
||||
|
||||
$scope.open = function(query) {
|
||||
$scope.open = function(query, visualization) {
|
||||
$location.path('/queries/' + query.id);
|
||||
$location.hash(visualization.id);
|
||||
}
|
||||
|
||||
$scope.query = new Query($scope.widget.query);
|
||||
$scope.query = new Query($scope.widget.visualization.query);
|
||||
$scope.queryResult = $scope.query.getQueryResult();
|
||||
|
||||
$scope.updateTime = (new Date($scope.queryResult.getUpdatedAt())).toISOString();
|
||||
@@ -33,12 +72,14 @@
|
||||
$scope.updateTime = '';
|
||||
}
|
||||
|
||||
var QueryFiddleCtrl = function ($scope, $window, $routeParams, $http, $location, growl, notifications, Query) {
|
||||
var QueryFiddleCtrl = function ($scope, $window, $location, $routeParams, $http, $location, growl, notifications, Query, Visualization) {
|
||||
var DEFAULT_TAB = 'table';
|
||||
var pristineHash = null;
|
||||
$scope.dirty = undefined;
|
||||
|
||||
var leavingPageText = "You will lose your changes if you leave";
|
||||
|
||||
$scope.dirty = undefined;
|
||||
$scope.newVisualization = undefined;
|
||||
|
||||
$window.onbeforeunload = function(){
|
||||
if (currentUser.canEdit($scope.query) && $scope.dirty) {
|
||||
return leavingPageText;
|
||||
@@ -72,8 +113,9 @@
|
||||
|
||||
$scope.$parent.pageTitle = "Query Fiddle";
|
||||
|
||||
$scope.tabs = [{'key': 'table', 'name': 'Table'}, {'key': 'chart', 'name': 'Chart'},
|
||||
{'key': 'pivot', 'name': 'Pivot Table'}, {'key': 'cohort', 'name': 'Cohort'}];
|
||||
$scope.$watch(function() {return $location.hash()}, function(hash) {
|
||||
$scope.selectedTab = hash || DEFAULT_TAB;
|
||||
});
|
||||
|
||||
$scope.lockButton = function (lock) {
|
||||
$scope.queryExecuting = lock;
|
||||
@@ -92,6 +134,7 @@
|
||||
if (!oldId) {
|
||||
oldId = $scope.query.id;
|
||||
}
|
||||
|
||||
delete $scope.query.latest_query_data;
|
||||
$scope.query.$save(function (q) {
|
||||
pristineHash = q.getHash();
|
||||
@@ -109,6 +152,9 @@
|
||||
} else {
|
||||
// TODO: replace this with a safer method
|
||||
$location.path($location.path().replace(oldId, q.id)).replace();
|
||||
|
||||
// Reset visualizations tab to table after duplicating a query:
|
||||
$location.hash('table');
|
||||
}
|
||||
}
|
||||
}, function(httpResponse) {
|
||||
@@ -136,6 +182,7 @@
|
||||
|
||||
$scope.refreshOptions = [
|
||||
{value: -1, name: 'No Refresh'},
|
||||
{value: 60, name: 'Every minute'},
|
||||
]
|
||||
|
||||
_.each(_.range(1, 13), function(i) {
|
||||
@@ -193,7 +240,7 @@
|
||||
$scope.queryResult = $scope.query.getQueryResult();
|
||||
});
|
||||
} else {
|
||||
$scope.query = new Query({query: "", name: "New Query", ttl: -1, user: currentUser.name});
|
||||
$scope.query = new Query({query: "", name: "New Query", ttl: -1, user: currentUser});
|
||||
$scope.lockButton(false);
|
||||
}
|
||||
|
||||
@@ -211,13 +258,33 @@
|
||||
$scope.queryResult = $scope.query.getQueryResult(0);
|
||||
$scope.lockButton(true);
|
||||
$scope.cancelling = false;
|
||||
}
|
||||
};
|
||||
|
||||
$scope.cancelExecution = function() {
|
||||
$scope.cancelling = true;
|
||||
$scope.queryResult.cancelExecution();
|
||||
}
|
||||
};
|
||||
|
||||
$scope.deleteVisualization = function($e, vis) {
|
||||
$e.preventDefault();
|
||||
if (confirm('Are you sure you want to delete ' + vis.name + ' ?')) {
|
||||
Visualization.delete(vis);
|
||||
if ($scope.selectedTab == vis.id) {
|
||||
$scope.selectedTab = DEFAULT_TAB;
|
||||
}
|
||||
$scope.query.visualizations =
|
||||
$scope.query.visualizations.filter(function(v) {
|
||||
return vis.id !== v.id;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
unbind = $scope.$watch('selectedTab == "add"', function(newPanel) {
|
||||
if (newPanel && $routeParams.queryId == undefined) {
|
||||
unbind();
|
||||
$scope.saveQuery();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var QueriesCtrl = function($scope, $http, $location, $filter, Query) {
@@ -244,9 +311,9 @@
}

if ($scope.selectedTab.key == 'my') {
return query.user == currentUser.name && query.name != 'New Query';
return query.user.id == currentUser.id && query.name != 'New Query';
} else if ($scope.selectedTab.key == 'drafts') {
return query.user == currentUser.name && query.name == 'New Query';
return query.user.id == currentUser.id && query.name == 'New Query';
}

return query.name != 'New Query';
@@ -271,7 +338,7 @@
},
{
'label': 'Created By',
'map': 'user'
'map': 'user.name'
},
{
'label': 'Created At',
@@ -370,10 +437,10 @@
}

angular.module('redash.controllers', [])
.controller('DashboardCtrl', ['$scope', '$routeParams', '$http', 'Dashboard', DashboardCtrl])
.controller('DashboardCtrl', ['$scope', '$routeParams', '$http', '$timeout', 'Dashboard', DashboardCtrl])
.controller('WidgetCtrl', ['$scope', '$http', '$location', 'Query', WidgetCtrl])
.controller('QueriesCtrl', ['$scope', '$http', '$location', '$filter', 'Query', QueriesCtrl])
.controller('QueryFiddleCtrl', ['$scope', '$window', '$routeParams', '$http', '$location', 'growl', 'notifications', 'Query', QueryFiddleCtrl])
.controller('QueryFiddleCtrl', ['$scope', '$window', '$location', '$routeParams', '$http', '$location', 'growl', 'notifications', 'Query', 'Visualization', QueryFiddleCtrl])
.controller('IndexCtrl', ['$scope', 'Dashboard', IndexCtrl])
.controller('MainCtrl', ['$scope', 'Dashboard', 'notifications', MainCtrl]);
})();

@@ -1,239 +1,314 @@
|
||||
var directives = angular.module('redash.directives', []);
|
||||
directives.directive('rdTabs', ['$location', '$rootScope', function($location, $rootScope) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
tabsCollection: '=',
|
||||
selectedTab: '='
|
||||
},
|
||||
template: '<ul class="nav nav-tabs"><li ng-class="{active: tab==selectedTab}" ng-repeat="tab in tabsCollection"><a href="#{{tab.key}}">{{tab.name}}</a></li></ul>',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.selectTab = function(tabKey) {
|
||||
$scope.selectedTab = _.find($scope.tabsCollection, function(tab) { return tab.key == tabKey; });
|
||||
}
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
$scope.$watch(function() { return $location.hash()}, function(hash) {
|
||||
if (hash) {
|
||||
$scope.selectTab($location.hash());
|
||||
} else {
|
||||
$scope.selectTab($scope.tabsCollection[0].key);
|
||||
}
|
||||
});
|
||||
var directives = angular.module('redash.directives', []);
|
||||
|
||||
directives.directive('rdTab', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
'id': '@',
|
||||
'name': '@'
|
||||
},
|
||||
transclude: true,
|
||||
template: '<li class="rd-tab" ng-class="{active: id==selectedTab}"><a href="#{{id}}">{{name}}<span ng-transclude></span></a></li>',
|
||||
replace: true,
|
||||
link: function(scope) {
|
||||
scope.$watch(function(){return scope.$parent.selectedTab}, function(tab) {
|
||||
scope.selectedTab = tab;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}])
|
||||
});
|
||||
|
||||
directives.directive('editDashboardForm', ['$http', '$location', '$timeout', 'Dashboard', function($http, $location, $timeout, Dashboard) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/edit_dashboard.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.$watch('dashboard.widgets', function() {
|
||||
if ($scope.dashboard.widgets) {
|
||||
$scope.layout = [];
|
||||
_.each($scope.dashboard.widgets, function(row, rowIndex) {
|
||||
_.each(row, function(widget, colIndex) {
|
||||
$scope.layout.push({
|
||||
id: widget.id,
|
||||
col: colIndex+1,
|
||||
row: rowIndex+1,
|
||||
ySize: 1,
|
||||
xSize: widget.width,
|
||||
name: widget.query.name
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
$timeout(function () {
|
||||
$(".gridster ul").gridster({
|
||||
widget_margins: [5, 5],
|
||||
widget_base_dimensions: [260, 100],
|
||||
min_cols: 2,
|
||||
max_cols: 2,
|
||||
serialize_params: function ($w, wgd) {
|
||||
return { col: wgd.col, row: wgd.row, id: $w.data('widget-id') }
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
$scope.saveDashboard = function() {
|
||||
$scope.saveInProgress = true;
|
||||
// TODO: we should use the dashboard service here.
|
||||
if ($scope.dashboard.id) {
|
||||
var positions = $(element).find('.gridster ul').data('gridster').serialize();
|
||||
var layout = [];
|
||||
_.each(_.sortBy(positions, function (pos) {
|
||||
return pos.row * 10 + pos.col;
|
||||
}), function (pos) {
|
||||
var row = pos.row - 1;
|
||||
var col = pos.col - 1;
|
||||
layout[row] = layout[row] || [];
|
||||
if (col > 0 && layout[row][col - 1] == undefined) {
|
||||
layout[row][col - 1] = pos.id;
|
||||
} else {
|
||||
layout[row][col] = pos.id;
|
||||
}
|
||||
|
||||
});
|
||||
$scope.dashboard.layout = layout;
|
||||
|
||||
layout = JSON.stringify(layout);
|
||||
$http.post('/api/dashboards/' + $scope.dashboard.id, {'name': $scope.dashboard.name, 'layout': layout}).success(function(response) {
|
||||
$scope.dashboard = new Dashboard(response);
|
||||
$scope.saveInProgress = false;
|
||||
$(element).modal('hide');
|
||||
})
|
||||
} else {
|
||||
$http.post('/api/dashboards', {'name': $scope.dashboard.name}).success(function(response) {
|
||||
$(element).modal('hide');
|
||||
$location.path('/dashboard/' + response.slug).replace();
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}])
|
||||
|
||||
|
||||
directives.directive('newWidgetForm', ['$http', function($http) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/new_widget_form.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.widgetTypes = [{name: 'Chart', value: 'chart'}, {name: 'Table', value: 'grid'}, {name: 'Cohort', value: 'cohort'}];
|
||||
$scope.widgetSizes = [{name: 'Regular Size', value: 1}, {name: 'Double Size', value: 2}];
|
||||
|
||||
var reset = function() {
|
||||
$scope.saveInProgress = false;
|
||||
$scope.widgetType = 'chart';
|
||||
$scope.widgetSize = 1;
|
||||
$scope.queryId = null;
|
||||
}
|
||||
|
||||
reset();
|
||||
|
||||
$scope.saveWidget = function() {
|
||||
$scope.saveInProgress = true;
|
||||
|
||||
var widget = {
|
||||
'query_id': $scope.queryId,
|
||||
'dashboard_id': $scope.dashboard.id,
|
||||
'type': $scope.widgetType,
|
||||
'options': {},
|
||||
'width': $scope.widgetSize
|
||||
directives.directive('rdTabs', ['$location', function($location) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
tabsCollection: '=',
|
||||
selectedTab: '='
|
||||
},
|
||||
template: '<ul class="nav nav-tabs"><li ng-class="{active: tab==selectedTab}" ng-repeat="tab in tabsCollection"><a href="#{{tab.key}}">{{tab.name}}</a></li></ul>',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.selectTab = function(tabKey) {
|
||||
$scope.selectedTab = _.find($scope.tabsCollection, function(tab) { return tab.key == tabKey; });
|
||||
}
|
||||
|
||||
$http.post('/api/widgets', widget).success(function(response) {
|
||||
// update dashboard layout
|
||||
$scope.dashboard.layout = response['layout'];
|
||||
if (response['new_row']) {
|
||||
$scope.dashboard.widgets.push([response['widget']]);
|
||||
$scope.$watch(function() { return $location.hash()}, function(hash) {
|
||||
if (hash) {
|
||||
$scope.selectTab($location.hash());
|
||||
} else {
|
||||
$scope.dashboard.widgets[$scope.dashboard.widgets.length-1].push(response['widget']);
|
||||
$scope.selectTab($scope.tabsCollection[0].key);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
directives.directive('editDashboardForm', ['$http', '$location', '$timeout', 'Dashboard', function($http, $location, $timeout, Dashboard) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/edit_dashboard.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
var gridster = element.find(".gridster ul").gridster({
|
||||
widget_margins: [5, 5],
|
||||
widget_base_dimensions: [260, 100],
|
||||
min_cols: 2,
|
||||
max_cols: 2,
|
||||
serialize_params: function($w, wgd) {
|
||||
return {
|
||||
col: wgd.col,
|
||||
row: wgd.row,
|
||||
id: $w.data('widget-id')
|
||||
}
|
||||
}
|
||||
}).data('gridster');
|
||||
|
||||
var gsItemTemplate = '<li data-widget-id="{id}" class="widget panel panel-default gs-w">' +
|
||||
'<div class="panel-heading">{name}' +
|
||||
'</div></li>';
|
||||
|
||||
$scope.$watch('dashboard.widgets', function(widgets) {
|
||||
$timeout(function () {
|
||||
gridster.remove_all_widgets();
|
||||
|
||||
if (widgets && widgets.length) {
|
||||
var layout = [];
|
||||
|
||||
_.each(widgets, function(row, rowIndex) {
|
||||
_.each(row, function(widget, colIndex) {
|
||||
layout.push({
|
||||
id: widget.id,
|
||||
col: colIndex+1,
|
||||
row: rowIndex+1,
|
||||
ySize: 1,
|
||||
xSize: widget.width,
|
||||
name: widget.visualization.query.name
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
_.each(layout, function(item) {
|
||||
var el = gsItemTemplate.replace('{id}', item.id).replace('{name}', item.name);
|
||||
gridster.add_widget(el, item.xSize, item.ySize, item.col, item.row);
|
||||
|
||||
});
|
||||
}
|
||||
});
|
||||
}, true);
|
||||
|
||||
$scope.saveDashboard = function() {
|
||||
$scope.saveInProgress = true;
|
||||
// TODO: we should use the dashboard service here.
|
||||
if ($scope.dashboard.id) {
|
||||
var positions = $(element).find('.gridster ul').data('gridster').serialize();
|
||||
var layout = [];
|
||||
_.each(_.sortBy(positions, function (pos) {
|
||||
return pos.row * 10 + pos.col;
|
||||
}), function (pos) {
|
||||
var row = pos.row - 1;
|
||||
var col = pos.col - 1;
|
||||
layout[row] = layout[row] || [];
|
||||
if (col > 0 && layout[row][col - 1] == undefined) {
|
||||
layout[row][col - 1] = pos.id;
|
||||
} else {
|
||||
layout[row][col] = pos.id;
|
||||
}
|
||||
|
||||
});
|
||||
$scope.dashboard.layout = layout;
|
||||
|
||||
layout = JSON.stringify(layout);
|
||||
$http.post('/api/dashboards/' + $scope.dashboard.id, {'name': $scope.dashboard.name, 'layout': layout}).success(function(response) {
|
||||
$scope.dashboard = new Dashboard(response);
|
||||
$scope.saveInProgress = false;
|
||||
$(element).modal('hide');
|
||||
})
|
||||
} else {
|
||||
$http.post('/api/dashboards', {'name': $scope.dashboard.name}).success(function(response) {
|
||||
$(element).modal('hide');
|
||||
$location.path('/dashboard/' + response.slug).replace();
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
directives.directive('newWidgetForm', ['$http', 'Query', function($http, Query) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/new_widget_form.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.widgetSizes = [{name: 'Regular', value: 1}, {name: 'Double', value: 2}];
|
||||
|
||||
var reset = function() {
|
||||
$scope.saveInProgress = false;
|
||||
$scope.widgetSize = 1;
|
||||
$scope.queryId = null;
|
||||
$scope.selectedVis = null;
|
||||
$scope.query = null;
|
||||
|
||||
}
|
||||
|
||||
reset();
|
||||
|
||||
$scope.loadVisualizations = function() {
|
||||
if (!$scope.queryId) {
|
||||
return;
|
||||
}
|
||||
|
||||
// close the dialog
|
||||
$('#add_query_dialog').modal('hide');
|
||||
reset();
|
||||
Query.get({
|
||||
id: $scope.queryId
|
||||
}, function(query) {
|
||||
if (query) {
|
||||
$scope.query = query;
|
||||
if(query.visualizations.length) {
|
||||
$scope.selectedVis = query.visualizations[0];
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
$scope.saveWidget = function() {
|
||||
$scope.saveInProgress = true;
|
||||
|
||||
var widget = {
|
||||
'visualization_id': $scope.selectedVis.id,
|
||||
'dashboard_id': $scope.dashboard.id,
|
||||
'options': {},
|
||||
'width': $scope.widgetSize
|
||||
}
|
||||
|
||||
$http.post('/api/widgets', widget).success(function(response) {
|
||||
// update dashboard layout
|
||||
$scope.dashboard.layout = response['layout'];
|
||||
if (response['new_row']) {
|
||||
$scope.dashboard.widgets.push([response['widget']]);
|
||||
} else {
|
||||
$scope.dashboard.widgets[$scope.dashboard.widgets.length-1].push(response['widget']);
|
||||
}
|
||||
|
||||
// close the dialog
|
||||
$('#add_query_dialog').modal('hide');
|
||||
reset();
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}])
|
||||
|
||||
// From: http://jsfiddle.net/joshdmiller/NDFHg/
|
||||
directives.directive('editInPlace', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
value: '=',
|
||||
ignoreBlanks: '=',
|
||||
editable: '='
|
||||
},
|
||||
template: function(tElement, tAttrs) {
|
||||
var elType = tAttrs.editor || 'input';
|
||||
var placeholder = tAttrs.placeholder || 'Click to edit';
|
||||
return '<span ng-click="editable && edit()" ng-bind="value" ng-class="{editable: editable}"></span>' +
|
||||
'<span ng-click="editable && edit()" ng-show="editable && !value" ng-class="{editable: editable}">' + placeholder + '</span>' +
|
||||
'<{elType} ng-model="value" class="form-control" rows="2"></{elType}>'.replace('{elType}', elType);
|
||||
},
|
||||
link: function ($scope, element, attrs) {
|
||||
// Let's get a reference to the input element, as we'll want to reference it.
|
||||
var inputElement = angular.element(element.children()[2]);
|
||||
|
||||
// This directive should have a set class so we can style it.
|
||||
element.addClass('edit-in-place');
|
||||
|
||||
// Initially, we're not editing.
|
||||
$scope.editing = false;
|
||||
|
||||
// ng-click handler to activate edit-in-place
|
||||
$scope.edit = function () {
|
||||
if ($scope.ignoreBlanks) {
|
||||
$scope.oldValue = $scope.value;
|
||||
}
|
||||
|
||||
$scope.editing = true;
|
||||
|
||||
// We control display through a class on the directive itself. See the CSS.
|
||||
element.addClass('active');
|
||||
|
||||
// And we must focus the element.
|
||||
// `angular.element()` provides a chainable array, like jQuery so to access a native DOM function,
|
||||
// we have to reference the first element in the array.
|
||||
inputElement[0].focus();
|
||||
};
|
||||
|
||||
$(inputElement).blur(function() {
|
||||
if ($scope.ignoreBlanks && _.isEmpty($scope.value)) {
|
||||
$scope.value = $scope.oldValue;
|
||||
}
|
||||
$scope.editing = false;
|
||||
element.removeClass('active');
|
||||
})
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
}])
|
||||
// http://stackoverflow.com/a/17904092/1559840
|
||||
directives.directive('jsonText', function() {
|
||||
return {
|
||||
restrict: 'A',
|
||||
require: 'ngModel',
|
||||
link: function(scope, element, attr, ngModel) {
|
||||
function into(input) {
|
||||
return JSON.parse(input);
|
||||
}
|
||||
function out(data) {
|
||||
return JSON.stringify(data, undefined, 2);
|
||||
}
|
||||
ngModel.$parsers.push(into);
|
||||
ngModel.$formatters.push(out);
|
||||
|
||||
// From: http://jsfiddle.net/joshdmiller/NDFHg/
|
||||
directives.directive('editInPlace', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
value: '=',
|
||||
ignoreBlanks: '=',
|
||||
editable: '='
|
||||
},
|
||||
template: function(tElement, tAttrs) {
|
||||
var elType = tAttrs.editor || 'input';
|
||||
var placeholder = tAttrs.placeholder || 'Click to edit';
|
||||
return '<span ng-click="editable && edit()" ng-bind="value" ng-class="{editable: editable}"></span>' +
|
||||
'<span ng-click="editable && edit()" ng-show="editable && !value" ng-class="{editable: editable}">' + placeholder + '</span>' +
|
||||
'<{elType} ng-model="value" class="form-control" rows="2"></{elType}>'.replace('{elType}', elType);
|
||||
},
|
||||
link: function ($scope, element, attrs) {
|
||||
// Let's get a reference to the input element, as we'll want to reference it.
|
||||
var inputElement = angular.element(element.children()[2]);
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// This directive should have a set class so we can style it.
|
||||
element.addClass('edit-in-place');
|
||||
directives.directive('rdTimer', ['$timeout', function ($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: { timestamp: '=' },
|
||||
template: '{{currentTime}}',
|
||||
controller: ['$scope' ,function ($scope) {
|
||||
$scope.currentTime = "00:00:00";
|
||||
var currentTimeout = null;
|
||||
|
||||
// Initially, we're not editing.
|
||||
$scope.editing = false;
|
||||
|
||||
// ng-click handler to activate edit-in-place
|
||||
$scope.edit = function () {
|
||||
if ($scope.ignoreBlanks) {
|
||||
$scope.oldValue = $scope.value;
|
||||
var updateTime = function() {
|
||||
$scope.currentTime = moment(moment() - moment($scope.timestamp)).utc().format("HH:mm:ss")
|
||||
currentTimeout = $timeout(updateTime, 1000);
|
||||
}
|
||||
|
||||
$scope.editing = true;
|
||||
|
||||
// We control display through a class on the directive itself. See the CSS.
|
||||
element.addClass('active');
|
||||
|
||||
// And we must focus the element.
|
||||
// `angular.element()` provides a chainable array, like jQuery so to access a native DOM function,
|
||||
// we have to reference the first element in the array.
|
||||
inputElement[0].focus();
|
||||
};
|
||||
|
||||
$(inputElement).blur(function() {
|
||||
if ($scope.ignoreBlanks && _.isEmpty($scope.value)) {
|
||||
$scope.value = $scope.oldValue;
|
||||
var cancelTimer = function() {
|
||||
if (currentTimeout) {
|
||||
$timeout.cancel(currentTimeout);
|
||||
currentTimeout = null;
|
||||
}
|
||||
}
|
||||
$scope.editing = false;
|
||||
element.removeClass('active');
|
||||
})
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
directives.directive('rdTimer', ['$timeout', function ($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: { timestamp: '=' },
|
||||
template: '{{currentTime}}',
|
||||
controller: ['$scope' ,function ($scope) {
|
||||
$scope.currentTime = "00:00:00";
|
||||
var currentTimeout = null;
|
||||
updateTime();
|
||||
|
||||
var updateTime = function() {
|
||||
$scope.currentTime = moment(moment() - moment($scope.timestamp)).utc().format("HH:mm:ss")
|
||||
currentTimeout = $timeout(updateTime, 1000);
|
||||
}
|
||||
|
||||
var cancelTimer = function() {
|
||||
if (currentTimeout) {
|
||||
$timeout.cancel(currentTimeout);
|
||||
currentTimeout = null;
|
||||
}
|
||||
}
|
||||
|
||||
updateTime();
|
||||
|
||||
$scope.$on('$destroy', function () {
|
||||
cancelTimer();
|
||||
});
|
||||
}]
|
||||
};
|
||||
}]);
|
||||
$scope.$on('$destroy', function () {
|
||||
cancelTimer();
|
||||
});
|
||||
}]
|
||||
};
|
||||
}]);
|
||||
})();
|
||||
|
||||
@@ -47,4 +47,15 @@ angular.module('redash.filters', []).
}
return 12;
}
})

.filter('capitalize', function () {
return function (text) {
if (text) {
return text[0].toUpperCase() + text.slice(1).toLowerCase();
} else {
return null;
}

}
});
@@ -1,83 +1,274 @@
|
||||
'use strict';
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
angular.module('highchart', [])
|
||||
.directive('chart', ['$timeout', function ($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<div></div>',
|
||||
scope: {
|
||||
options: "=options",
|
||||
series: "=series"
|
||||
},
|
||||
transclude: true,
|
||||
replace: true,
|
||||
|
||||
link: function (scope, element, attrs) {
|
||||
var chartsDefaults = {
|
||||
chart: {
|
||||
renderTo: element[0],
|
||||
type: attrs.type || null,
|
||||
height: attrs.height || null,
|
||||
width: attrs.width || null
|
||||
}
|
||||
var defaultOptions = {
|
||||
title: {
|
||||
"text": null
|
||||
},
|
||||
xAxis: {
|
||||
type: 'datetime'
|
||||
},
|
||||
yAxis: {
|
||||
title: {
|
||||
text: null
|
||||
}
|
||||
},
|
||||
tooltip: {
|
||||
valueDecimals: 2,
|
||||
formatter: function () {
|
||||
if (!this.points) {
|
||||
this.points = [this.point];
|
||||
};
|
||||
|
||||
var deepCopy = true;
|
||||
var newSettings = {};
|
||||
$.extend(deepCopy, newSettings, chartsDefaults, scope.options);
|
||||
if (moment.isMoment(this.x)) {
|
||||
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
|
||||
pointsCount = this.points.length;
|
||||
|
||||
// Making sure that the DOM is ready before creating the chart element, so it gets proper width.
|
||||
$timeout(function(){
|
||||
scope.chart = new Highcharts.Chart(newSettings);
|
||||
$.each(this.points, function (i, point) {
|
||||
s += '<br/><span style="color:' + point.series.color + '">' + point.series.name + '</span>: ' +
|
||||
Highcharts.numberFormat(point.y);
|
||||
|
||||
//Update when charts data changes
|
||||
scope.$watch(function () {
|
||||
return (scope.series && scope.series.length) || 0;
|
||||
}, function (length) {
|
||||
if (!length || length == 0) {
|
||||
scope.chart.showLoading();
|
||||
if (pointsCount > 1 && point.percentage) {
|
||||
s += " (" + Highcharts.numberFormat(point.percentage) + "%)";
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var points = this.points;
|
||||
var name = points[0].key || points[0].name;
|
||||
|
||||
var s = "<b>" + name + "</b>";
|
||||
|
||||
$.each(points, function (i, point) {
|
||||
if (points.length > 1) {
|
||||
s += '<br/><span style="color:' + point.series.color + '">' + point.series.name + '</span>: ' + Highcharts.numberFormat(point.y);
|
||||
} else {
|
||||
while(scope.chart.series.length > 0) {
|
||||
scope.chart.series[0].remove(true);
|
||||
s += ": " + Highcharts.numberFormat(point.y);
|
||||
if (point.percentage < 100) {
|
||||
s += ' (' +Highcharts.numberFormat(point.percentage) + '%)';
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (_.some(scope.series[0].data, function(p) { return angular.isString(p.x) })) {
|
||||
scope.chart.xAxis[0].update({type: 'category'});
|
||||
return s;
|
||||
},
|
||||
shared: true
|
||||
},
|
||||
exporting: {
|
||||
chartOptions: {
|
||||
title: {
|
||||
text: ''
|
||||
}
|
||||
},
|
||||
buttons: {
|
||||
contextButton: {
|
||||
menuItems: [
|
||||
{
|
||||
text: 'Toggle % Stacking',
|
||||
onclick: function () {
|
||||
var newStacking = "normal";
|
||||
if (this.series[0].options.stacking == "normal") {
|
||||
newStacking = "percent";
|
||||
}
|
||||
|
||||
_.each(this.series, function (series) {
|
||||
series.update({stacking: newStacking}, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
credits: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
area: {
|
||||
marker: {
|
||||
enabled: false,
|
||||
symbol: 'circle',
|
||||
radius: 2,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
column: {
|
||||
stacking: "normal",
|
||||
pointPadding: 0,
|
||||
borderWidth: 1,
|
||||
groupPadding: 0,
|
||||
shadow: false
|
||||
},
|
||||
line: {
|
||||
marker: {
|
||||
radius: 1
|
||||
},
|
||||
lineWidth: 2,
|
||||
states: {
|
||||
hover: {
|
||||
lineWidth: 2,
|
||||
marker: {
|
||||
radius: 3
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
pie: {
|
||||
allowPointSelect: true,
|
||||
cursor: 'pointer',
|
||||
dataLabels: {
|
||||
enabled: true,
|
||||
color: '#000000',
|
||||
connectorColor: '#000000',
|
||||
format: '<b>{point.name}</b>: {point.percentage:.1f} %'
|
||||
}
|
||||
},
|
||||
scatter: {
|
||||
marker: {
|
||||
radius: 5,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true,
|
||||
lineColor: 'rgb(100,100,100)'
|
||||
}
|
||||
}
|
||||
},
|
||||
tooltip: {
|
||||
headerFormat: '<b>{series.name}</b><br>',
|
||||
pointFormat: '{point.x}, {point.y}'
|
||||
}
|
||||
}
|
||||
},
|
||||
series: []
|
||||
};
|
||||
|
||||
angular.module('highchart', [])
|
||||
.directive('chart', ['$timeout', function ($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<div></div>',
|
||||
scope: {
|
||||
options: "=options",
|
||||
series: "=series"
|
||||
},
|
||||
transclude: true,
|
||||
replace: true,
|
||||
|
||||
link: function (scope, element, attrs) {
|
||||
var chartsDefaults = {
|
||||
chart: {
|
||||
renderTo: element[0],
|
||||
type: attrs.type || null,
|
||||
height: attrs.height || null,
|
||||
width: attrs.width || null
|
||||
}
|
||||
};
|
||||
|
||||
var chartOptions = $.extend(true, {}, defaultOptions, chartsDefaults);
|
||||
|
||||
// $timeout makes sure that this function invoked after the DOM ready. When draw/init
|
||||
// invoked after the DOM is ready, we see first an empty HighCharts objects and later
|
||||
// they get filled up. Which gives the feeling that the charts loading faster (otherwise
|
||||
// we stare at an empty screen until the HighCharts object is ready).
|
||||
$timeout(function(){
|
||||
// Update when options change
|
||||
scope.$watch('options', function (newOptions) {
|
||||
initChart(newOptions);
|
||||
}, true);
|
||||
|
||||
//Update when charts data changes
|
||||
scope.$watch(function () {
|
||||
// TODO: this might be an issue in case the series change, but they stay
|
||||
// with the same length
|
||||
return (scope.series && scope.series.length) || 0;
|
||||
}, function (length) {
|
||||
if (!length || length == 0) {
|
||||
scope.chart.showLoading();
|
||||
} else {
|
||||
drawChart();
|
||||
};
|
||||
}, true);
|
||||
});
|
||||
|
||||
function initChart(options) {
|
||||
if (scope.chart) {
|
||||
scope.chart.destroy();
|
||||
};
|
||||
|
||||
$.extend(true, chartOptions, options);
|
||||
|
||||
scope.chart = new Highcharts.Chart(chartOptions);
|
||||
drawChart();
|
||||
}
|
||||
|
||||
function drawChart() {
|
||||
while (scope.chart.series.length > 0) {
|
||||
scope.chart.series[0].remove(false);
|
||||
};
|
||||
|
||||
if (scope.series.length > 0 && _.some(scope.series[0].data, function (p) {
|
||||
return (angular.isString(p.x) || angular.isDefined(p.name));
|
||||
})) {
|
||||
scope.chart.xAxis[0].update({type: 'category'});
|
||||
|
||||
if (!angular.isDefined(scope.series[0].data[0].name)) {
|
||||
// We need to make sure that for each category, each series has a value.
|
||||
var categories = _.union.apply(this, _.map(scope.series, function(s) { return _.pluck(s.data,'x')}));
|
||||
var categories = _.union.apply(this, _.map(scope.series, function (s) {
|
||||
return _.pluck(s.data, 'x')
|
||||
}));
|
||||
|
||||
_.each(scope.series, function(s) {
|
||||
_.each(scope.series, function (s) {
|
||||
// TODO: move this logic to Query#getChartData
|
||||
var yValues = _.groupBy(s.data, 'x');
|
||||
|
||||
var newData = _.sortBy(_.map(categories, function(category) {
|
||||
var newData = _.sortBy(_.map(categories, function (category) {
|
||||
return {
|
||||
name: category,
|
||||
y: yValues[category] && yValues[category][0].y
|
||||
}
|
||||
}), 'name');
|
||||
}), 'y').reverse();
|
||||
|
||||
s.data = newData;
|
||||
});
|
||||
} else {
|
||||
scope.chart.xAxis[0].update({type: 'datetime'});
|
||||
}
|
||||
} else {
|
||||
scope.chart.xAxis[0].update({type: 'datetime'});
|
||||
}
|
||||
|
||||
scope.chart.counters.color = 0;
|
||||
scope.chart.counters.color = 0;
|
||||
|
||||
_.each(scope.series, function(s) {
|
||||
scope.chart.addSeries(s);
|
||||
})
|
||||
_.each(scope.series, function (s) {
|
||||
// here we override the series with the visualization config
|
||||
s = _.extend(s, chartOptions['series']);
|
||||
|
||||
scope.chart.redraw();
|
||||
scope.chart.hideLoading();
|
||||
};
|
||||
}, true);
|
||||
});
|
||||
if (s.type == 'area') {
|
||||
_.each(s.data, function (p) {
|
||||
// This is an insane hack: somewhere deep in HighChart's code,
|
||||
// when you stack areas, it tries to convert the string representation
|
||||
// of point's x into a number. With the default implementation of toString
|
||||
// it fails....
|
||||
|
||||
if (moment.isMoment(p.x)) {
|
||||
p.x.toString = function () {
|
||||
return String(this.toDate().getTime());
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
}
|
||||
};
|
||||
scope.chart.addSeries(s, false);
|
||||
});
|
||||
|
||||
}]);
|
||||
scope.chart.redraw();
|
||||
scope.chart.hideLoading();
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
}]);
|
||||
})();
|
||||
@@ -1,197 +1,4 @@
|
||||
var renderers = angular.module('redash.renderers', []);
|
||||
var defaultChartOptions = {
|
||||
"title": {
|
||||
"text": null
|
||||
},
|
||||
"tooltip": {
|
||||
valueDecimals: 2,
|
||||
formatter: function () {
|
||||
if (moment.isMoment(this.x)) {
|
||||
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
|
||||
pointsCount = this.points.length;
|
||||
|
||||
$.each(this.points, function (i, point) {
|
||||
s += '<br/><span style="color:'+point.series.color+'">' + point.series.name + '</span>: ' +
|
||||
Highcharts.numberFormat(point.y);
|
||||
|
||||
if (pointsCount > 1 && point.percentage) {
|
||||
s += " (" + Highcharts.numberFormat(point.percentage) + "%)";
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var s = "<b>" + this.points[0].key + "</b>";
|
||||
$.each(this.points, function (i, point) {
|
||||
s+= '<br/><span style="color:'+point.series.color+'">' + point.series.name + '</span>: ' +
|
||||
Highcharts.numberFormat(point.y);
|
||||
});
|
||||
}
|
||||
|
||||
return s;
|
||||
},
|
||||
shared: true
|
||||
},
|
||||
xAxis: {
|
||||
type: 'datetime'
|
||||
},
|
||||
yAxis: {
|
||||
title: {
|
||||
text: null
|
||||
}
|
||||
},
|
||||
exporting: {
|
||||
chartOptions: {
|
||||
title: {
|
||||
text: this.description
|
||||
}
|
||||
},
|
||||
buttons: {
|
||||
contextButton: {
|
||||
menuItems: [
|
||||
{
|
||||
text: 'Toggle % Stacking',
|
||||
onclick: function () {
|
||||
var newStacking = "normal";
|
||||
if (this.series[0].options.stacking == "normal") {
|
||||
newStacking = "percent";
|
||||
}
|
||||
|
||||
_.each(this.series, function (series) {
|
||||
series.update({stacking: newStacking}, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
credits: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
"column": {
|
||||
"stacking": "normal",
|
||||
"pointPadding": 0,
|
||||
"borderWidth": 1,
|
||||
"groupPadding": 0,
|
||||
"shadow": false
|
||||
}
|
||||
},
|
||||
"series": []
|
||||
};
|
||||
|
||||
renderers.directive('chartRenderer', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
queryResult: '=',
|
||||
options: '=?'
|
||||
},
|
||||
template: "<chart options='chartOptions' series='chartSeries' class='graph'></chart>",
|
||||
replace: false,
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.chartSeries = [];
|
||||
$scope.chartOptions = defaultChartOptions;
|
||||
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data || $scope.queryResult.getData() == null) {
|
||||
$scope.chartSeries.splice(0, $scope.chartSeries.length);
|
||||
} else {
|
||||
$scope.chartSeries.splice(0, $scope.chartSeries.length);
|
||||
|
||||
_.each($scope.queryResult.getChartData(), function (s) {
|
||||
$scope.chartSeries.push(_.extend(s, {'stacking': 'normal'}, $scope.options));
|
||||
});
|
||||
}
|
||||
});
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
renderers.directive('gridRenderer', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
queryResult: '=',
|
||||
itemsPerPage: '='
|
||||
},
|
||||
templateUrl: "/views/grid_renderer.html",
|
||||
replace: false,
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.gridColumns = [];
|
||||
$scope.gridData = [];
|
||||
$scope.gridConfig = {
|
||||
isPaginationEnabled: true,
|
||||
itemsByPage: $scope.itemsPerPage || 15,
|
||||
maxSize: 8
|
||||
};
|
||||
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getData() == null) {
|
||||
$scope.gridColumns = [];
|
||||
$scope.gridData = [];
|
||||
$scope.filters = [];
|
||||
} else {
|
||||
|
||||
|
||||
$scope.filters = $scope.queryResult.getFilters();
|
||||
|
||||
var gridData = _.map($scope.queryResult.getData(), function (row) {
|
||||
var newRow = {};
|
||||
_.each(row, function (val, key) {
|
||||
// TODO: hack to detect date fields, needed only for backward compatability
|
||||
if (val > 1000 * 1000 * 1000 * 100) {
|
||||
newRow[$scope.queryResult.getColumnCleanName(key)] = moment(val);
|
||||
} else {
|
||||
newRow[$scope.queryResult.getColumnCleanName(key)] = val;
|
||||
}
|
||||
|
||||
})
|
||||
return newRow;
|
||||
});
|
||||
|
||||
$scope.gridColumns = _.map($scope.queryResult.getColumnCleanNames(), function (col, i) {
|
||||
var columnDefinition = {
|
||||
'label': $scope.queryResult.getColumnFriendlyNames()[i],
|
||||
'map': col
|
||||
};
|
||||
|
||||
if (gridData.length > 0) {
|
||||
var exampleData = gridData[0][col];
|
||||
if (angular.isNumber(exampleData)) {
|
||||
columnDefinition['formatFunction'] = 'number';
|
||||
columnDefinition['formatParameter'] = 2;
|
||||
} else if (moment.isMoment(exampleData)) {
|
||||
columnDefinition['formatFunction'] = function(value) {
|
||||
return value.format("DD/MM/YY HH:mm");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return columnDefinition;
|
||||
});
|
||||
|
||||
$scope.gridData = _.clone(gridData);
|
||||
|
||||
$scope.$watch('filters', function (filters) {
|
||||
$scope.gridData = _.filter(gridData, function (row) {
|
||||
return _.reduce(filters, function (memo, filter) {
|
||||
if (filter.current == 'All') {
|
||||
return memo && true;
|
||||
}
|
||||
|
||||
return (memo && row[$scope.queryResult.getColumnCleanName(filter.name)] == filter.current);
|
||||
}, true);
|
||||
});
|
||||
}, true);
|
||||
}
|
||||
});
|
||||
}]
|
||||
}
|
||||
})
|
||||
|
||||
renderers.directive('pivotTableRenderer', function () {
|
||||
return {
|
||||
@@ -216,52 +23,4 @@ renderers.directive('pivotTableRenderer', function () {
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
renderers.directive('cohortRenderer', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
queryResult: '='
|
||||
},
|
||||
template: "",
|
||||
replace: false,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getData() == null) {
|
||||
|
||||
} else {
|
||||
var sortedData = _.sortBy($scope.queryResult.getData(), "date");
|
||||
var grouped = _.groupBy(sortedData, "date");
|
||||
var data = _.map(grouped, function(values, date) {
|
||||
var row = [values[0].total];
|
||||
_.each(values, function(value) { row.push(value.value); });
|
||||
return row;
|
||||
});
|
||||
|
||||
var initialDate = moment(sortedData[0].date).toDate(),
|
||||
container = angular.element(element)[0];
|
||||
|
||||
Cornelius.draw({
|
||||
initialDate: initialDate,
|
||||
container: container,
|
||||
cohort: data,
|
||||
title: null,
|
||||
timeInterval: 'daily',
|
||||
labels: {
|
||||
time: 'Activation Day',
|
||||
people: 'Users'
|
||||
},
|
||||
formatHeaderLabel: function (i) {
|
||||
return "Day " + (i - 1);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
});
|
||||
@@ -253,7 +253,7 @@
}

return QueryResult;
}
};

var Query = function ($resource, QueryResult) {
var Query = $resource('/api/queries/:id', {id: '@id'});
@@ -263,6 +263,7 @@
ttl = this.ttl;
}

var queryResult = null;
if (this.latest_query_data && ttl != 0) {
queryResult = new QueryResult({'query_result': this.latest_query_data});
@@ -273,17 +274,16 @@
}

return queryResult;
}
};

Query.prototype.getHash = function() {
return [this.name, this.description, this.query].join('!#');
}
};

return Query;
}
};

angular.module('redash.services', [])
.factory('QueryResult', ['$resource', '$timeout', QueryResult])
.factory('Query', ['$resource', 'QueryResult', Query])

})();

149
rd_ui/app/scripts/visualizations/base.js
Normal file
@@ -0,0 +1,149 @@
|
||||
(function () {
|
||||
var VisualizationProvider = function() {
|
||||
this.visualizations = {};
|
||||
this.visualizationTypes = {};
|
||||
var defaultConfig = {
|
||||
defaultOptions: {},
|
||||
skipTypes: false,
|
||||
editorTemplate: null
|
||||
}
|
||||
|
||||
this.registerVisualization = function(config) {
|
||||
var visualization = _.extend({}, defaultConfig, config);
|
||||
|
||||
// TODO: this is prone to errors; better refactor.
|
||||
if (_.isEmpty(this.visualizations)) {
|
||||
this.defaultVisualization = visualization;
|
||||
}
|
||||
|
||||
this.visualizations[config.type] = visualization;
|
||||
|
||||
if (!config.skipTypes) {
|
||||
this.visualizationTypes[config.name] = config.type;
|
||||
};
|
||||
};
|
||||
|
||||
this.getSwitchTemplate = function(property) {
|
||||
var pattern = /(<[a-zA-Z0-9-]*?)( |>)/
|
||||
|
||||
var mergedTemplates = _.reduce(this.visualizations, function(templates, visualization) {
|
||||
if (visualization[property]) {
|
||||
var ngSwitch = '$1 ng-switch-when="' + visualization.type + '" $2';
|
||||
var template = visualization[property].replace(pattern, ngSwitch);
|
||||
|
||||
return templates + "\n" + template;
|
||||
}
|
||||
|
||||
return templates;
|
||||
}, "");
|
||||
|
||||
mergedTemplates = '<div ng-switch on="visualization.type">'+ mergedTemplates + "</div>";
|
||||
|
||||
return mergedTemplates;
|
||||
}
|
||||
|
||||
this.$get = ['$resource', function($resource) {
|
||||
var Visualization = $resource('/api/visualizations/:id', {id: '@id'});
|
||||
Visualization.visualizations = this.visualizations;
|
||||
Visualization.visualizationTypes = this.visualizationTypes;
|
||||
Visualization.renderVisualizationsTemplate = this.getSwitchTemplate('renderTemplate');
|
||||
Visualization.editorTemplate = this.getSwitchTemplate('editorTemplate');
|
||||
Visualization.defaultVisualization = this.defaultVisualization;
|
||||
|
||||
return Visualization;
|
||||
}];
|
||||
};
|
||||
|
||||
var VisualizationRenderer = function(Visualization) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
visualization: '=',
|
||||
queryResult: '='
|
||||
},
|
||||
// TODO: using switch here (and in the options editor) might introduce errors and bad
|
||||
// performance wise. It's better to eventually show the correct template based on the
|
||||
// visualization type and not make the browser render all of them.
|
||||
template: Visualization.renderVisualizationsTemplate,
|
||||
replace: false
|
||||
}
|
||||
};
|
||||
|
||||
var VisualizationOptionsEditor = function(Visualization) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: Visualization.editorTemplate,
|
||||
replace: false
|
||||
}
|
||||
};
|
||||
|
||||
var EditVisualizationForm = function(Visualization, growl) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/edit_visualization.html',
|
||||
replace: true,
|
||||
scope: {
|
||||
query: '=',
|
||||
queryResult: '=',
|
||||
visualization: '=?'
|
||||
},
|
||||
link: function (scope, element, attrs) {
|
||||
scope.visTypes = Visualization.visualizationTypes;
|
||||
|
||||
scope.newVisualization = function(q) {
|
||||
return {
|
||||
'query_id': q.id,
|
||||
'type': Visualization.defaultVisualization.type,
|
||||
'name': Visualization.defaultVisualization.name,
|
||||
'description': q.description || '',
|
||||
'options': Visualization.defaultVisualization.defaultOptions
|
||||
};
|
||||
}
|
||||
|
||||
if (!scope.visualization) {
|
||||
// create new visualization
|
||||
// wait for query to load to populate with defaults
|
||||
var unwatch = scope.$watch('query', function (q) {
|
||||
if (q && q.id) {
|
||||
unwatch();
|
||||
|
||||
scope.visualization = scope.newVisualization(q);
|
||||
}
|
||||
}, true);
|
||||
}
|
||||
|
||||
scope.$watch('visualization.type', function (type) {
|
||||
// if not edited by user, set name to match type
|
||||
if (type && scope.visualization && !scope.visForm.name.$dirty) {
|
||||
// poor man's titlecase
|
||||
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
|
||||
}
|
||||
});
|
||||
|
||||
scope.submit = function () {
|
||||
Visualization.save(scope.visualization, function success(result) {
|
||||
growl.addSuccessMessage("Visualization saved");
|
||||
|
||||
scope.visualization = scope.newVisualization(scope.query);
|
||||
|
||||
var visIds = _.pluck(scope.query.visualizations, 'id');
|
||||
var index = visIds.indexOf(result.id);
|
||||
if (index > -1) {
|
||||
scope.query.visualizations[index] = result;
|
||||
} else {
|
||||
scope.query.visualizations.push(result);
|
||||
}
|
||||
}, function error() {
|
||||
growl.addErrorMessage("Visualization could not be saved");
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
angular.module('redash.visualization', [])
|
||||
.provider('Visualization', VisualizationProvider)
|
||||
.directive('visualizationRenderer', ['Visualization', VisualizationRenderer])
|
||||
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
|
||||
.directive('editVisulatizationForm', ['Visualization', 'growl', EditVisualizationForm])
|
||||
})();
|
||||
106
rd_ui/app/scripts/visualizations/chart.js
Normal file
@@ -0,0 +1,106 @@
|
||||
(function () {
|
||||
var chartVisualization = angular.module('redash.visualization');
|
||||
|
||||
chartVisualization.config(['VisualizationProvider', function(VisualizationProvider) {
|
||||
var renderTemplate = '<chart-renderer options="visualization.options" query-result="queryResult"></chart-renderer>';
|
||||
var editTemplate = '<chart-editor></chart-editor>';
|
||||
var defaultOptions = {
|
||||
'series': {
|
||||
'type': 'column',
|
||||
'stacking': null
|
||||
}
|
||||
};
|
||||
|
||||
VisualizationProvider.registerVisualization({
|
||||
type: 'CHART',
|
||||
name: 'Chart',
|
||||
renderTemplate: renderTemplate,
|
||||
editorTemplate: editTemplate,
|
||||
defaultOptions: defaultOptions
|
||||
});
|
||||
}]);
|
||||
|
||||
chartVisualization.directive('chartRenderer', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
queryResult: '=',
|
||||
options: '=?'
|
||||
},
|
||||
template: "<chart options='chartOptions' series='chartSeries' class='graph'></chart>",
|
||||
replace: false,
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.chartSeries = [];
|
||||
$scope.chartOptions = {};
|
||||
|
||||
$scope.$watch('options', function(chartOptions) {
|
||||
if (chartOptions) {
|
||||
$scope.chartOptions = chartOptions;
|
||||
}
|
||||
});
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data || $scope.queryResult.getData() == null) {
|
||||
$scope.chartSeries.splice(0, $scope.chartSeries.length);
|
||||
} else {
|
||||
$scope.chartSeries.splice(0, $scope.chartSeries.length);
|
||||
|
||||
_.each($scope.queryResult.getChartData(), function (s) {
|
||||
$scope.chartSeries.push(_.extend(s, {'stacking': 'normal'}));
|
||||
});
|
||||
}
|
||||
});
|
||||
}]
|
||||
}
|
||||
});
|
||||
|
||||
chartVisualization.directive('chartEditor', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/chart_editor.html',
|
||||
link: function (scope, element, attrs) {
|
||||
scope.seriesTypes = {
|
||||
'Line': 'line',
|
||||
'Column': 'column',
|
||||
'Area': 'area',
|
||||
'Scatter': 'scatter',
|
||||
'Pie': 'pie'
|
||||
};
|
||||
|
||||
scope.stackingOptions = {
|
||||
"None": "none",
|
||||
"Normal": "normal",
|
||||
"Percent": "percent"
|
||||
};
|
||||
|
||||
scope.stacking = "none";
|
||||
|
||||
var chartOptionsUnwatch = null;
|
||||
|
||||
scope.$watch('visualization', function (visualization) {
|
||||
if (visualization && visualization.type == 'CHART') {
|
||||
if (scope.visualization.options.series.stacking === null) {
|
||||
scope.stacking = "none";
|
||||
} else if (scope.visualization.options.series.stacking === undefined) {
|
||||
scope.stacking = "normal";
|
||||
} else {
|
||||
scope.stacking = scope.visualization.options.series.stacking;
|
||||
}
|
||||
|
||||
chartOptionsUnwatch = scope.$watch("stacking", function (stacking) {
|
||||
if (stacking == "none") {
|
||||
scope.visualization.options.series.stacking = null;
|
||||
} else {
|
||||
scope.visualization.options.series.stacking = stacking;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
if (chartOptionsUnwatch) {
|
||||
chartOptionsUnwatch();
|
||||
chartOptionsUnwatch = null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}());
|
||||
60
rd_ui/app/scripts/visualizations/cohort.js
Normal file
@@ -0,0 +1,60 @@
|
||||
(function () {
|
||||
var cohortVisualization = angular.module('redash.visualization');
|
||||
|
||||
cohortVisualization.config(['VisualizationProvider', function(VisualizationProvider) {
|
||||
VisualizationProvider.registerVisualization({
|
||||
type: 'COHORT',
|
||||
name: 'Cohort',
|
||||
renderTemplate: '<cohort-renderer options="visualization.options" query-result="queryResult"></cohort-renderer>'
|
||||
});
|
||||
}]);
|
||||
|
||||
cohortVisualization.directive('cohortRenderer', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
queryResult: '='
|
||||
},
|
||||
template: "",
|
||||
replace: false,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getData() == null) {
|
||||
|
||||
} else {
|
||||
var sortedData = _.sortBy($scope.queryResult.getData(), "date");
|
||||
var grouped = _.groupBy(sortedData, "date");
|
||||
var data = _.map(grouped, function(values, date) {
|
||||
var row = [values[0].total];
|
||||
_.each(values, function(value) { row.push(value.value); });
|
||||
return row;
|
||||
});
|
||||
|
||||
var initialDate = moment(sortedData[0].date).toDate(),
|
||||
container = angular.element(element)[0];
|
||||
|
||||
Cornelius.draw({
|
||||
initialDate: initialDate,
|
||||
container: container,
|
||||
cohort: data,
|
||||
title: null,
|
||||
timeInterval: 'daily',
|
||||
labels: {
|
||||
time: 'Activation Day',
|
||||
people: 'Users'
|
||||
},
|
||||
formatHeaderLabel: function (i) {
|
||||
return "Day " + (i - 1);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
}());
|
||||
92
rd_ui/app/scripts/visualizations/table.js
Normal file
@@ -0,0 +1,92 @@
|
||||
(function () {
|
||||
var tableVisualization = angular.module('redash.visualization');
|
||||
|
||||
tableVisualization.config(['VisualizationProvider', function(VisualizationProvider) {
|
||||
VisualizationProvider.registerVisualization({
|
||||
type: 'TABLE',
|
||||
name: 'Table',
|
||||
renderTemplate: '<grid-renderer options="visualization.options" query-result="queryResult"></grid-renderer>',
|
||||
skipTypes: true
|
||||
});
|
||||
}]);
|
||||
|
||||
tableVisualization.directive('gridRenderer', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
queryResult: '=',
|
||||
itemsPerPage: '='
|
||||
},
|
||||
templateUrl: "/views/grid_renderer.html",
|
||||
replace: false,
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.gridColumns = [];
|
||||
$scope.gridData = [];
|
||||
$scope.gridConfig = {
|
||||
isPaginationEnabled: true,
|
||||
itemsByPage: $scope.itemsPerPage || 15,
|
||||
maxSize: 8
|
||||
};
|
||||
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getData() == null) {
|
||||
$scope.gridColumns = [];
|
||||
$scope.gridData = [];
|
||||
$scope.filters = [];
|
||||
} else {
|
||||
|
||||
|
||||
$scope.filters = $scope.queryResult.getFilters();
|
||||
|
||||
var gridData = _.map($scope.queryResult.getData(), function (row) {
|
||||
var newRow = {};
|
||||
_.each(row, function (val, key) {
|
||||
newRow[$scope.queryResult.getColumnCleanName(key)] = val;
|
||||
})
|
||||
return newRow;
|
||||
});
|
||||
|
||||
$scope.gridColumns = _.map($scope.queryResult.getColumnCleanNames(), function (col, i) {
|
||||
var columnDefinition = {
|
||||
'label': $scope.queryResult.getColumnFriendlyNames()[i],
|
||||
'map': col
|
||||
};
|
||||
|
||||
if (gridData.length > 0) {
|
||||
var exampleData = gridData[0][col];
|
||||
if (angular.isNumber(exampleData)) {
|
||||
columnDefinition['formatFunction'] = 'number';
|
||||
columnDefinition['formatParameter'] = 2;
|
||||
} else if (moment.isMoment(exampleData)) {
|
||||
columnDefinition['formatFunction'] = function(value) {
|
||||
return value.format("DD/MM/YY HH:mm");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return columnDefinition;
|
||||
});
|
||||
|
||||
$scope.gridData = _.clone(gridData);
|
||||
|
||||
$scope.$watch('filters', function (filters) {
|
||||
$scope.gridData = _.filter(gridData, function (row) {
|
||||
return _.reduce(filters, function (memo, filter) {
|
||||
if (filter.current == 'All') {
|
||||
return memo && true;
|
||||
}
|
||||
|
||||
return (memo && row[$scope.queryResult.getColumnCleanName(filter.name)] == filter.current);
|
||||
}, true);
|
||||
});
|
||||
}, true);
|
||||
}
|
||||
});
|
||||
}]
|
||||
}
|
||||
})
|
||||
}());
|
||||
37
rd_ui/app/styles/login.css
Normal file
@@ -0,0 +1,37 @@
.main {
max-width: 320px;
margin: 0 auto;
}

.login-or {
position: relative;
font-size: 18px;
color: #aaa;
margin-top: 10px;
margin-bottom: 10px;
padding-top: 10px;
padding-bottom: 10px;
}

.span-or {
display: block;
position: absolute;
left: 50%;
top: -2px;
margin-left: -25px;
background-color: #fff;
width: 50px;
text-align: center;
}

.hr-or {
background-color: #cdcdcd;
height: 1px;
margin-top: 0px !important;
margin-bottom: 0px !important;
}

/*h3 {*/
/*text-align: center;*/
/*line-height: 300%;*/
/*}*/
@@ -2,6 +2,10 @@ body {
padding-top: 70px;
}

a.link {
cursor: pointer;
}

a.page-title {
overflow: hidden;
text-overflow: ellipsis;
@@ -56,6 +60,10 @@ a.navbar-brand {
margin-bottom: 0px;
}

.panel-heading > a {
color: inherit;
}

/* angular-growl */
.growl {
position: fixed;
@@ -193,4 +201,20 @@ to add those CSS styles here. */
-webkit-border-radius: 6px 0 6px 6px;
-moz-border-radius: 6px 0 6px 6px;
border-radius: 6px 0 6px 6px;
}

.rd-tab .remove {
cursor: pointer;
color: #A09797;
padding: 0 3px 1px 4px;
font-size: 11px;
}
.rd-tab .remove:hover {
color: white;
background-color: #FF8080;
border-radius: 50%;
}

visualization-renderer > div {
overflow: scroll;
}
@@ -4,11 +4,13 @@
<div class="container">
<h2 id="dashboard_title">
{{dashboard.name}}

<button type="button" class="btn btn-default btn-xs" ng-class="{active: refreshEnabled}" tooltip="Enable/Disable Auto Refresh" ng-click="triggerRefresh()"><span class="glyphicon glyphicon-refresh"></span></button>
<span ng-show="dashboard.canEdit()">
<button type="button" class="btn btn-default btn-xs" data-toggle="modal" href="#edit_dashboard_dialog" tooltip="Edit Dashboard (Name/Layout)"><span
class="glyphicon glyphicon-cog"></span></button>
<button type="button" class="btn btn-default btn-xs" data-toggle="modal"
href="#add_query_dialog" tooltip="Add Widget (Chart/Table)"><span class="glyphicon glyphicon-import"></span>
href="#add_query_dialog" tooltip="Add Widget (Chart/Table)"><span class="glyphicon glyphicon-plus"></span>
</button>
</span>
</h2>
@@ -21,7 +23,7 @@

<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title" style="cursor: pointer;" ng-click="open(query)">
<h3 class="panel-title" style="cursor: pointer;" ng-click="open(query, widget.visualization)">
<p>
<span ng-bind="query.name"></span>
</p>
@@ -29,18 +31,15 @@
</h3>
</div>

<div ng-switch on="widget.type" class="panel-body">
<chart-renderer ng-switch-when="chart" query-result="queryResult" options="widget.options"></chart-renderer>
<grid-renderer ng-switch-when="grid" query-result="queryResult"></grid-renderer>
<cohort-renderer ng-switch-when="cohort" query-result="queryResult"></cohort-renderer>
</div>
<visualization-renderer visualization="widget.visualization" query-result="queryResult"></visualization-renderer class="panel-body">

<div class="panel-footer">
<span class="label label-default"
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>

<span class="pull-right">
<a class="btn btn-default btn-xs" ng-href="/queries/{{query.id}}"><span class="glyphicon glyphicon-link"></span></a>
<a class="btn btn-default btn-xs" ng-href="/queries/{{query.id}}#{{widget.visualization.id}}"><span class="glyphicon glyphicon-link"></span></a>
<button type="button" class="btn btn-default btn-xs" ng-show="dashboard.canEdit()" ng-click="deleteWidget()" title="Remove Widget"><span class="glyphicon glyphicon-trash"></span></button>
</span>

@@ -10,17 +10,9 @@
|
||||
<input type="text" class="form-control" placeholder="Dashboard Name" ng-model="dashboard.name">
|
||||
</p>
|
||||
|
||||
<p ng-show="layout!='null'">
|
||||
<div class="gridster">
|
||||
<ul>
|
||||
<li ng-repeat="widget in layout" data-row="{{widget.row}}" data-col="{{widget.col}}"
|
||||
data-sizey="{{widget.ySize}}" data-sizex="{{widget.xSize}}" data-widget-id="{{widget.id}}"
|
||||
class="widget panel panel-default">
|
||||
<div class="panel-heading">{{widget.name}}</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</p>
|
||||
<div class="gridster">
|
||||
<ul></ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-default" ng-disabled="saveInProgress" data-dismiss="modal">Close</button>
|
||||
|
||||
@@ -7,15 +7,27 @@
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<p>
|
||||
<input type="text" class="form-control" placeholder="Query Id" ng-model="queryId">
|
||||
<form class="form-inline" role="form" ng-submit="loadVisualizations()">
|
||||
<div class="form-group">
|
||||
<input class="form-control" placeholder="Query Id" ng-model="queryId">
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary" ng-disabled="!queryId">
|
||||
<span class="glyphicon glyphicon-refresh"></span> Load
|
||||
</button>
|
||||
</form>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<select class="form-control" ng-model="widgetType" ng-options="c.value as c.name for c in widgetTypes"></select>
|
||||
</p>
|
||||
<p>
|
||||
<select class="form-control" ng-model="widgetSize" ng-options="c.value as c.name for c in widgetSizes"></select>
|
||||
</p>
|
||||
<div ng-show="query">
|
||||
<div class="form-group">
|
||||
<label for="">Choose Visualation</label>
|
||||
<select ng-model="selectedVis" ng-options="vis as vis.name group by vis.type for vis in query.visualizations" class="form-control"></select>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="">Widget Size</label>
|
||||
<select class="form-control" ng-model="widgetSize" ng-options="c.value as c.name for c in widgetSizes"></select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-default" ng-disabled="saveInProgress" data-dismiss="modal">Close</button>
|
||||
|
||||
@@ -34,7 +34,7 @@
|
||||
<span ng-show="queryResult.getRuntime()>=0">Query runtime: {{queryResult.getRuntime() | durationHumanize}} | </span>
|
||||
<span ng-show="queryResult.query_result.retrieved_at">Last update time: <span am-time-ago="queryResult.query_result.retrieved_at"></span> | </span>
|
||||
<span ng-show="queryResult.getStatus() == 'done'">Rows: {{queryResult.getData().length}} | </span>
|
||||
Created by: {{query.user}}
|
||||
Created by: {{query.user.name}}
|
||||
<div class="pull-right">Refresh query: <select ng-model="query.ttl" ng-options="c.value as c.name for c in refreshOptions"></select><br></div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -51,22 +51,52 @@
|
||||
</div>
|
||||
|
||||
<div class="row" ng-show="queryResult.getStatus() == 'done'">
|
||||
<rd-tabs tabs-collection='tabs' selected-tab='selectedTab'></rd-tabs>
|
||||
<ul class="nav nav-tabs">
|
||||
<rd-tab id="table" name="Table"></rd-tab>
|
||||
<rd-tab id="pivot" name="Pivot Table"></rd-tab>
|
||||
<!-- hide the table visualization -->
|
||||
<rd-tab id="{{vis.id}}" name="{{vis.name}}" ng-hide="vis.type=='TABLE'" ng-repeat="vis in query.visualizations">
|
||||
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="currentUser.canEdit(query)"> ×</span>
|
||||
</rd-tab>
|
||||
<rd-tab id="add" name="+ New" removeable="true" ng-show="currentUser.canEdit(query)"></rd-tab>
|
||||
</ul>
|
||||
|
||||
<div ng-show="selectedTab.key == 'chart'" class="col-lg-12">
|
||||
<chart-renderer query-result="queryResult"></chart-renderer>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab.key == 'table'">
|
||||
<div class="col-lg-12" ng-show="selectedTab == 'table'">
|
||||
<grid-renderer query-result="queryResult" items-per-page="50"></grid-renderer>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab.key == 'pivot'">
|
||||
<div class="col-lg-12" ng-show="selectedTab == 'pivot'">
|
||||
<pivot-table-renderer query-result="queryResult"></pivot-table-renderer>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab.key == 'cohort'">
|
||||
<cohort-renderer query-result="queryResult"></cohort-renderer>
|
||||
<div class="col-lg-12" ng-show="selectedTab == vis.id" ng-repeat="vis in query.visualizations">
|
||||
<div class="row" ng-show="currentUser.canEdit(query)">
|
||||
<p>
|
||||
<div class="col-lg-12">
|
||||
<edit-visulatization-form visualization="vis" query="query" query-result="queryResult"></edit-visulatization-form>
|
||||
</div>
|
||||
</p>
|
||||
</div>
|
||||
<div class="row">
|
||||
<p>
|
||||
<div class="col-lg-12">
|
||||
<visualization-renderer visualization="vis" query-result="queryResult"></visualization-renderer>
|
||||
</div>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab == 'add'">
|
||||
<div class="row">
|
||||
<p>
|
||||
<div class="col-lg-6">
|
||||
<edit-visulatization-form visualization="newVisualization" query="query"></edit-visulatization-form>
|
||||
</div>
|
||||
<div class="col-lg-6">
|
||||
<visualization-renderer visualization="newVisualization" query-result="queryResult"></visualization-renderer>
|
||||
</div>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
7
rd_ui/app/views/visualizations/chart_editor.html
Normal file
@@ -0,0 +1,7 @@
<div class="form-group">
  <label class="control-label">Chart Type</label>
  <select required ng-model="visualization.options.series.type" ng-options="value as key for (key, value) in seriesTypes" class="form-control"></select>

  <label class="control-label">Stacking</label>
  <select required ng-model="stacking" ng-options="value as key for (key, value) in stackingOptions" class="form-control"></select>
</div>

18
rd_ui/app/views/visualizations/edit_visualization.html
Normal file
@@ -0,0 +1,18 @@
<form role="form" name="visForm" ng-submit="submit()">
  <div class="form-group">
    <label class="control-label">Name</label>
    <input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">
  </div>

  <div class="form-group">
    <label class="control-label">Visualization Type</label>
    <select required ng-model="visualization.type" ng-options="value as key for (key, value) in visTypes" class="form-control" ng-change="typeChanged()"></select>
  </div>

  <visualization-options-editor></visualization-options-editor>

  <div class="form-group">
    <button type="submit" class="btn btn-primary">Save</button>
  </div>

</form>

47
redash/__init__.py
Normal file
@@ -0,0 +1,47 @@
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database

import redis
from statsd import StatsClient
from redash import settings, utils

__version__ = '0.3.5'

app = Flask(__name__,
            template_folder=settings.STATIC_ASSETS_PATH,
            static_folder=settings.STATIC_ASSETS_PATH,
            static_path='/static')

api = Api(app)

# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)

from redash.authentication import setup_authentication
auth = setup_authentication(app)

@api.representation('application/json')
def json_representation(data, code, headers=None):
    resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
    resp.headers.extend(headers or {})
    return resp


redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.path:
    redis_db = redis_url.path[1]
else:
    redis_db = 0

redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)

from redash import data
data_manager = data.Manager(redis_connection, db, statsd_client)

from redash import controllers

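# --- Illustration (not part of the commit): how the Redis settings above resolve ---
# A minimal sketch, assuming a hypothetical REDASH_REDIS_URL value.
example = urlparse.urlparse("redis://:s3cret@redis.internal:6380/3")
# example.hostname -> 'redis.internal', example.port -> 6380, example.password -> 's3cret'
# example.path -> '/3', so the database number passed to StrictRedis above would be '3'.
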
104
redash/authentication.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import functools
|
||||
import hashlib
|
||||
import hmac
|
||||
from flask import current_app, request, make_response, g, redirect, url_for
|
||||
from flask.ext.googleauth import GoogleAuth, login
|
||||
from flask.ext.login import LoginManager, login_user, current_user
|
||||
import time
|
||||
import logging
|
||||
from werkzeug.contrib.fixers import ProxyFix
|
||||
from redash import models, settings
|
||||
|
||||
login_manager = LoginManager()
|
||||
logger = logging.getLogger('authentication')
|
||||
|
||||
def sign(key, path, expires):
|
||||
if not key:
|
||||
return None
|
||||
|
||||
h = hmac.new(str(key), msg=path, digestmod=hashlib.sha1)
|
||||
h.update(str(expires))
|
||||
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
class HMACAuthentication(object):
|
||||
@staticmethod
|
||||
def api_key_authentication():
|
||||
signature = request.args.get('signature')
|
||||
expires = float(request.args.get('expires') or 0)
|
||||
query_id = request.view_args.get('query_id', None)
|
||||
|
||||
# TODO: 3600 should be a setting
|
||||
if signature and query_id and time.time() < expires <= time.time() + 3600:
|
||||
query = models.Query.get(models.Query.id == query_id)
|
||||
calculated_signature = sign(query.api_key, request.path, expires)
|
||||
|
||||
if query.api_key and signature == calculated_signature:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def is_user_logged_in():
|
||||
return current_user.is_authenticated()
|
||||
|
||||
def required(self, fn):
|
||||
@functools.wraps(fn)
|
||||
def decorated(*args, **kwargs):
|
||||
if self.is_user_logged_in():
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
if self.api_key_authentication():
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
return make_response(redirect(url_for("login", next=request.url)))
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def validate_email(email):
|
||||
if not settings.GOOGLE_APPS_DOMAIN:
|
||||
return True
|
||||
|
||||
return email in settings.ALLOWED_EXTERNAL_USERS or email.endswith("@%s" % settings.GOOGLE_APPS_DOMAIN)
|
||||
|
||||
|
||||
def create_and_login_user(app, user):
|
||||
if not validate_email(user.email):
|
||||
return
|
||||
|
||||
try:
|
||||
user_object = models.User.get(models.User.email == user.email)
|
||||
if user_object.name != user.name:
|
||||
logger.debug("Updating user name (%r -> %r)", user_object.name, user.name)
|
||||
user_object.name = user.name
|
||||
user_object.save()
|
||||
except models.User.DoesNotExist:
|
||||
logger.debug("Creating user object (%r)", user.name)
|
||||
user_object = models.User.create(name=user.name, email=user.email,
|
||||
is_admin=(user.email in settings.ADMINS))
|
||||
|
||||
login_user(user_object, remember=True)
|
||||
|
||||
login.connect(create_and_login_user)
|
||||
|
||||
|
||||
@login_manager.user_loader
|
||||
def load_user(user_id):
|
||||
return models.User.select().where(models.User.id == user_id).first()
|
||||
|
||||
|
||||
def setup_authentication(app):
|
||||
if settings.GOOGLE_OPENID_ENABLED:
|
||||
openid_auth = GoogleAuth(app, url_prefix="/google_auth")
|
||||
# If we don't have a list of external users, we can use Google's federated login, which limits
|
||||
# the domain with which you can sign in.
|
||||
if not settings.ALLOWED_EXTERNAL_USERS and settings.GOOGLE_APPS_DOMAIN:
|
||||
openid_auth._OPENID_ENDPOINT = "https://www.google.com/a/%s/o8/ud?be=o8" % settings.GOOGLE_APPS_DOMAIN
|
||||
|
||||
login_manager.init_app(app)
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app)
|
||||
app.secret_key = settings.COOKIE_SECRET
|
||||
|
||||
return HMACAuthentication()
|
||||
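# --- Usage sketch (illustration only, not part of the commit) ---
# Building a signed URL that api_key_authentication() above would accept.
# The api_key and path below are hypothetical placeholder values.
import time

api_key = "0123456789abcdef0123456789abcdef"   # hypothetical query.api_key
path = "/api/queries/42/results.csv"            # hypothetical request path
expires = time.time() + 1800                    # must stay inside the one-hour window checked above

signed_url = "%s?signature=%s&expires=%s" % (path, sign(api_key, path, expires), expires)
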
375
redash/controllers.py
Normal file
@@ -0,0 +1,375 @@
|
||||
"""
|
||||
Flask-restful based API implementation for re:dash.
|
||||
|
||||
Currently the Flask server is used to serve the static assets (and the Angular.js app),
|
||||
but this is temporary and only due to configuration issues.
|
||||
"""
|
||||
import csv
|
||||
import hashlib
|
||||
import json
|
||||
import numbers
|
||||
import cStringIO
|
||||
import datetime
|
||||
|
||||
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
|
||||
session, url_for
|
||||
from flask.ext.restful import Resource, abort
|
||||
from flask_login import current_user, login_user, logout_user
|
||||
|
||||
import sqlparse
|
||||
from redash import settings, utils
|
||||
from redash import data
|
||||
|
||||
from redash import app, auth, api, redis_connection, data_manager
|
||||
from redash import models
|
||||
|
||||
|
||||
@app.route('/ping', methods=['GET'])
|
||||
def ping():
|
||||
return 'PONG.'
|
||||
|
||||
|
||||
@app.route('/admin/<anything>')
|
||||
@app.route('/dashboard/<anything>')
|
||||
@app.route('/queries')
|
||||
@app.route('/queries/<anything>')
|
||||
@app.route('/')
|
||||
@auth.required
|
||||
def index(anything=None):
|
||||
email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
|
||||
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
user = {
|
||||
'gravatar_url': gravatar_url,
|
||||
'is_admin': current_user.is_admin,
|
||||
'id': current_user.id,
|
||||
'name': current_user.name,
|
||||
'email': current_user.email
|
||||
}
|
||||
|
||||
return render_template("index.html", user=json.dumps(user), analytics=settings.ANALYTICS)
|
||||
|
||||
|
||||
@app.route('/login', methods=['GET', 'POST'])
|
||||
def login():
|
||||
if current_user.is_authenticated():
|
||||
return redirect(request.args.get('next') or '/')
|
||||
|
||||
if not settings.PASSWORD_LOGIN_ENABLED:
|
||||
blueprint = app.extensions['googleauth'].blueprint
|
||||
return redirect(url_for("%s.login" % blueprint.name, next=request.args.get('next')))
|
||||
|
||||
if request.method == 'POST':
|
||||
user = models.User.select().where(models.User.email == request.form['username']).first()
|
||||
if user and user.verify_password(request.form['password']):
|
||||
remember = ('remember' in request.form)
|
||||
login_user(user, remember=remember)
|
||||
return redirect(request.args.get('next') or '/')
|
||||
|
||||
return render_template("login.html",
|
||||
analytics=settings.ANALYTICS,
|
||||
next=request.args.get('next'),
|
||||
username=request.form.get('username', ''),
|
||||
show_google_openid=settings.GOOGLE_OPENID_ENABLED)
|
||||
|
||||
|
||||
@app.route('/logout')
|
||||
def logout():
|
||||
logout_user()
|
||||
session.pop('openid', None)
|
||||
|
||||
return redirect('/login')
|
||||
|
||||
@app.route('/status.json')
|
||||
@auth.required
|
||||
def status_api():
|
||||
status = {}
|
||||
info = redis_connection.info()
|
||||
status['redis_used_memory'] = info['used_memory_human']
|
||||
|
||||
status['queries_count'] = models.Query.select().count()
|
||||
status['query_results_count'] = models.QueryResult.select().count()
|
||||
status['dashboards_count'] = models.Dashboard.select().count()
|
||||
status['widgets_count'] = models.Widget.select().count()
|
||||
|
||||
status['workers'] = [redis_connection.hgetall(w)
|
||||
for w in redis_connection.smembers('workers')]
|
||||
|
||||
manager_status = redis_connection.hgetall('manager:status')
|
||||
status['manager'] = manager_status
|
||||
status['manager']['queue_size'] = redis_connection.zcard('jobs')
|
||||
|
||||
return jsonify(status)
|
||||
|
||||
|
||||
@app.route('/api/queries/format', methods=['POST'])
|
||||
@auth.required
|
||||
def format_sql_query():
|
||||
arguments = request.get_json(force=True)
|
||||
query = arguments.get("query", "")
|
||||
|
||||
return sqlparse.format(query, reindent=True, keyword_case='upper')
|
||||
|
||||
|
||||
class BaseResource(Resource):
|
||||
decorators = [auth.required]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(BaseResource, self).__init__(*args, **kwargs)
|
||||
self._user = None
|
||||
|
||||
@property
|
||||
def current_user(self):
|
||||
return current_user._get_current_object()
|
||||
|
||||
|
||||
class DashboardListAPI(BaseResource):
|
||||
def get(self):
|
||||
dashboards = [d.to_dict() for d in
|
||||
models.Dashboard.select().where(models.Dashboard.is_archived==False)]
|
||||
|
||||
return dashboards
|
||||
|
||||
def post(self):
|
||||
dashboard_properties = request.get_json(force=True)
|
||||
dashboard = models.Dashboard(name=dashboard_properties['name'],
|
||||
user=self.current_user,
|
||||
layout='[]')
|
||||
dashboard.save()
|
||||
return dashboard.to_dict()
|
||||
|
||||
|
||||
class DashboardAPI(BaseResource):
|
||||
def get(self, dashboard_slug=None):
|
||||
try:
|
||||
dashboard = models.Dashboard.get_by_slug(dashboard_slug)
|
||||
except models.Dashboard.DoesNotExist:
|
||||
abort(404)
|
||||
|
||||
return dashboard.to_dict(with_widgets=True)
|
||||
|
||||
def post(self, dashboard_slug):
|
||||
dashboard_properties = request.get_json(force=True)
|
||||
# TODO: either convert all requests to use slugs or ids
|
||||
dashboard = models.Dashboard.get_by_id(dashboard_slug)
|
||||
dashboard.layout = dashboard_properties['layout']
|
||||
dashboard.name = dashboard_properties['name']
|
||||
dashboard.save()
|
||||
|
||||
return dashboard.to_dict(with_widgets=True)
|
||||
|
||||
def delete(self, dashboard_slug):
|
||||
dashboard = models.Dashboard.get_by_slug(dashboard_slug)
|
||||
dashboard.is_archived = True
|
||||
dashboard.save()
|
||||
|
||||
api.add_resource(DashboardListAPI, '/api/dashboards', endpoint='dashboards')
|
||||
api.add_resource(DashboardAPI, '/api/dashboards/<dashboard_slug>', endpoint='dashboard')
|
||||
|
||||
|
||||
class WidgetListAPI(BaseResource):
|
||||
def post(self):
|
||||
widget_properties = request.get_json(force=True)
|
||||
widget_properties['options'] = json.dumps(widget_properties['options'])
|
||||
widget_properties.pop('id', None)
|
||||
widget_properties['dashboard'] = widget_properties.pop('dashboard_id')
|
||||
widget_properties['visualization'] = widget_properties.pop('visualization_id')
|
||||
widget = models.Widget(**widget_properties)
|
||||
widget.save()
|
||||
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
new_row = True
|
||||
|
||||
if len(layout) == 0 or widget.width == 2:
|
||||
layout.append([widget.id])
|
||||
elif len(layout[-1]) == 1:
|
||||
neighbour_widget = models.Widget.get(models.Widget.id == layout[-1][0])
|
||||
if neighbour_widget.width == 1:
|
||||
layout[-1].append(widget.id)
|
||||
new_row = False
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
return {'widget': widget.to_dict(), 'layout': layout, 'new_row': new_row}
|
||||
|
||||
|
||||
class WidgetAPI(BaseResource):
|
||||
def delete(self, widget_id):
|
||||
widget = models.Widget.get(models.Widget.id == widget_id)
|
||||
# TODO: reposition existing ones
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
layout = map(lambda row: filter(lambda w: w != widget_id, row), layout)
|
||||
layout = filter(lambda row: len(row) > 0, layout)
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
widget.delete_instance()
|
||||
|
||||
api.add_resource(WidgetListAPI, '/api/widgets', endpoint='widgets')
|
||||
api.add_resource(WidgetAPI, '/api/widgets/<int:widget_id>', endpoint='widget')
|
||||
|
||||
|
||||
class QueryListAPI(BaseResource):
|
||||
def post(self):
|
||||
query_def = request.get_json(force=True)
|
||||
# id, created_at, api_key
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
query_def['user'] = self.current_user
|
||||
query = models.Query(**query_def)
|
||||
query.save()
|
||||
|
||||
query.create_default_visualizations()
|
||||
|
||||
return query.to_dict(with_result=False)
|
||||
|
||||
def get(self):
|
||||
return [q.to_dict(with_result=False, with_stats=True) for q in models.Query.all_queries()]
|
||||
|
||||
|
||||
class QueryAPI(BaseResource):
|
||||
def post(self, query_id):
|
||||
query_def = request.get_json(force=True)
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
if 'latest_query_data_id' in query_def:
|
||||
query_def['latest_query_data'] = query_def.pop('latest_query_data_id')
|
||||
|
||||
models.Query.update_instance(query_id, **query_def)
|
||||
|
||||
query = models.Query.get_by_id(query_id)
|
||||
|
||||
return query.to_dict(with_result=False, with_visualizations=True)
|
||||
|
||||
def get(self, query_id):
|
||||
q = models.Query.get(models.Query.id == query_id)
|
||||
if q:
|
||||
return q.to_dict(with_visualizations=True)
|
||||
else:
|
||||
abort(404, message="Query not found.")
|
||||
|
||||
api.add_resource(QueryListAPI, '/api/queries', endpoint='queries')
|
||||
api.add_resource(QueryAPI, '/api/queries/<query_id>', endpoint='query')
|
||||
|
||||
|
||||
class VisualizationListAPI(BaseResource):
|
||||
def post(self):
|
||||
kwargs = request.get_json(force=True)
|
||||
kwargs['options'] = json.dumps(kwargs['options'])
|
||||
kwargs['query'] = kwargs.pop('query_id')
|
||||
|
||||
vis = models.Visualization(**kwargs)
|
||||
vis.save()
|
||||
|
||||
return vis.to_dict(with_query=False)
|
||||
|
||||
|
||||
class VisualizationAPI(BaseResource):
|
||||
def post(self, visualization_id):
|
||||
kwargs = request.get_json(force=True)
|
||||
if 'options' in kwargs:
|
||||
kwargs['options'] = json.dumps(kwargs['options'])
|
||||
kwargs.pop('id', None)
|
||||
|
||||
update = models.Visualization.update(**kwargs).where(models.Visualization.id == visualization_id)
|
||||
update.execute()
|
||||
|
||||
vis = models.Visualization.get_by_id(visualization_id)
|
||||
|
||||
return vis.to_dict(with_query=False)
|
||||
|
||||
def delete(self, visualization_id):
|
||||
vis = models.Visualization.get(models.Visualization.id == visualization_id)
|
||||
vis.delete_instance()
|
||||
|
||||
api.add_resource(VisualizationListAPI, '/api/visualizations', endpoint='visualizations')
|
||||
api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', endpoint='visualization')
|
||||
|
||||
|
||||
class QueryResultListAPI(BaseResource):
|
||||
def post(self):
|
||||
params = request.json
|
||||
|
||||
if params['ttl'] == 0:
|
||||
query_result = None
|
||||
else:
|
||||
query_result = data_manager.get_query_result(params['query'], int(params['ttl']))
|
||||
|
||||
if query_result:
|
||||
return {'query_result': query_result.to_dict(parse_data=True)}
|
||||
else:
|
||||
job = data_manager.add_job(params['query'], data.Job.HIGH_PRIORITY)
|
||||
return {'job': job.to_dict()}
|
||||
|
||||
|
||||
class QueryResultAPI(BaseResource):
|
||||
def get(self, query_result_id):
|
||||
query_result = data_manager.get_query_result_by_id(query_result_id)
|
||||
if query_result:
|
||||
return {'query_result': query_result.to_dict(parse_data=True)}
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
|
||||
class CsvQueryResultsAPI(BaseResource):
|
||||
def get(self, query_id, query_result_id=None):
|
||||
if not query_result_id:
|
||||
query = models.Query.get(models.Query.id == query_id)
|
||||
if query:
|
||||
query_result_id = query._data['latest_query_data']
|
||||
|
||||
query_result = query_result_id and data_manager.get_query_result_by_id(query_result_id)
|
||||
if query_result:
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(query_result.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
for k, v in row.iteritems():
|
||||
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
|
||||
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
|
||||
|
||||
writer.writerow(row)
|
||||
|
||||
return make_response(s.getvalue(), 200, {'Content-Type': "text/csv; charset=UTF-8"})
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
api.add_resource(CsvQueryResultsAPI, '/api/queries/<query_id>/results/<query_result_id>.csv',
|
||||
'/api/queries/<query_id>/results.csv',
|
||||
endpoint='csv_query_results')
|
||||
api.add_resource(QueryResultListAPI, '/api/query_results', endpoint='query_results')
|
||||
api.add_resource(QueryResultAPI, '/api/query_results/<query_result_id>', endpoint='query_result')
|
||||
|
||||
|
||||
class JobAPI(BaseResource):
|
||||
def get(self, job_id):
|
||||
# TODO: if finished, include the query result
|
||||
job = data.Job.load(data_manager.redis_connection, job_id)
|
||||
return {'job': job.to_dict()}
|
||||
|
||||
def delete(self, job_id):
|
||||
job = data.Job.load(data_manager.redis_connection, job_id)
|
||||
job.cancel()
|
||||
|
||||
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
|
||||
|
||||
@app.route('/<path:filename>')
|
||||
def send_static(filename):
|
||||
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(debug=True)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,2 @@
|
||||
from manager import Manager
|
||||
from worker import Job
|
||||
import models
|
||||
import utils
|
||||
@@ -1,18 +1,16 @@
|
||||
"""
|
||||
Data manager. Used to manage and coordinate execution of queries.
|
||||
"""
|
||||
import collections
|
||||
from contextlib import contextmanager
|
||||
import collections
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
import psycopg2
|
||||
import qr
|
||||
import redis
|
||||
import time
|
||||
import query_runner
|
||||
import worker
|
||||
from utils import gen_query_hash
|
||||
|
||||
from redash.data import worker
|
||||
from redash.utils import gen_query_hash
|
||||
|
||||
class QueryResult(collections.namedtuple('QueryData', 'id query data runtime retrieved_at query_hash')):
|
||||
def to_dict(self, parse_data=False):
|
||||
@@ -25,10 +23,11 @@ class QueryResult(collections.namedtuple('QueryData', 'id query data runtime ret
|
||||
|
||||
|
||||
class Manager(object):
|
||||
def __init__(self, redis_connection, db_connection_string, db_max_connections):
|
||||
def __init__(self, redis_connection, db, statsd_client):
|
||||
self.statsd_client = statsd_client
|
||||
self.redis_connection = redis_connection
|
||||
self.db = db
|
||||
self.workers = []
|
||||
self.db_connection_string = db_connection_string
|
||||
self.queue = qr.PriorityQueue("jobs", **self.redis_connection.connection_pool.connection_kwargs)
|
||||
self.max_retries = 5
|
||||
self.status = {
|
||||
@@ -99,22 +98,39 @@ class Manager(object):
|
||||
|
||||
return job
|
||||
|
||||
def report_status(self):
|
||||
workers = [self.redis_connection.hgetall(w)
|
||||
for w in self.redis_connection.smembers('workers')]
|
||||
|
||||
for w in workers:
|
||||
self.statsd_client.gauge('worker_{}.seconds_since_update'.format(w['id']),
|
||||
time.time() - float(w['updated_at']))
|
||||
self.statsd_client.gauge('worker_{}.jobs_received'.format(w['id']), int(w['jobs_count']))
|
||||
self.statsd_client.gauge('worker_{}.jobs_done'.format(w['id']), int(w['done_jobs_count']))
|
||||
|
||||
manager_status = self.redis_connection.hgetall('manager:status')
|
||||
self.statsd_client.gauge('manager.seconds_since_refresh',
|
||||
time.time() - float(manager_status['last_refresh_at']))
|
||||
|
||||
def refresh_queries(self):
|
||||
sql = """SELECT queries.query, queries.ttl, retrieved_at
|
||||
FROM (SELECT query, min(ttl) as ttl FROM queries WHERE ttl > 0 GROUP by query) queries
|
||||
JOIN (SELECT query, max(retrieved_at) as retrieved_at
|
||||
FROM query_results
|
||||
GROUP BY query) query_results on query_results.query=queries.query
|
||||
WHERE queries.ttl > 0
|
||||
AND query_results.retrieved_at + ttl * interval '1 second' < now() at time zone 'utc';"""
|
||||
sql = """SELECT first_value(t1."query") over(partition by t1.query_hash)
|
||||
FROM "queries" AS t1
|
||||
INNER JOIN "query_results" AS t2 ON (t1."latest_query_data_id" = t2."id")
|
||||
WHERE ((t1."ttl" > 0) AND ((t2."retrieved_at" + t1.ttl * interval '1 second') <
|
||||
now() at time zone 'utc'));
|
||||
"""
|
||||
|
||||
self.status['last_refresh_at'] = time.time()
|
||||
self._save_status()
|
||||
|
||||
logging.info("Refreshing queries...")
|
||||
queries = self.run_query(sql)
|
||||
for query, ttl, retrieved_at in queries:
|
||||
self.add_job(query, worker.Job.LOW_PRIORITY)
|
||||
|
||||
for query in queries:
|
||||
self.add_job(query[0], worker.Job.LOW_PRIORITY)
|
||||
|
||||
self.statsd_client.gauge('manager.outdated_queries', len(queries))
|
||||
self.statsd_client.gauge('manager.queue_size', self.redis_connection.zcard('jobs'))
|
||||
|
||||
logging.info("Done refreshing queries... %d" % len(queries))
|
||||
|
||||
@@ -150,15 +166,28 @@ class Manager(object):
|
||||
|
||||
return data
|
||||
|
||||
def start_workers(self, workers_count, connection_string):
|
||||
def start_workers(self, workers_count, connection_type, connection_string):
|
||||
if self.workers:
|
||||
return self.workers
|
||||
|
||||
runner = query_runner.redshift(connection_string)
|
||||
|
||||
if connection_type == 'mysql':
|
||||
from redash.data import query_runner_mysql
|
||||
runner = query_runner_mysql.mysql(connection_string)
|
||||
elif connection_type == 'graphite':
|
||||
from redash.data import query_runner_graphite
|
||||
connection_params = json.loads(connection_string)
|
||||
if connection_params['auth']:
|
||||
connection_params['auth'] = tuple(connection_params['auth'])
|
||||
else:
|
||||
connection_params['auth'] = None
|
||||
runner = query_runner_graphite.graphite(connection_params)
|
||||
else:
|
||||
from redash.data import query_runner
|
||||
runner = query_runner.redshift(connection_string)
|
||||
|
||||
redis_connection_params = self.redis_connection.connection_pool.connection_kwargs
|
||||
self.workers = [worker.Worker(self, redis_connection_params, runner)
|
||||
for _ in range(workers_count)]
|
||||
self.workers = [worker.Worker(worker_id, self, redis_connection_params, runner)
|
||||
for worker_id in range(workers_count)]
|
||||
for w in self.workers:
|
||||
w.start()
|
||||
|
||||
@@ -171,17 +200,18 @@ class Manager(object):
|
||||
|
||||
@contextmanager
|
||||
def db_transaction(self):
|
||||
connection = psycopg2.connect(self.db_connection_string)
|
||||
cursor = connection.cursor()
|
||||
self.db.connect_db()
|
||||
|
||||
cursor = self.db.database.get_cursor()
|
||||
try:
|
||||
yield cursor
|
||||
except:
|
||||
connection.rollback()
|
||||
self.db.database.rollback()
|
||||
raise
|
||||
else:
|
||||
connection.commit()
|
||||
self.db.database.commit()
|
||||
finally:
|
||||
connection.close()
|
||||
self.db.close_db(None)
|
||||
|
||||
def _save_status(self):
|
||||
self.redis_connection.hmset('manager:status', self.status)
|
||||
@@ -6,17 +6,19 @@ QueryRunner is the function that the workers use, to execute queries. This is th
|
||||
Because the worker just passes the query, this can be used with any data store that has some sort of
|
||||
query language (for example: HiveQL).
|
||||
"""
|
||||
import logging
|
||||
import json
|
||||
import psycopg2
|
||||
import sys
|
||||
import select
|
||||
from .utils import JSONEncoder
|
||||
|
||||
import psycopg2
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
def redshift(connection_string):
|
||||
def column_friendly_name(column_name):
|
||||
return column_name
|
||||
|
||||
|
||||
def wait(conn):
|
||||
while 1:
|
||||
state = conn.poll()
|
||||
@@ -28,24 +30,24 @@ def redshift(connection_string):
|
||||
select.select([conn.fileno()], [], [])
|
||||
else:
|
||||
raise psycopg2.OperationalError("poll() returned %s" % state)
|
||||
|
||||
|
||||
def query_runner(query):
|
||||
connection = psycopg2.connect(connection_string, async=True)
|
||||
wait(connection)
|
||||
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
wait(connection)
|
||||
|
||||
|
||||
column_names = [col.name for col in cursor.description]
|
||||
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
columns = [{'name': col.name,
|
||||
'friendly_name': column_friendly_name(col.name),
|
||||
'type': None} for col in cursor.description]
|
||||
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
@@ -61,7 +63,7 @@ def redshift(connection_string):
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
return query_runner
|
||||
46
redash/data/query_runner_graphite.py
Normal file
@@ -0,0 +1,46 @@
"""
QueryRunner for Graphite.
"""
import json
import datetime
import requests
from redash.utils import JSONEncoder


def graphite(connection_params):
    def transform_result(response):
        columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
        rows = []

        for series in response.json():
            for values in series['datapoints']:
                timestamp = datetime.datetime.fromtimestamp(int(values[1]))
                rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})

        data = {'columns': columns, 'rows': rows}
        return json.dumps(data, cls=JSONEncoder)

    def query_runner(query):
        base_url = "%s/render?format=json&" % connection_params['url']
        url = "%s%s" % (base_url, "&".join(query.split("\n")))
        error = None
        data = None

        try:
            response = requests.get(url, auth=connection_params['auth'],
                                    verify=connection_params['verify'])

            if response.status_code == 200:
                data = transform_result(response)
            else:
                error = "Failed getting results (%d)" % response.status_code

        except Exception, ex:
            data = None
            error = ex.message

        return data, error

    query_runner.annotate_query = False

    return query_runner

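# --- Illustration (not part of the commit): what a saved Graphite "query" looks like ---
# Each line of the query text is one render-API parameter; the runner above joins them with '&'.
example_query = "target=stats.counters.signups.count\nfrom=-7days\nuntil=now"  # hypothetical
# With connection_params['url'] == "https://graphite.example.com" this would request:
#   https://graphite.example.com/render?format=json&target=stats.counters.signups.count&from=-7days&until=now
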
58
redash/data/query_runner_mysql.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""
|
||||
QueryRunner is the function that the workers use to execute queries. This is the MySQL
version; similar runners can be written to support additional databases.
|
||||
|
||||
Because the worker just passes the query, this can be used with any data store that has some sort of
|
||||
query language (for example: HiveQL).
|
||||
"""
|
||||
import logging
|
||||
import json
|
||||
import MySQLdb
|
||||
import sys
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
def mysql(connection_string):
|
||||
if connection_string.endswith(';'):
|
||||
connection_string = connection_string[0:-1]
|
||||
|
||||
def query_runner(query):
|
||||
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
|
||||
connection = MySQLdb.connect(*connections_params)
|
||||
cursor = connection.cursor()
|
||||
|
||||
logging.debug("mysql got query: %s", query)
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
|
||||
data = cursor.fetchall()
|
||||
|
||||
num_fields = len(cursor.description)
|
||||
column_names = [i[0] for i in cursor.description]
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in data]
|
||||
|
||||
columns = [{'name': col_name,
|
||||
'friendly_name': col_name,
|
||||
'type': None} for col_name in column_names]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
cursor.close()
|
||||
except MySQLdb.Error, e:
|
||||
json_data = None
|
||||
error = e.args[1]
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
return query_runner
|
||||
@@ -11,8 +11,9 @@ import time
|
||||
import signal
|
||||
import setproctitle
|
||||
import redis
|
||||
from utils import gen_query_hash
|
||||
|
||||
from statsd import StatsClient
|
||||
from redash.utils import gen_query_hash
|
||||
from redash import settings
|
||||
|
||||
class Job(object):
|
||||
HIGH_PRIORITY = 1
|
||||
@@ -144,17 +145,20 @@ class Job(object):
|
||||
|
||||
|
||||
class Worker(threading.Thread):
|
||||
def __init__(self, manager, redis_connection_params, query_runner, sleep_time=0.1):
|
||||
def __init__(self, worker_id, manager, redis_connection_params, query_runner, sleep_time=0.1):
|
||||
self.manager = manager
|
||||
|
||||
self.statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT,
|
||||
prefix=settings.STATSD_PREFIX)
|
||||
self.redis_connection_params = {k: v for k, v in redis_connection_params.iteritems()
|
||||
if k in ('host', 'db', 'password', 'port')}
|
||||
self.continue_working = True
|
||||
self.query_runner = query_runner
|
||||
self.sleep_time = sleep_time
|
||||
self.child_pid = None
|
||||
self.worker_id = uuid.uuid1()
|
||||
self.worker_id = worker_id
|
||||
self.status = {
|
||||
'id': self.worker_id,
|
||||
'jobs_count': 0,
|
||||
'cancelled_jobs_count': 0,
|
||||
'done_jobs_count': 0,
|
||||
@@ -234,11 +238,16 @@ class Worker(threading.Thread):
|
||||
start_time = time.time()
|
||||
self.set_title("running query %s" % job_id)
|
||||
|
||||
annotated_query = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
|
||||
(pid, job.id, job.query_hash, job.priority, job.query)
|
||||
if getattr(self.query_runner, 'annotate_query', True):
|
||||
annotated_query = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
|
||||
(pid, job.id, job.query_hash, job.priority, job.query)
|
||||
else:
|
||||
annotated_query = job.query
|
||||
|
||||
# TODO: here's the part that needs to be forked, not all of the worker process...
|
||||
data, error = self.query_runner(annotated_query)
|
||||
with self.statsd_client.timer('worker_{}.query_runner.run_time'.format(self.worker_id)):
|
||||
data, error = self.query_runner(annotated_query)
|
||||
|
||||
run_time = time.time() - start_time
|
||||
logging.info("[%s][%s] query finished... data length=%s, error=%s",
|
||||
self.name, job.id, data and len(data), error)
|
||||
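# --- Illustration (hypothetical values, not part of the commit) ---
# What the annotation built above looks like for a runner that keeps annotate_query set to True.
pid, job_id, query_hash, priority = 4242, "b81c3ba6", "9c3b2f0e", 1
annotated = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
            (pid, job_id, query_hash, priority, "SELECT count(*) FROM events")
# -> "/* Pid: 4242, Job Id: b81c3ba6, Query hash: 9c3b2f0e, Priority: 1 */ SELECT count(*) FROM events"
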
292
redash/models.py
Normal file
@@ -0,0 +1,292 @@
|
||||
import json
|
||||
import hashlib
|
||||
import time
|
||||
import datetime
|
||||
from flask.ext.peewee.utils import slugify
|
||||
from flask.ext.login import UserMixin
|
||||
from passlib.apps import custom_app_context as pwd_context
|
||||
import peewee
|
||||
from redash import db, utils
|
||||
|
||||
|
||||
class BaseModel(db.Model):
|
||||
@classmethod
|
||||
def get_by_id(cls, model_id):
|
||||
return cls.get(cls.id == model_id)
|
||||
|
||||
|
||||
class User(BaseModel, UserMixin):
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField(max_length=320)
|
||||
email = peewee.CharField(max_length=320, index=True, unique=True)
|
||||
password_hash = peewee.CharField(max_length=128, null=True)
|
||||
is_admin = peewee.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
db_table = 'users'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'email': self.email,
|
||||
'is_admin': self.is_admin
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return '%r, %r' % (self.name, self.email)
|
||||
|
||||
def hash_password(self, password):
|
||||
self.password_hash = pwd_context.encrypt(password)
|
||||
|
||||
def verify_password(self, password):
|
||||
return self.password_hash and pwd_context.verify(password, self.password_hash)
|
||||
|
||||
|
||||
class QueryResult(db.Model):
|
||||
id = peewee.PrimaryKeyField()
|
||||
query_hash = peewee.CharField(max_length=32, index=True)
|
||||
query = peewee.TextField()
|
||||
data = peewee.TextField()
|
||||
runtime = peewee.FloatField()
|
||||
retrieved_at = peewee.DateTimeField()
|
||||
|
||||
class Meta:
|
||||
db_table = 'query_results'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'query_hash': self.query_hash,
|
||||
'query': self.query,
|
||||
'data': json.loads(self.data),
|
||||
'runtime': self.runtime,
|
||||
'retrieved_at': self.retrieved_at
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
|
||||
|
||||
|
||||
class Query(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
|
||||
name = peewee.CharField(max_length=255)
|
||||
description = peewee.CharField(max_length=4096, null=True)
|
||||
query = peewee.TextField()
|
||||
query_hash = peewee.CharField(max_length=32)
|
||||
api_key = peewee.CharField(max_length=40)
|
||||
ttl = peewee.IntegerField()
|
||||
user_email = peewee.CharField(max_length=360, null=True)
|
||||
user = peewee.ForeignKeyField(User)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'queries'
|
||||
|
||||
def create_default_visualizations(self):
|
||||
table_visualization = Visualization(query=self, name="Table",
|
||||
description='',
|
||||
type="TABLE", options="{}")
|
||||
table_visualization.save()
|
||||
|
||||
def to_dict(self, with_result=True, with_stats=False, with_visualizations=False, with_user=True):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'latest_query_data_id': self._data.get('latest_query_data', None),
|
||||
'name': self.name,
|
||||
'description': self.description,
|
||||
'query': self.query,
|
||||
'query_hash': self.query_hash,
|
||||
'ttl': self.ttl,
|
||||
'api_key': self.api_key,
|
||||
'created_at': self.created_at,
|
||||
}
|
||||
|
||||
if with_user:
|
||||
d['user'] = self.user.to_dict()
|
||||
else:
|
||||
d['user_id'] = self._data['user']
|
||||
|
||||
if with_stats:
|
||||
d['avg_runtime'] = self.avg_runtime
|
||||
d['min_runtime'] = self.min_runtime
|
||||
d['max_runtime'] = self.max_runtime
|
||||
d['last_retrieved_at'] = self.last_retrieved_at
|
||||
d['times_retrieved'] = self.times_retrieved
|
||||
|
||||
if with_visualizations:
|
||||
d['visualizations'] = [vis.to_dict(with_query=False)
|
||||
for vis in self.visualizations]
|
||||
|
||||
if with_result and self.latest_query_data:
|
||||
d['latest_query_data'] = self.latest_query_data.to_dict()
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def all_queries(cls):
|
||||
q = Query.select(Query, User,
|
||||
peewee.fn.Count(QueryResult.id).alias('times_retrieved'),
|
||||
peewee.fn.Avg(QueryResult.runtime).alias('avg_runtime'),
|
||||
peewee.fn.Min(QueryResult.runtime).alias('min_runtime'),
|
||||
peewee.fn.Max(QueryResult.runtime).alias('max_runtime'),
|
||||
peewee.fn.Max(QueryResult.retrieved_at).alias('last_retrieved_at'))\
|
||||
.join(QueryResult, join_type=peewee.JOIN_LEFT_OUTER)\
|
||||
.switch(Query).join(User)\
|
||||
.group_by(Query.id, User.id)
|
||||
|
||||
return q
|
||||
|
||||
@classmethod
|
||||
def update_instance(cls, query_id, **kwargs):
|
||||
if 'query' in kwargs:
|
||||
kwargs['query_hash'] = utils.gen_query_hash(kwargs['query'])
|
||||
|
||||
update = cls.update(**kwargs).where(cls.id == query_id)
|
||||
return update.execute()
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.query_hash = utils.gen_query_hash(self.query)
|
||||
self._set_api_key()
|
||||
super(Query, self).save(*args, **kwargs)
|
||||
|
||||
def _set_api_key(self):
|
||||
if not self.api_key:
|
||||
self.api_key = hashlib.sha1(
|
||||
u''.join((str(time.time()), self.query, str(self._data['user']), self.name)).encode('utf-8')).hexdigest()
|
||||
|
||||
def __unicode__(self):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class Dashboard(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
slug = peewee.CharField(max_length=140, index=True)
|
||||
name = peewee.CharField(max_length=100)
|
||||
user_email = peewee.CharField(max_length=360, null=True)
|
||||
user = peewee.ForeignKeyField(User)
|
||||
layout = peewee.TextField()
|
||||
is_archived = peewee.BooleanField(default=False, index=True)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'dashboards'
|
||||
|
||||
def to_dict(self, with_widgets=False):
|
||||
layout = json.loads(self.layout)
|
||||
|
||||
if with_widgets:
|
||||
widgets = Widget.select(Widget, Visualization, Query, QueryResult, User)\
|
||||
.where(Widget.dashboard == self.id)\
|
||||
.join(Visualization)\
|
||||
.join(Query)\
|
||||
.join(User)\
|
||||
.switch(Query)\
|
||||
.join(QueryResult)
|
||||
widgets = {w.id: w.to_dict() for w in widgets}
|
||||
widgets_layout = map(lambda row: map(lambda widget_id: widgets.get(widget_id, None), row), layout)
|
||||
else:
|
||||
widgets_layout = None
|
||||
|
||||
return {
|
||||
'id': self.id,
|
||||
'slug': self.slug,
|
||||
'name': self.name,
|
||||
'user_id': self._data['user'],
|
||||
'layout': layout,
|
||||
'widgets': widgets_layout
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_by_slug(cls, slug):
|
||||
return cls.get(cls.slug == slug)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
|
||||
tries = 1
|
||||
while self.select().where(Dashboard.slug == self.slug).first() is not None:
|
||||
self.slug = slugify(self.name) + "_{0}".format(tries)
|
||||
tries += 1
|
||||
|
||||
super(Dashboard, self).save(*args, **kwargs)
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s=%s" % (self.id, self.name)
|
||||
|
||||
|
||||
class Visualization(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
type = peewee.CharField(max_length=100)
|
||||
query = peewee.ForeignKeyField(Query, related_name='visualizations')
|
||||
name = peewee.CharField(max_length=255)
|
||||
description = peewee.CharField(max_length=4096, null=True)
|
||||
options = peewee.TextField()
|
||||
|
||||
class Meta:
|
||||
db_table = 'visualizations'
|
||||
|
||||
def to_dict(self, with_query=True):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'type': self.type,
|
||||
'name': self.name,
|
||||
'description': self.description,
|
||||
'options': json.loads(self.options),
|
||||
}
|
||||
|
||||
if with_query:
|
||||
d['query'] = self.query.to_dict()
|
||||
|
||||
return d
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s %s" % (self.id, self.type)
|
||||
|
||||
|
||||
class Widget(db.Model):
|
||||
id = peewee.PrimaryKeyField()
|
||||
visualization = peewee.ForeignKeyField(Visualization, related_name='widgets')
|
||||
|
||||
width = peewee.IntegerField()
|
||||
options = peewee.TextField()
|
||||
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
|
||||
# unused; kept for backward compatibility:
|
||||
type = peewee.CharField(max_length=100, null=True)
|
||||
query_id = peewee.IntegerField(null=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'widgets'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'width': self.width,
|
||||
'options': json.loads(self.options),
|
||||
'visualization': self.visualization.to_dict(),
|
||||
'dashboard_id': self._data['dashboard']
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s" % self.id
|
||||
|
||||
all_models = (User, QueryResult, Query, Dashboard, Visualization, Widget)
|
||||
|
||||
|
||||
def create_db(create_tables, drop_tables):
|
||||
db.connect_db()
|
||||
|
||||
for model in all_models:
|
||||
if drop_tables and model.table_exists():
|
||||
# TODO: submit PR to peewee to allow passing cascade option to drop_table.
|
||||
db.database.execute_sql('DROP TABLE %s CASCADE' % model._meta.db_table)
|
||||
#model.drop_table()
|
||||
|
||||
if create_tables:
|
||||
model.create_table()
|
||||
|
||||
db.close_db(None)
|
||||
70
redash/settings.py
Normal file
@@ -0,0 +1,70 @@
import json
import os
import urlparse


def parse_db_url(url):
    url_parts = urlparse.urlparse(url)
    connection = {
        'engine': 'peewee.PostgresqlDatabase',
    }

    if url_parts.hostname and not url_parts.path:
        connection['name'] = url_parts.hostname
    else:
        connection['name'] = url_parts.path[1:]
        connection['host'] = url_parts.hostname
        connection['port'] = url_parts.port
        connection['user'] = url_parts.username
        connection['password'] = url_parts.password

    return connection


def fix_assets_path(path):
    fullpath = os.path.join(os.path.dirname(__file__), path)
    return fullpath


def array_from_string(str):
    array = str.split(',')
    if "" in array:
        array.remove("")

    return array


def parse_boolean(str):
    return json.loads(str.lower())


REDIS_URL = os.environ.get('REDASH_REDIS_URL', "redis://localhost:6379")

STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1")
STATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', "8125"))
STATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', "redash")

# "pg", "graphite" or "mysql"
CONNECTION_ADAPTER = os.environ.get("REDASH_CONNECTION_ADAPTER", "pg")
# Connection string for the database that is used to run queries against. Examples:
# -- mysql: CONNECTION_STRING = "Server=;User=;Pwd=;Database="
# -- pg: CONNECTION_STRING = "user= password= host= port=5439 dbname="
# -- graphite: CONNECTION_STRING = {"url": "https://graphite.yourcompany.com", "auth": ["user", "password"], "verify": true}
CONNECTION_STRING = os.environ.get("REDASH_CONNECTION_STRING", "user= password= host= port=5439 dbname=")

# Connection settings for re:dash's own database (where we store the queries, results, etc)
DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", "postgresql://postgres"))

# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
# access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
GOOGLE_OPENID_ENABLED = parse_boolean(os.environ.get("REDASH_GOOGLE_OPENID_ENABLED", "true"))
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "false"))
# Email addresses of admin users (comma separated)
ADMINS = array_from_string(os.environ.get("REDASH_ADMINS", ''))
ALLOWED_EXTERNAL_USERS = array_from_string(os.environ.get("REDASH_ALLOWED_EXTERNAL_USERS", ''))
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
WORKERS_COUNT = int(os.environ.get("REDASH_WORKERS_COUNT", "2"))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")

|
||||
super(JSONEncoder, self).default(o)
|
||||
|
||||
|
||||
def json_dumps(data):
|
||||
return json.dumps(data, cls=JSONEncoder)
|
||||
|
||||
|
||||
class UnicodeWriter:
|
||||
"""
|
||||
A CSV writer which will write rows to CSV file "f",
|
||||
29
requirements.txt
Normal file
@@ -0,0 +1,29 @@
Flask==0.10.1
Flask-GoogleAuth==0.4
Flask-RESTful==0.2.10
Flask-Login==0.2.9
passlib==1.6.2
Jinja2==2.7.2
MarkupSafe==0.18
WTForms==1.0.5
Werkzeug==0.9.4
aniso8601==0.82
atfork==0.1.2
blinker==1.3
flask-peewee==0.6.5
itsdangerous==0.23
peewee==2.2.0
psycopg2==2.5.1
python-dateutil==2.1
pytz==2013.9
qr==0.6.0
redis==2.7.5
requests==2.2.0
setproctitle==1.1.8
six==1.5.2
sqlparse==0.1.8
wsgiref==0.1.2
wtf-peewee==0.2.2
Flask-Script==0.6.6
honcho==0.5.0
statsd==2.1.2

24
tests/__init__.py
Normal file
@@ -0,0 +1,24 @@
from unittest import TestCase
from redash import settings, db, app
import redash.models

# TODO: this isn't pretty...
settings.DATABASE_CONFIG = {
    'name': 'circle_test',
    'engine': 'peewee.PostgresqlDatabase',
    'threadlocals': True
}
app.config['DATABASE'] = settings.DATABASE_CONFIG
db.load_database()

for model in redash.models.all_models:
    model._meta.database = db.database


class BaseTestCase(TestCase):
    def setUp(self):
        redash.models.create_db(True, True)

    def tearDown(self):
        db.close_db(None)
        redash.models.create_db(False, True)

76
tests/factories.py
Normal file
@@ -0,0 +1,76 @@
|
||||
import datetime
|
||||
import redash.models
|
||||
|
||||
|
||||
class ModelFactory(object):
|
||||
def __init__(self, model, **kwargs):
|
||||
self.model = model
|
||||
self.kwargs = kwargs
|
||||
|
||||
def _get_kwargs(self, override_kwargs):
|
||||
kwargs = self.kwargs.copy()
|
||||
kwargs.update(override_kwargs)
|
||||
|
||||
for key, arg in kwargs.items():
|
||||
if callable(arg):
|
||||
kwargs[key] = arg()
|
||||
|
||||
return kwargs
|
||||
|
||||
def instance(self, **override_kwargs):
|
||||
kwargs = self._get_kwargs(override_kwargs)
|
||||
|
||||
return self.model(**kwargs)
|
||||
|
||||
def create(self, **override_kwargs):
|
||||
kwargs = self._get_kwargs(override_kwargs)
|
||||
return self.model.create(**kwargs)
|
||||
|
||||
|
||||
class Sequence(object):
|
||||
def __init__(self, string):
|
||||
self.sequence = 0
|
||||
self.string = string
|
||||
|
||||
def __call__(self):
|
||||
self.sequence += 1
|
||||
|
||||
return self.string.format(self.sequence)
|
||||
|
||||
|
||||
user_factory = ModelFactory(redash.models.User,
|
||||
name='John Doe', email=Sequence('test{}@example.com'),
|
||||
is_admin=False)
|
||||
|
||||
|
||||
dashboard_factory = ModelFactory(redash.models.Dashboard,
|
||||
name='test', user=user_factory.create, layout='[]')
|
||||
|
||||
|
||||
query_factory = ModelFactory(redash.models.Query,
|
||||
name='New Query',
|
||||
description='',
|
||||
query='SELECT 1',
|
||||
ttl=-1,
|
||||
user=user_factory.create)
|
||||
|
||||
query_result_factory = ModelFactory(redash.models.QueryResult,
|
||||
data='{"columns":{}, "rows":[]}',
|
||||
runtime=1,
|
||||
retrieved_at=datetime.datetime.now(),
|
||||
query=query_factory.create,
|
||||
query_hash='')
|
||||
|
||||
visualization_factory = ModelFactory(redash.models.Visualization,
|
||||
type='CHART',
|
||||
query=query_factory.create,
|
||||
name='Chart',
|
||||
description='',
|
||||
options='{}')
|
||||
|
||||
widget_factory = ModelFactory(redash.models.Widget,
|
||||
type='chart',
|
||||
width=1,
|
||||
options='{}',
|
||||
dashboard=dashboard_factory.create,
|
||||
visualization=visualization_factory.create)
|
||||
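A brief usage sketch of the factories above (illustrative only, not part of the original file): callable kwargs such as user_factory.create or a Sequence instance are resolved lazily in _get_kwargs, so each created object gets fresh related records and unique values.

# Illustrative usage of the factories above.
user = user_factory.create()                  # saved User with a unique email, e.g. test1@example.com
jane = user_factory.create(name='Jane Doe')   # any default kwarg can be overridden per call
dashboard = dashboard_factory.instance()      # instance() builds the model without saving it
widget = widget_factory.create()              # also creates a dashboard, visualization, query and user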
66  tests/test_authentication.py  Normal file
@@ -0,0 +1,66 @@
from unittest import TestCase
from mock import patch
from flask_googleauth import ObjectDict
from tests import BaseTestCase
from redash.authentication import validate_email, create_and_login_user
from redash import settings, models
from tests.factories import user_factory


class TestEmailValidation(TestCase):
    def test_accepts_address_with_correct_domain(self):
        with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'):
            self.assertTrue(validate_email('example@example.com'))

    def test_accepts_address_from_exception_list(self):
        with patch.multiple(settings, GOOGLE_APPS_DOMAIN='example.com', ALLOWED_EXTERNAL_USERS=['whatever@whatever.com']):
            self.assertTrue(validate_email('whatever@whatever.com'))

    def test_accept_any_address_when_domain_empty(self):
        with patch.object(settings, 'GOOGLE_APPS_DOMAIN', None):
            self.assertTrue(validate_email('whatever@whatever.com'))

    def test_rejects_address_with_incorrect_domain(self):
        with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'):
            self.assertFalse(validate_email('whatever@whatever.com'))


class TestCreateAndLoginUser(BaseTestCase):
    def test_logs_in_valid_user(self):
        user = user_factory.create(email='test@example.com')

        with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'), patch('redash.authentication.login_user') as login_user_mock:
            create_and_login_user(None, user)
            login_user_mock.assert_called_once_with(user, remember=True)

    def test_creates_valid_new_user(self):
        openid_user = ObjectDict({'email': 'test@example.com', 'name': 'Test User'})

        with patch.multiple(settings, GOOGLE_APPS_DOMAIN='example.com', ADMINS=['admin@example.com']), \
                patch('redash.authentication.login_user') as login_user_mock:

            create_and_login_user(None, openid_user)

            self.assertTrue(login_user_mock.called)
            user = models.User.get(models.User.email == openid_user.email)

            self.assertFalse(user.is_admin)

    def test_creates_valid_new_user_and_sets_is_admin(self):
        openid_user = ObjectDict({'email': 'admin@example.com', 'name': 'Test User'})

        with patch.multiple(settings, GOOGLE_APPS_DOMAIN='example.com', ADMINS=['admin@example.com']), \
                patch('redash.authentication.login_user') as login_user_mock:

            create_and_login_user(None, openid_user)

            self.assertTrue(login_user_mock.called)
            user = models.User.get(models.User.email == openid_user.email)
            self.assertTrue(user.is_admin)

    def test_ignores_invalid_user(self):
        user = ObjectDict({'email': 'test@whatever.com'})

        with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'), patch('redash.authentication.login_user') as login_user_mock:
            create_and_login_user(None, user)
            self.assertFalse(login_user_mock.called)
440  tests/test_controllers.py  Normal file
@@ -0,0 +1,440 @@
from contextlib import contextmanager
import json
import time
from unittest import TestCase
from flask import url_for
from flask.ext.login import current_user
from mock import patch
from tests import BaseTestCase
from tests.factories import dashboard_factory, widget_factory, visualization_factory, query_factory, \
    query_result_factory, user_factory
from redash import app, models, settings
from redash.utils import json_dumps
from redash.authentication import sign


settings.GOOGLE_APPS_DOMAIN = "example.com"


@contextmanager
def authenticated_user(c, user=None):
    if not user:
        user = user_factory.create()

    with c.session_transaction() as sess:
        sess['user_id'] = user.id

    yield


def json_request(method, path, data=None):
    if data:
        response = method(path, data=json_dumps(data))
    else:
        response = method(path)

    if response.data:
        response.json = json.loads(response.data)
    else:
        response.json = None

    return response
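The two helpers above are combined throughout the test classes below; a minimal illustration of the pattern (not part of the original file):

# authenticated_user() puts a real user id into the test client's session;
# json_request() wraps the client call and parses the response body into rv.json.
with app.test_client() as c, authenticated_user(c):
    rv = json_request(c.get, '/api/dashboards')
    assert rv.status_code == 200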
class AuthenticationTestMixin():
    def test_redirects_when_not_authenticated(self):
        with app.test_client() as c:
            for path in self.paths:
                rv = c.get(path)
                self.assertEquals(302, rv.status_code)

    def test_returns_content_when_authenticated(self):
        with app.test_client() as c, authenticated_user(c):
            for path in self.paths:
                rv = c.get(path)
                self.assertEquals(200, rv.status_code)


class TestAuthentication(BaseTestCase):
    def test_redirects_for_nonsigned_in_user(self):
        with app.test_client() as c:
            rv = c.get("/")
            self.assertEquals(302, rv.status_code)


class PingTest(TestCase):
    def test_ping(self):
        with app.test_client() as c:
            rv = c.get('/ping')
            self.assertEquals(200, rv.status_code)
            self.assertEquals('PONG.', rv.data)


class IndexTest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/', '/dashboard/example', '/queries/1', '/admin/status']
        super(IndexTest, self).setUp()


class StatusTest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/status.json']
        super(StatusTest, self).setUp()
class DashboardAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/api/dashboards']
        super(DashboardAPITest, self).setUp()

    def test_get_dashboard(self):
        d1 = dashboard_factory.create()
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/api/dashboards/{0}'.format(d1.slug))
            self.assertEquals(rv.status_code, 200)
            self.assertDictEqual(json.loads(rv.data), d1.to_dict(with_widgets=True))

    def test_get_non_existing_dashboard(self):
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/api/dashboards/not_existing')
            self.assertEquals(rv.status_code, 404)

    def test_create_new_dashboard(self):
        user = user_factory.create()
        with app.test_client() as c, authenticated_user(c, user=user):
            dashboard_name = 'Test Dashboard'
            rv = json_request(c.post, '/api/dashboards', data={'name': dashboard_name})
            self.assertEquals(rv.status_code, 200)
            self.assertEquals(rv.json['name'], 'Test Dashboard')
            self.assertEquals(rv.json['user_id'], user.id)
            self.assertEquals(rv.json['layout'], [])

    def test_update_dashboard(self):
        d = dashboard_factory.create()
        new_name = 'New Name'
        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/dashboards/{0}'.format(d.id),
                              data={'name': new_name, 'layout': '[]'})
            self.assertEquals(rv.status_code, 200)
            self.assertEquals(rv.json['name'], new_name)

    def test_delete_dashboard(self):
        d = dashboard_factory.create()
        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.delete, '/api/dashboards/{0}'.format(d.slug))
            self.assertEquals(rv.status_code, 200)

            d = models.Dashboard.get_by_slug(d.slug)
            self.assertTrue(d.is_archived)
class WidgetAPITest(BaseTestCase):
    def create_widget(self, dashboard, visualization, width=1):
        data = {
            'visualization_id': visualization.id,
            'dashboard_id': dashboard.id,
            'options': {},
            'width': width
        }

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/widgets', data=data)

        return rv

    def test_create_widget(self):
        dashboard = dashboard_factory.create()
        vis = visualization_factory.create()

        rv = self.create_widget(dashboard, vis)
        self.assertEquals(rv.status_code, 200)

        dashboard = models.Dashboard.get(models.Dashboard.id == dashboard.id)
        self.assertEquals(unicode(rv.json['layout']), dashboard.layout)

        self.assertEquals(dashboard.widgets, 1)
        self.assertEquals(rv.json['layout'], [[rv.json['widget']['id']]])
        self.assertEquals(rv.json['new_row'], True)

        rv2 = self.create_widget(dashboard, vis)
        self.assertEquals(dashboard.widgets, 2)
        self.assertEquals(rv2.json['layout'],
                          [[rv.json['widget']['id'], rv2.json['widget']['id']]])
        self.assertEquals(rv2.json['new_row'], False)

        rv3 = self.create_widget(dashboard, vis)
        self.assertEquals(rv3.json['new_row'], True)
        rv4 = self.create_widget(dashboard, vis, width=2)
        self.assertEquals(rv4.json['layout'],
                          [[rv.json['widget']['id'], rv2.json['widget']['id']],
                           [rv3.json['widget']['id']],
                           [rv4.json['widget']['id']]])
        self.assertEquals(rv4.json['new_row'], True)

    def test_delete_widget(self):
        widget = widget_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.delete, '/api/widgets/{0}'.format(widget.id))

            self.assertEquals(rv.status_code, 200)
            dashboard = models.Dashboard.get_by_slug(widget.dashboard.slug)
            self.assertEquals(dashboard.widgets.count(), 0)
            self.assertEquals(dashboard.layout, '[]')

    # TODO: test how it updates the layout
class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/api/queries']
        super(QueryAPITest, self).setUp()

    def test_update_query(self):
        query = query_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/queries/{0}'.format(query.id), data={'name': 'Testing'})
            self.assertEqual(rv.status_code, 200)
            self.assertEquals(rv.json['name'], 'Testing')

    def test_create_query(self):
        user = user_factory.create()
        query_data = {
            'name': 'Testing',
            'query': 'SELECT 1',
            'ttl': 3600
        }

        with app.test_client() as c, authenticated_user(c, user=user):
            rv = json_request(c.post, '/api/queries', data=query_data)

            self.assertEquals(rv.status_code, 200)
            self.assertDictContainsSubset(query_data, rv.json)
            self.assertEquals(rv.json['user']['id'], user.id)
            self.assertIsNotNone(rv.json['api_key'])
            self.assertIsNotNone(rv.json['query_hash'])

            query = models.Query.get_by_id(rv.json['id'])
            self.assertEquals(len(list(query.visualizations)), 1)

    def test_get_query(self):
        query = query_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.get, '/api/queries/{0}'.format(query.id))

            self.assertEquals(rv.status_code, 200)
            d = query.to_dict(with_visualizations=True)
            d.pop('created_at')
            self.assertDictContainsSubset(d, rv.json)

    def test_get_all_queries(self):
        queries = [query_factory.create() for _ in range(10)]

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.get, '/api/queries')

            self.assertEquals(rv.status_code, 200)
            self.assertEquals(len(rv.json), 10)
class VisualizationAPITest(BaseTestCase):
    def test_create_visualization(self):
        query = query_factory.create()
        data = {
            'query_id': query.id,
            'name': 'Chart',
            'description': '',
            'options': {},
            'type': 'CHART'
        }

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/visualizations', data=data)

            self.assertEquals(rv.status_code, 200)
            data.pop('query_id')
            self.assertDictContainsSubset(data, rv.json)

    def test_delete_visualization(self):
        visualization = visualization_factory.create()
        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.delete, '/api/visualizations/{0}'.format(visualization.id))

            self.assertEquals(rv.status_code, 200)
            self.assertEquals(models.Visualization.select().count(), 0)

    def test_update_visualization(self):
        visualization = visualization_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/visualizations/{0}'.format(visualization.id),
                              data={'name': 'After Update'})

            self.assertEquals(rv.status_code, 200)
            self.assertEquals(rv.json['name'], 'After Update')
class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = []
        super(QueryResultAPITest, self).setUp()


class JobAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = []
        super(JobAPITest, self).setUp()
class CsvQueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        super(CsvQueryResultAPITest, self).setUp()
        self.paths = []
        self.query_result = query_result_factory.create()
        self.path = '/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id)

    # TODO: factor out the HMAC authentication tests

    def signature(self, expires):
        return sign(self.query_result.query.api_key, self.path, expires)

    def test_redirect_when_unauthenticated(self):
        with app.test_client() as c:
            rv = c.get(self.path)
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_wrong_signature(self):
        with app.test_client() as c:
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': 'whatever', 'expires': 0})
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_correct_signature_and_wrong_expires(self):
        with app.test_client() as c:
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(0), 'expires': 0})
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_correct_signature_and_no_expires(self):
        with app.test_client() as c:
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(time.time()+3600)})
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_correct_signature_and_expires_too_long(self):
        with app.test_client() as c:
            expires = time.time()+(10*3600)
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
            self.assertEquals(rv.status_code, 302)

    def test_returns_200_for_correct_signature(self):
        with app.test_client() as c:
            expires = time.time()+3600
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
            self.assertEquals(rv.status_code, 200)

    def test_returns_200_for_authenticated_user(self):
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id))
            self.assertEquals(rv.status_code, 200)
class TestLogin(BaseTestCase):
    def setUp(self):
        settings.PASSWORD_LOGIN_ENABLED = True
        super(TestLogin, self).setUp()

    def test_redirects_to_google_login_if_password_disabled(self):
        with app.test_client() as c, patch.object(settings, 'PASSWORD_LOGIN_ENABLED', False):
            rv = c.get('/login')
            self.assertEquals(rv.status_code, 302)
            self.assertTrue(rv.location.endswith(url_for('GoogleAuth.login')))

    def test_get_login_form(self):
        with app.test_client() as c:
            rv = c.get('/login')
            self.assertEquals(rv.status_code, 200)

    def test_submit_non_existing_user(self):
        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login', data={'username': 'arik', 'password': 'password'})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_submit_correct_user_and_password(self):
        user = user_factory.create()
        user.hash_password('password')
        user.save()

        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login', data={'username': user.email, 'password': 'password'})
            self.assertEquals(rv.status_code, 302)
            login_user_mock.assert_called_with(user, remember=False)

    def test_submit_correct_user_and_password_and_remember_me(self):
        user = user_factory.create()
        user.hash_password('password')
        user.save()

        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login', data={'username': user.email, 'password': 'password', 'remember': True})
            self.assertEquals(rv.status_code, 302)
            login_user_mock.assert_called_with(user, remember=True)

    def test_submit_correct_user_and_password_with_next(self):
        user = user_factory.create()
        user.hash_password('password')
        user.save()

        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login?next=/test',
                        data={'username': user.email, 'password': 'password'})
            self.assertEquals(rv.status_code, 302)
            self.assertEquals(rv.location, 'http://localhost/test')
            login_user_mock.assert_called_with(user, remember=False)

    def test_submit_incorrect_user(self):
        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login', data={'username': 'non-existing', 'password': 'password'})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_submit_incorrect_password(self):
        user = user_factory.create()
        user.hash_password('password')
        user.save()

        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login', data={'username': user.email, 'password': 'badbadpassword'})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_submit_empty_password(self):
        user = user_factory.create()

        with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
            rv = c.post('/login', data={'username': user.email, 'password': ''})
            self.assertEquals(rv.status_code, 200)
            self.assertFalse(login_user_mock.called)

    def test_user_already_loggedin(self):
        with app.test_client() as c, authenticated_user(c), patch('redash.controllers.login_user') as login_user_mock:
            rv = c.get('/login')
            self.assertEquals(rv.status_code, 302)
            self.assertFalse(login_user_mock.called)

    # TODO: brute force protection?
class TestLogout(BaseTestCase):
    def test_logout_when_not_loggedin(self):
        with app.test_client() as c:
            rv = c.get('/logout')
            self.assertEquals(rv.status_code, 302)
            self.assertFalse(current_user.is_authenticated())

    def test_logout_when_loggedin(self):
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/')
            self.assertTrue(current_user.is_authenticated())
            rv = c.get('/logout')
            self.assertEquals(rv.status_code, 302)
            self.assertFalse(current_user.is_authenticated())
28  tests/test_models.py  Normal file
@@ -0,0 +1,28 @@
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory


class DashboardTest(BaseTestCase):
    def test_appends_suffix_to_slug_when_duplicate(self):
        d1 = dashboard_factory.create()
        self.assertEquals(d1.slug, 'test')

        d2 = dashboard_factory.create(user=d1.user)
        self.assertNotEquals(d1.slug, d2.slug)

        d3 = dashboard_factory.create(user=d1.user)
        self.assertNotEquals(d1.slug, d3.slug)
        self.assertNotEquals(d2.slug, d3.slug)


class QueryTest(BaseTestCase):
    def test_changing_query_text_changes_hash(self):
        q = query_factory.create()

        old_hash = q.query_hash
        models.Query.update_instance(q.id, query="SELECT 2;")

        q = models.Query.get_by_id(q.id)

        self.assertNotEquals(old_hash, q.query_hash)
27  tests/test_settings.py  Normal file
@@ -0,0 +1,27 @@
from redash import settings as settings
from unittest import TestCase


class TestDatabaseUrlParser(TestCase):
    def test_only_database_name(self):
        config = settings.parse_db_url("postgresql://postgres")
        self.assertEquals(config['name'], 'postgres')

    def test_host_and_database_name(self):
        config = settings.parse_db_url("postgresql://localhost/postgres")
        self.assertEquals(config['name'], 'postgres')
        self.assertEquals(config['host'], 'localhost')

    def test_host_with_port_and_database_name(self):
        config = settings.parse_db_url("postgresql://localhost:5432/postgres")
        self.assertEquals(config['name'], 'postgres')
        self.assertEquals(config['host'], 'localhost')
        self.assertEquals(config['port'], 5432)

    def test_full_url(self):
        config = settings.parse_db_url("postgresql://user:pass@localhost:5432/postgres")
        self.assertEquals(config['name'], 'postgres')
        self.assertEquals(config['host'], 'localhost')
        self.assertEquals(config['port'], 5432)
        self.assertEquals(config['user'], 'user')
        self.assertEquals(config['password'], 'pass')