mirror of
https://github.com/getredash/redash.git
synced 2025-12-25 01:03:20 -05:00
Compare commits
138 Commits
v0.1.31
...
v0.3.2+b10
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c82433e6b4 | ||
|
|
2e84852519 | ||
|
|
da746d15a0 | ||
|
|
1b519269d8 | ||
|
|
5ffaf1aead | ||
|
|
b704406164 | ||
|
|
5c9fe40702 | ||
|
|
fe7c4f96aa | ||
|
|
83909a07fa | ||
|
|
cd99927881 | ||
|
|
8bbb485d5b | ||
|
|
b2ec77668e | ||
|
|
f8302ab65a | ||
|
|
e632cf1c42 | ||
|
|
640557df4f | ||
|
|
9b7227a88b | ||
|
|
aabc912862 | ||
|
|
02d6567347 | ||
|
|
6f8767d1fc | ||
|
|
bc787efc86 | ||
|
|
e0d46c3942 | ||
|
|
5a2bed29aa | ||
|
|
8fbcd0c34d | ||
|
|
97df37536c | ||
|
|
373b9c6a97 | ||
|
|
009726c62d | ||
|
|
69c07a41e9 | ||
|
|
64afd62a1f | ||
|
|
4318468957 | ||
|
|
1af3fc1c96 | ||
|
|
1e11f8032a | ||
|
|
a1a7ca8a0a | ||
|
|
52758fa66e | ||
|
|
fa43ff1365 | ||
|
|
bd15162fb7 | ||
|
|
cc980edc66 | ||
|
|
7fd094ba39 | ||
|
|
68ef489d8c | ||
|
|
21ff1d7482 | ||
|
|
669b1d9a63 | ||
|
|
29531a361c | ||
|
|
c40cf2e7e8 | ||
|
|
7bf391e772 | ||
|
|
fbb84af955 | ||
|
|
d954eb63ef | ||
|
|
1b14161535 | ||
|
|
bcf854604b | ||
|
|
f265d9174a | ||
|
|
970e0e2d04 | ||
|
|
9055865e1c | ||
|
|
f9b6aca8e8 | ||
|
|
d084b5a03c | ||
|
|
a6ab0ff2aa | ||
|
|
1bce924d83 | ||
|
|
f571e8ac6e | ||
|
|
27bf2e642b | ||
|
|
d4ca903a07 | ||
|
|
0f8bbdc9f2 | ||
|
|
fb9f814b00 | ||
|
|
b4f88196dc | ||
|
|
78e748548c | ||
|
|
199cddfbdb | ||
|
|
c0ca602017 | ||
|
|
3471b9853e | ||
|
|
6765d7d89f | ||
|
|
250aa17e63 | ||
|
|
2942d20ac3 | ||
|
|
d32799b2dc | ||
|
|
ff62fbbcf4 | ||
|
|
69ec362a8d | ||
|
|
41d00543d0 | ||
|
|
f890e590e1 | ||
|
|
2aec982577 | ||
|
|
b66d5daad0 | ||
|
|
6ff07b99dc | ||
|
|
1586860e15 | ||
|
|
99dac8f6fd | ||
|
|
5fb910b886 | ||
|
|
fb826ec838 | ||
|
|
5198cc17d3 | ||
|
|
261ecfcb11 | ||
|
|
6582bce0d3 | ||
|
|
db91ca82c1 | ||
|
|
cb7fbc16b0 | ||
|
|
c6c639f16f | ||
|
|
cb5968bc5f | ||
|
|
693b25efc5 | ||
|
|
6eddaeda61 | ||
|
|
349bfa9139 | ||
|
|
b0f75678ee | ||
|
|
0a0f7d7365 | ||
|
|
6d1ff98bda | ||
|
|
065324d256 | ||
|
|
69f7c3417e | ||
|
|
806f57c627 | ||
|
|
e4c7844cae | ||
|
|
6ebfa16740 | ||
|
|
43cfdb8727 | ||
|
|
b31c5be70e | ||
|
|
d84d047470 | ||
|
|
42a0659012 | ||
|
|
6386f0f9aa | ||
|
|
9aaf17d478 | ||
|
|
1f908f9040 | ||
|
|
b51ef059f5 | ||
|
|
a9e135c94f | ||
|
|
212ade2da7 | ||
|
|
f939bf6108 | ||
|
|
3360cd934b | ||
|
|
f35a0970ac | ||
|
|
97ca722a11 | ||
|
|
e554c9bdd7 | ||
|
|
567a732e1e | ||
|
|
5b532d03a0 | ||
|
|
cd838e5a7e | ||
|
|
bb096be00c | ||
|
|
7b78bfe191 | ||
|
|
a45ba0bf30 | ||
|
|
5ce3699a58 | ||
|
|
1cd836ac8d | ||
|
|
c83705119d | ||
|
|
fdd2cfe1d1 | ||
|
|
8327baa2f6 | ||
|
|
84df2fb85c | ||
|
|
cab6f9e58d | ||
|
|
d2ace5c1cf | ||
|
|
5eddddb7b5 | ||
|
|
6408b9e5e1 | ||
|
|
b0159c8246 | ||
|
|
b056e49ec5 | ||
|
|
fef5c287d7 | ||
|
|
09c65ee9dc | ||
|
|
a2385a1779 | ||
|
|
95529ce8f0 | ||
|
|
1a6e5b425a | ||
|
|
87e0962c5a | ||
|
|
1625149221 | ||
|
|
2b13ef1063 |
5
.coveragerc
Normal file
5
.coveragerc
Normal file
@@ -0,0 +1,5 @@
|
||||
[report]
|
||||
omit =
|
||||
*/settings.py
|
||||
*/python?.?/*
|
||||
*/site-packages/nose/*
|
||||
9
.env.example
Normal file
9
.env.example
Normal file
@@ -0,0 +1,9 @@
|
||||
REDASH_CONNECTION_ADAPTER=pg
|
||||
REDASH_CONNECTION_STRING="dbname=data"
|
||||
REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
|
||||
REDASH_GOOGLE_APPS_DOMAIN=
|
||||
REDASH_ADMINS=
|
||||
REDASH_WORKERS_COUNT=2
|
||||
REDASH_COOKIE_SECRET=
|
||||
REDASH_DATABASE_URL='postgresql://rd'
|
||||
REDASH_LOG_LEVEL = "INFO"
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -1,10 +1,12 @@
|
||||
.coveralls.yml
|
||||
.idea
|
||||
*.pyc
|
||||
rd_service/settings.py
|
||||
.coverage
|
||||
rd_ui/dist
|
||||
.DS_Store
|
||||
|
||||
# Vagrant related
|
||||
.vagrant
|
||||
Berksfile.lock
|
||||
rd_service/dump.rdb
|
||||
redash/dump.rdb
|
||||
.env
|
||||
|
||||
2
Honchofile
Normal file
2
Honchofile
Normal file
@@ -0,0 +1,2 @@
|
||||
web: ./manage.py runserver -p $PORT
|
||||
worker: ./manage.py runworkers
|
||||
9
Makefile
9
Makefile
@@ -1,6 +1,6 @@
|
||||
NAME=redash
|
||||
VERSION=0.1
|
||||
FULL_VERSION=$(VERSION).$(CIRCLE_BUILD_NUM)
|
||||
VERSION=`python ./manage.py version`
|
||||
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
|
||||
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(FULL_VERSION).tar.gz
|
||||
|
||||
deps:
|
||||
@@ -10,7 +10,10 @@ deps:
|
||||
cd rd_ui && grunt build
|
||||
|
||||
pack:
|
||||
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
|
||||
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
|
||||
|
||||
upload:
|
||||
python bin/upload_version.py $(FULL_VERSION) $(FILENAME)
|
||||
|
||||
test:
|
||||
nosetests --with-coverage --cover-package=redash tests/*.py
|
||||
|
||||
65
README.md
65
README.md
@@ -1,4 +1,5 @@
|
||||
# [_re:dash_](https://github.com/everythingme/redash)
|
||||

|
||||
|
||||
**_re:dash_** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
|
||||
|
||||
@@ -21,10 +22,15 @@ You can try out the demo instance: http://rd-demo.herokuapp.com/ (login with any
|
||||
|
||||
Due to Heroku dev plan limits, it has a small database of flights (see schema [here](http://rd-demo.herokuapp.com/dashboard/schema)). Also due to another Heroku limitation, it is running with the regular user, hence you can DELETE or INSERT data/tables. Please be nice and don't do this.
|
||||
|
||||
## Getting help
|
||||
|
||||
* [Google Group (mailing list)](https://groups.google.com/forum/#!forum/redash-users): the best place to get updates about new releases or ask general questions.
|
||||
* #redash IRC channel on [Freenode](http://www.freenode.net/).
|
||||
|
||||
## Technology
|
||||
|
||||
* Python
|
||||
* [AngularJS](http://angularjs.org/)
|
||||
* [Tornado](http://tornadoweb.org)
|
||||
* [PostgreSQL](http://www.postgresql.org/) / [AWS Redshift](http://aws.amazon.com/redshift/)
|
||||
* [Redis](http://redis.io)
|
||||
|
||||
@@ -40,62 +46,33 @@ It's very likely that in the future we will switch to [D3.js](http://d3js.org/)
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. Clone the repo:
|
||||
```bash
|
||||
git clone git@github.com:EverythingMe/redash.git
|
||||
```
|
||||
2. Create settings file from the example one (& update relevant settings):
|
||||
```bash
|
||||
cp rd_service/settings_example.py rd_service/settings.py
|
||||
```
|
||||
It's highly recommended that the user you use to connect to the data database (the one you query) is read-only.
|
||||
3. Create the operational databases from rd_service/data/tables.sql
|
||||
3. Install `npm` packages (mainly: Bower & Grunt):
|
||||
```bash
|
||||
cd rd_ui
|
||||
npm install
|
||||
```
|
||||
4. Install `bower` packages:
|
||||
```bash
|
||||
bower install
|
||||
```
|
||||
5. Build the UI:
|
||||
```bash
|
||||
grunt build
|
||||
```
|
||||
6. Install PIP packages:
|
||||
```bash
|
||||
pip install -r ../rd_service/requirements.txt
|
||||
```
|
||||
6. Start the API server:
|
||||
```bash
|
||||
cd ../rd_service
|
||||
python server.py
|
||||
```
|
||||
7. Start the workers:
|
||||
```bash
|
||||
python cli.py worker
|
||||
```
|
||||
8. Open `http://localhost:8888/` and query away.
|
||||
1. Download the [latest release](https://github.com/everythingme/redash/releases).
|
||||
2. Make sure you have `Python` v2.7, `pip`, PostgreSQL and Redis installed.
|
||||
3. Install Python requirements: `pip install -r requirements.txt`.
|
||||
4. Make a copy of the examples settings file: `cp redash/settings_example.py redash/settings.py` and edit the relevant settings.
|
||||
5. Create database: `./manage.py database create_tables`.
|
||||
6. Start the web server: `./manage.py runserver`.
|
||||
7. Start the worker: `./manage.py runworker`.
|
||||
8. Open `http://localhost:5000/` and query away.
|
||||
|
||||
**Need help setting re:dash or one of the dependencies up?** Ping @arikfr on the IRC #redash channel or send a message to the [mailing list](https://groups.google.com/forum/#!forum/redash-users), and he will gladly help.
|
||||
|
||||
## Roadmap
|
||||
|
||||
We plan to release new minor version every 2-3 weeks. Of course, if we get additional help from contributors it will help speed things up.
|
||||
|
||||
Below you can see the "big" features of the next 3 releases (for full list, click on the link):
|
||||
|
||||
### [v0.2](https://github.com/EverythingMe/redash/issues?milestone=1&state=open)
|
||||
|
||||
- Ability to generate multiple visualizations for a single query (dataset) in a more flexible way than today. Also easier extensbility points to add additional visualizations.
|
||||
- Dashboard filters: ability to filter/slice the data you see in a single dashboard using filters (date or selectors).
|
||||
- UI Improvements (better notifications & flows, improved queries page)
|
||||
- Comments on queries.
|
||||
- Support for API access using API keys, instead of Google Login.
|
||||
- UI Improvements (better notifications & flows, improved queries page)
|
||||
|
||||
### [v0.3](https://github.com/EverythingMe/redash/issues?milestone=2&state=open)
|
||||
|
||||
- Support for API access using API keys, instead of Google Login.
|
||||
- Dashboard filters: ability to filter/slice the data you see in a single dashboard using filters (date or selectors).
|
||||
- Multiple databases support (including other database type than PostgreSQL).
|
||||
- Scheduled reports by email.
|
||||
- Comments on queries.
|
||||
|
||||
### [v0.4](https://github.com/EverythingMe/redash/issues?milestone=3&state=open)
|
||||
|
||||
|
||||
@@ -15,7 +15,8 @@ if __name__ == '__main__':
|
||||
params = json.dumps({
|
||||
'tag_name': 'v{0}'.format(version),
|
||||
'name': 're:dash v{0}'.format(version),
|
||||
'target_commitish': commit_sha
|
||||
'target_commitish': commit_sha,
|
||||
'prerelease': True
|
||||
})
|
||||
|
||||
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
|
||||
|
||||
@@ -8,8 +8,14 @@ machine:
|
||||
dependencies:
|
||||
pre:
|
||||
- make deps
|
||||
- pip install requests
|
||||
- pip install requests coverage nose
|
||||
- pip install -r requirements.txt
|
||||
cache_directories:
|
||||
- rd_ui/node_modules/
|
||||
- rd_ui/app/bower_components/
|
||||
test:
|
||||
override:
|
||||
- make test
|
||||
post:
|
||||
- make pack
|
||||
deployment:
|
||||
|
||||
2
dev_requirements.txt
Normal file
2
dev_requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
nose==1.3.0
|
||||
coverage==3.7.1
|
||||
60
rd_service/cli.py → manage.py
Normal file → Executable file
60
rd_service/cli.py → manage.py
Normal file → Executable file
@@ -1,30 +1,37 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
CLI to start the workers.
|
||||
|
||||
TODO: move API server startup here.
|
||||
CLI to manage redash.
|
||||
"""
|
||||
import atfork
|
||||
atfork.monkeypatch_os_fork_functions()
|
||||
import atfork.stdlib_fixer
|
||||
atfork.stdlib_fixer.fix_logging_module()
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import urlparse
|
||||
import redis
|
||||
import time
|
||||
import settings
|
||||
import data
|
||||
from redash import settings, app, db, models, data_manager, __version__
|
||||
from flask.ext.script import Manager
|
||||
|
||||
manager = Manager(app)
|
||||
database_manager = Manager(help="Manages the database (create/drop tables).")
|
||||
|
||||
@manager.command
|
||||
def version():
|
||||
"""Displays re:dash version."""
|
||||
print __version__
|
||||
|
||||
|
||||
def start_workers(data_manager):
|
||||
@manager.command
|
||||
def runworkers():
|
||||
"""Starts the re:dash query executors/workers."""
|
||||
|
||||
try:
|
||||
old_workers = data_manager.redis_connection.smembers('workers')
|
||||
data_manager.redis_connection.delete('workers')
|
||||
|
||||
logging.info("Cleaning old workers: %s", old_workers)
|
||||
|
||||
data_manager.start_workers(settings.WORKERS_COUNT, settings.CONNECTION_STRING)
|
||||
data_manager.start_workers(settings.WORKERS_COUNT, settings.CONNECTION_ADAPTER, settings.CONNECTION_STRING)
|
||||
logging.info("Workers started.")
|
||||
|
||||
while True:
|
||||
@@ -38,22 +45,29 @@ def start_workers(data_manager):
|
||||
logging.warning("Exiting; waiting for threads")
|
||||
data_manager.stop_workers()
|
||||
|
||||
@manager.shell
|
||||
def make_shell_context():
|
||||
return dict(app=app, db=db, models=models)
|
||||
|
||||
@database_manager.command
|
||||
def create_tables():
|
||||
"""Creates the database tables."""
|
||||
from redash.models import create_db
|
||||
|
||||
create_db(True, False)
|
||||
|
||||
@database_manager.command
|
||||
def drop_tables():
|
||||
"""Drop the database tables."""
|
||||
from redash.models import create_db
|
||||
|
||||
create_db(False, True)
|
||||
|
||||
manager.add_command("database", database_manager)
|
||||
|
||||
if __name__ == '__main__':
|
||||
channel = logging.StreamHandler()
|
||||
logging.getLogger().addHandler(channel)
|
||||
logging.getLogger().setLevel(settings.LOG_LEVEL)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("command")
|
||||
args = parser.parse_args()
|
||||
|
||||
url = urlparse.urlparse(settings.REDIS_URL)
|
||||
redis_connection = redis.StrictRedis(host=url.hostname, port=url.port, db=0, password=url.password)
|
||||
data_manager = data.Manager(redis_connection, settings.INTERNAL_DB_CONNECTION_STRING, settings.MAX_CONNECTIONS)
|
||||
|
||||
if args.command == "worker":
|
||||
start_workers(data_manager)
|
||||
else:
|
||||
print "Unknown command"
|
||||
|
||||
manager.run()
|
||||
13
migrations/add_created_at_field.py
Normal file
13
migrations/add_created_at_field.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.Dashboard, models.Dashboard.created_at, 'created_at')
|
||||
migrator.add_column(models.Widget, models.Widget.created_at, 'created_at')
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/change_query_id_on_widgets_to_null.py
Normal file
13
migrations/change_query_id_on_widgets_to_null.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Widget, models.Widget.query_id, True)
|
||||
migrator.set_nullable(models.Widget, models.Widget.type, True)
|
||||
|
||||
db.close_db(None)
|
||||
70
migrations/create_visualizations.py
Normal file
70
migrations/create_visualizations.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import json
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
default_options = {"series": {"type": "column"}}
|
||||
|
||||
db.connect_db()
|
||||
|
||||
if not models.Visualization.table_exists():
|
||||
print "Creating visualization table..."
|
||||
models.Visualization.create_table()
|
||||
|
||||
with db.database.transaction():
|
||||
migrator = Migrator(db.database)
|
||||
print "Adding visualization_id to widgets:"
|
||||
field = models.Widget.visualization
|
||||
field.null = True
|
||||
migrator.add_column(models.Widget, models.Widget.visualization, 'visualization_id')
|
||||
|
||||
print 'Creating TABLE visualizations for all queries...'
|
||||
for query in models.Query.select():
|
||||
vis = models.Visualization(query=query, name="Table",
|
||||
description=query.description or "",
|
||||
type="TABLE", options="{}")
|
||||
vis.save()
|
||||
|
||||
print 'Creating COHORT visualizations for all queries named like %cohort%...'
|
||||
for query in models.Query.select().where(models.Query.name ** "%cohort%"):
|
||||
vis = models.Visualization(query=query, name="Cohort",
|
||||
description=query.description or "",
|
||||
type="COHORT", options="{}")
|
||||
vis.save()
|
||||
|
||||
print 'Create visualization for all widgets (unless exists already):'
|
||||
for widget in models.Widget.select():
|
||||
print 'Processing widget id: %d:' % widget.id
|
||||
vis_type = widget.type.upper()
|
||||
if vis_type == 'GRID':
|
||||
vis_type = 'TABLE'
|
||||
|
||||
query = models.Query.get_by_id(widget.query_id)
|
||||
vis = query.visualizations.where(models.Visualization.type == vis_type).first()
|
||||
if vis:
|
||||
print '... visualization type (%s) found.' % vis_type
|
||||
widget.visualization = vis
|
||||
widget.save()
|
||||
else:
|
||||
vis_name = vis_type.title()
|
||||
|
||||
options = json.loads(widget.options)
|
||||
vis_options = {"series": options} if options else default_options
|
||||
vis_options = json.dumps(vis_options)
|
||||
|
||||
vis = models.Visualization(query=query, name=vis_name,
|
||||
description=query.description or "",
|
||||
type=vis_type, options=vis_options)
|
||||
|
||||
print '... Created visualization for type: %s' % vis_type
|
||||
vis.save()
|
||||
widget.visualization = vis
|
||||
widget.save()
|
||||
|
||||
with db.database.transaction():
|
||||
migrator = Migrator(db.database)
|
||||
print "Setting visualization_id as not null..."
|
||||
migrator.set_nullable(models.Widget, models.Widget.visualization, False)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,174 +0,0 @@
|
||||
"""
|
||||
Django ORM based models to describe the data model of re:dash.
|
||||
"""
|
||||
import hashlib
|
||||
import json
|
||||
import time
|
||||
from django.db import models
|
||||
from django.template.defaultfilters import slugify
|
||||
import utils
|
||||
|
||||
|
||||
class QueryResult(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
query_hash = models.CharField(max_length=32)
|
||||
query = models.TextField()
|
||||
data = models.TextField()
|
||||
runtime = models.FloatField()
|
||||
retrieved_at = models.DateTimeField()
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'query_results'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'query_hash': self.query_hash,
|
||||
'query': self.query,
|
||||
'data': json.loads(self.data),
|
||||
'runtime': self.runtime,
|
||||
'retrieved_at': self.retrieved_at
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
|
||||
|
||||
|
||||
class Query(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
latest_query_data = models.ForeignKey(QueryResult)
|
||||
name = models.CharField(max_length=255)
|
||||
description = models.CharField(max_length=4096)
|
||||
query = models.TextField()
|
||||
query_hash = models.CharField(max_length=32)
|
||||
api_key = models.CharField(max_length=40)
|
||||
ttl = models.IntegerField()
|
||||
user = models.CharField(max_length=360)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'queries'
|
||||
|
||||
def to_dict(self, with_result=True, with_stats=False):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'latest_query_data_id': self.latest_query_data_id,
|
||||
'name': self.name,
|
||||
'description': self.description,
|
||||
'query': self.query,
|
||||
'query_hash': self.query_hash,
|
||||
'ttl': self.ttl,
|
||||
'user': self.user,
|
||||
'api_key': self.api_key,
|
||||
'created_at': self.created_at,
|
||||
}
|
||||
|
||||
if with_stats:
|
||||
d['avg_runtime'] = self.avg_runtime
|
||||
d['min_runtime'] = self.min_runtime
|
||||
d['max_runtime'] = self.max_runtime
|
||||
d['last_retrieved_at'] = self.last_retrieved_at
|
||||
d['times_retrieved'] = self.times_retrieved
|
||||
|
||||
if with_result and self.latest_query_data_id:
|
||||
d['latest_query_data'] = self.latest_query_data.to_dict()
|
||||
|
||||
return d
|
||||
|
||||
@classmethod
|
||||
def all_queries(cls):
|
||||
query = """SELECT queries.*, query_stats.*
|
||||
FROM queries
|
||||
LEFT OUTER JOIN
|
||||
(SELECT qu.query_hash,
|
||||
count(0) AS "times_retrieved",
|
||||
avg(runtime) AS "avg_runtime",
|
||||
min(runtime) AS "min_runtime",
|
||||
max(runtime) AS "max_runtime",
|
||||
max(retrieved_at) AS "last_retrieved_at"
|
||||
FROM queries qu
|
||||
JOIN query_results qr ON qu.query_hash=qr.query_hash
|
||||
GROUP BY qu.query_hash) query_stats ON query_stats.query_hash = queries.query_hash
|
||||
"""
|
||||
return cls.objects.raw(query)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.query_hash = utils.gen_query_hash(self.query)
|
||||
self._set_api_key()
|
||||
super(Query, self).save(*args, **kwargs)
|
||||
|
||||
def _set_api_key(self):
|
||||
if not self.api_key:
|
||||
self.api_key = hashlib.sha1(
|
||||
u''.join([str(time.time()), self.query, self.user, self.name])).hexdigest()
|
||||
|
||||
def __unicode__(self):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class Dashboard(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
slug = models.CharField(max_length=140)
|
||||
name = models.CharField(max_length=100)
|
||||
user = models.CharField(max_length=360)
|
||||
layout = models.TextField()
|
||||
is_archived = models.BooleanField(default=False)
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'dashboards'
|
||||
|
||||
def to_dict(self, with_widgets=False):
|
||||
layout = json.loads(self.layout)
|
||||
|
||||
if with_widgets:
|
||||
widgets = {w.id: w.to_dict() for w in self.widgets.all()}
|
||||
widgets_layout = map(lambda row: map(lambda widget_id: widgets.get(widget_id, None), row), layout)
|
||||
else:
|
||||
widgets_layout = None
|
||||
|
||||
return {
|
||||
'id': self.id,
|
||||
'slug': self.slug,
|
||||
'name': self.name,
|
||||
'user': self.user,
|
||||
'layout': layout,
|
||||
'widgets': widgets_layout
|
||||
}
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# TODO: make sure slug is unique
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
super(Dashboard, self).save(*args, **kwargs)
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s=%s" % (self.id, self.name)
|
||||
|
||||
|
||||
class Widget(models.Model):
|
||||
id = models.AutoField(primary_key=True)
|
||||
query = models.ForeignKey(Query)
|
||||
type = models.CharField(max_length=100)
|
||||
width = models.IntegerField()
|
||||
options = models.TextField()
|
||||
dashboard = models.ForeignKey(Dashboard, related_name='widgets')
|
||||
|
||||
class Meta:
|
||||
app_label = 'redash'
|
||||
db_table = 'widgets'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'query': self.query.to_dict(),
|
||||
'type': self.type,
|
||||
'width': self.width,
|
||||
'options': json.loads(self.options),
|
||||
'dashboard_id': self.dashboard_id
|
||||
}
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%s=>%s" % (self.id, self.dashboard_id)
|
||||
@@ -1,46 +0,0 @@
|
||||
BEGIN;
|
||||
CREATE TABLE "query_results" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"query_hash" varchar(32) NOT NULL,
|
||||
"query" text NOT NULL,
|
||||
"data" text NOT NULL,
|
||||
"runtime" double precision NOT NULL,
|
||||
"retrieved_at" timestamp with time zone NOT NULL
|
||||
)
|
||||
;
|
||||
CREATE TABLE "queries" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"latest_query_data_id" integer REFERENCES "query_results" ("id") DEFERRABLE INITIALLY DEFERRED,
|
||||
"name" varchar(255) NOT NULL,
|
||||
"description" varchar(4096),
|
||||
"query" text NOT NULL,
|
||||
"query_hash" varchar(32) NOT NULL,
|
||||
"api_key" varchar(40),
|
||||
"ttl" integer NOT NULL,
|
||||
"user" varchar(360) NOT NULL,
|
||||
"created_at" timestamp with time zone NOT NULL
|
||||
)
|
||||
;
|
||||
CREATE TABLE "dashboards" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"slug" varchar(140) NOT NULL,
|
||||
"name" varchar(100) NOT NULL,
|
||||
"user" varchar(360) NOT NULL,
|
||||
"layout" text NOT NULL,
|
||||
"is_archived" boolean NOT NULL
|
||||
)
|
||||
;
|
||||
CREATE TABLE "widgets" (
|
||||
"id" serial NOT NULL PRIMARY KEY,
|
||||
"query_id" integer NOT NULL REFERENCES "queries" ("id") DEFERRABLE INITIALLY DEFERRED,
|
||||
"type" varchar(100) NOT NULL,
|
||||
"width" integer NOT NULL,
|
||||
"options" text NOT NULL,
|
||||
"dashboard_id" integer NOT NULL REFERENCES "dashboards" ("id") DEFERRABLE INITIALLY DEFERRED
|
||||
)
|
||||
;
|
||||
CREATE INDEX "queries_latest_query_data_id" ON "queries" ("latest_query_data_id");
|
||||
CREATE INDEX "widgets_query_id" ON "widgets" ("query_id");
|
||||
CREATE INDEX "widgets_dashboard_id" ON "widgets" ("dashboard_id");
|
||||
|
||||
COMMIT;
|
||||
@@ -1,10 +0,0 @@
|
||||
psycopg2==2.5.1
|
||||
redis==2.7.5
|
||||
tornado==3.0.2
|
||||
sqlparse==0.1.8
|
||||
Django==1.5.4
|
||||
django-db-pool==0.0.10
|
||||
qr==0.6.0
|
||||
python-dateutil==2.1
|
||||
setproctitle==1.1.8
|
||||
atfork==0.1.2
|
||||
@@ -1,355 +0,0 @@
|
||||
"""
|
||||
Tornado based API implementation for re:dash.
|
||||
|
||||
Also at the moment the Tornado server is used to serve the static assets (and the Angular.js app),
|
||||
but this is only due to configuration issues and temporary.
|
||||
|
||||
Usage:
|
||||
python server.py [--port=8888] [--debug] [--static=..]
|
||||
|
||||
port - port to listen to
|
||||
debug - enable debug mode (extensive logging, restart on code change)
|
||||
static - static assets path
|
||||
|
||||
If static option isn't specified it will be taken from settings.py.
|
||||
"""
|
||||
import csv
|
||||
import hashlib
|
||||
import json
|
||||
import numbers
|
||||
import os
|
||||
import urlparse
|
||||
import logging
|
||||
import cStringIO
|
||||
import datetime
|
||||
import dateutil.parser
|
||||
import redis
|
||||
import sqlparse
|
||||
import tornado.ioloop
|
||||
import tornado.web
|
||||
import tornado.auth
|
||||
import tornado.options
|
||||
import settings
|
||||
import time
|
||||
from data import utils
|
||||
import data
|
||||
|
||||
|
||||
class BaseHandler(tornado.web.RequestHandler):
|
||||
def initialize(self):
|
||||
self.data_manager = self.application.settings.get('data_manager', None)
|
||||
self.redis_connection = self.application.settings['redis_connection']
|
||||
|
||||
def get_current_user(self):
|
||||
user = self.get_secure_cookie("user")
|
||||
return user
|
||||
|
||||
def write_json(self, response, encode=True):
|
||||
if encode:
|
||||
response = json.dumps(response, cls=utils.JSONEncoder)
|
||||
self.set_header("Content-Type", "application/json; charset=UTF-8")
|
||||
self.write(response)
|
||||
|
||||
|
||||
class BaseAuthenticatedHandler(BaseHandler):
|
||||
@tornado.web.authenticated
|
||||
def prepare(self):
|
||||
pass
|
||||
|
||||
|
||||
class PingHandler(tornado.web.RequestHandler):
|
||||
def get(self):
|
||||
self.write("PONG")
|
||||
|
||||
|
||||
class GoogleLoginHandler(tornado.web.RequestHandler,
|
||||
tornado.auth.GoogleMixin):
|
||||
@tornado.web.asynchronous
|
||||
@tornado.gen.coroutine
|
||||
def get(self):
|
||||
if self.get_argument("openid.mode", None):
|
||||
user = yield self.get_authenticated_user()
|
||||
|
||||
if user['email'] in settings.ALLOWED_USERS or user['email'].endswith("@%s" % settings.GOOGLE_APPS_DOMAIN):
|
||||
logging.info("Authenticated: %s", user['email'])
|
||||
self.set_secure_cookie("user", user['email'])
|
||||
self.redirect("/")
|
||||
else:
|
||||
logging.error("Failed logging in with: %s", user)
|
||||
self.authenticate_redirect()
|
||||
else:
|
||||
self.authenticate_redirect()
|
||||
|
||||
|
||||
class MainHandler(BaseAuthenticatedHandler):
|
||||
def get(self, *args):
|
||||
email_md5 = hashlib.md5(self.current_user.lower()).hexdigest()
|
||||
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
user = {
|
||||
'gravatar_url': gravatar_url,
|
||||
'is_admin': self.current_user in settings.ADMINS,
|
||||
'name': self.current_user
|
||||
}
|
||||
|
||||
self.render("index.html", user=json.dumps(user), analytics=settings.ANALYTICS)
|
||||
|
||||
|
||||
class QueryFormatHandler(BaseAuthenticatedHandler):
|
||||
def post(self):
|
||||
arguments = json.loads(self.request.body)
|
||||
query = arguments.get("query", "")
|
||||
|
||||
self.write(sqlparse.format(query, reindent=True, keyword_case='upper'))
|
||||
|
||||
|
||||
class StatusHandler(BaseAuthenticatedHandler):
|
||||
def get(self):
|
||||
status = {}
|
||||
info = self.redis_connection.info()
|
||||
status['redis_used_memory'] = info['used_memory_human']
|
||||
|
||||
status['queries_count'] = data.models.Query.objects.count()
|
||||
status['query_results_count'] = data.models.QueryResult.objects.count()
|
||||
status['dashboards_count'] = data.models.Dashboard.objects.count()
|
||||
status['widgets_count'] = data.models.Widget.objects.count()
|
||||
|
||||
status['workers'] = [self.redis_connection.hgetall(w)
|
||||
for w in self.redis_connection.smembers('workers')]
|
||||
|
||||
manager_status = self.redis_connection.hgetall('manager:status')
|
||||
status['manager'] = manager_status
|
||||
status['manager']['queue_size'] = self.redis_connection.zcard('jobs')
|
||||
|
||||
self.write_json(status)
|
||||
|
||||
|
||||
class WidgetsHandler(BaseAuthenticatedHandler):
|
||||
def post(self, widget_id=None):
|
||||
widget_properties = json.loads(self.request.body)
|
||||
widget_properties['options'] = json.dumps(widget_properties['options'])
|
||||
widget = data.models.Widget(**widget_properties)
|
||||
widget.save()
|
||||
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
new_row = True
|
||||
|
||||
if len(layout) == 0 or widget.width == 2:
|
||||
layout.append([widget.id])
|
||||
elif len(layout[-1]) == 1:
|
||||
neighbour_widget = data.models.Widget.objects.get(pk=layout[-1][0])
|
||||
if neighbour_widget.width == 1:
|
||||
layout[-1].append(widget.id)
|
||||
new_row = False
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
self.write_json({'widget': widget.to_dict(), 'layout': layout, 'new_row': new_row})
|
||||
|
||||
def delete(self, widget_id):
|
||||
widget_id = int(widget_id)
|
||||
widget = data.models.Widget.objects.get(pk=widget_id)
|
||||
# TODO: reposition existing ones
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
layout = map(lambda row: filter(lambda w: w != widget_id, row), layout)
|
||||
layout = filter(lambda row: len(row) > 0, layout)
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
widget.delete()
|
||||
|
||||
|
||||
class DashboardHandler(BaseAuthenticatedHandler):
|
||||
def get(self, dashboard_slug=None):
|
||||
if dashboard_slug:
|
||||
dashboard = data.models.Dashboard.objects.prefetch_related('widgets__query__latest_query_data').get(slug=dashboard_slug)
|
||||
self.write_json(dashboard.to_dict(with_widgets=True))
|
||||
else:
|
||||
dashboards = [d.to_dict() for d in
|
||||
data.models.Dashboard.objects.filter(is_archived=False)]
|
||||
self.write_json(dashboards)
|
||||
|
||||
def post(self, dashboard_id):
|
||||
if dashboard_id:
|
||||
dashboard_properties = json.loads(self.request.body)
|
||||
dashboard = data.models.Dashboard.objects.get(pk=dashboard_id)
|
||||
dashboard.layout = dashboard_properties['layout']
|
||||
dashboard.name = dashboard_properties['name']
|
||||
dashboard.save()
|
||||
|
||||
self.write_json(dashboard.to_dict(with_widgets=True))
|
||||
else:
|
||||
dashboard_properties = json.loads(self.request.body)
|
||||
dashboard = data.models.Dashboard(name=dashboard_properties['name'],
|
||||
user=self.current_user,
|
||||
layout='[]')
|
||||
dashboard.save()
|
||||
self.write_json(dashboard.to_dict())
|
||||
|
||||
def delete(self, dashboard_slug):
|
||||
dashboard = data.models.Dashboard.objects.get(slug=dashboard_slug)
|
||||
dashboard.is_archived = True
|
||||
dashboard.save()
|
||||
|
||||
|
||||
class QueriesHandler(BaseAuthenticatedHandler):
|
||||
def post(self, id=None):
|
||||
query_def = json.loads(self.request.body)
|
||||
if 'created_at' in query_def:
|
||||
query_def['created_at'] = dateutil.parser.parse(query_def['created_at'])
|
||||
|
||||
query_def.pop('latest_query_data', None)
|
||||
|
||||
if id:
|
||||
query = data.models.Query(**query_def)
|
||||
fields = query_def.keys()
|
||||
fields.remove('id')
|
||||
query.save(update_fields=fields)
|
||||
else:
|
||||
query_def['user'] = self.current_user
|
||||
query = data.models.Query(**query_def)
|
||||
query.save()
|
||||
|
||||
self.write_json(query.to_dict(with_result=False))
|
||||
|
||||
def get(self, id=None):
|
||||
if id:
|
||||
q = data.models.Query.objects.get(pk=id)
|
||||
if q:
|
||||
self.write_json(q.to_dict())
|
||||
else:
|
||||
self.send_error(404)
|
||||
else:
|
||||
self.write_json([q.to_dict(with_result=False, with_stats=True) for q in data.models.Query.all_queries()])
|
||||
|
||||
|
||||
class QueryResultsHandler(BaseAuthenticatedHandler):
|
||||
def get(self, query_result_id):
|
||||
query_result = self.data_manager.get_query_result_by_id(query_result_id)
|
||||
if query_result:
|
||||
self.write_json({'query_result': query_result.to_dict(parse_data=True)})
|
||||
else:
|
||||
self.send_error(404)
|
||||
|
||||
def post(self, _):
|
||||
params = json.loads(self.request.body)
|
||||
|
||||
if params['ttl'] == 0:
|
||||
query_result = None
|
||||
else:
|
||||
query_result = self.data_manager.get_query_result(params['query'], int(params['ttl']))
|
||||
|
||||
if query_result:
|
||||
self.write_json({'query_result': query_result.to_dict(parse_data=True)})
|
||||
else:
|
||||
job = self.data_manager.add_job(params['query'], data.Job.HIGH_PRIORITY)
|
||||
self.write({'job': job.to_dict()})
|
||||
|
||||
|
||||
class CsvQueryResultsHandler(BaseAuthenticatedHandler):
|
||||
def get_current_user(self):
|
||||
user = super(CsvQueryResultsHandler, self).get_current_user()
|
||||
if not user:
|
||||
api_key = self.get_argument("api_key", None)
|
||||
query = data.models.Query.objects.get(pk=self.path_args[0])
|
||||
|
||||
if query.api_key and query.api_key == api_key:
|
||||
user = "API-Key=%s" % api_key
|
||||
|
||||
return user
|
||||
|
||||
def get(self, query_id, result_id=None):
|
||||
if not result_id:
|
||||
query = data.models.Query.objects.get(pk=query_id)
|
||||
if query:
|
||||
result_id = query.latest_query_data_id
|
||||
|
||||
query_result = result_id and self.data_manager.get_query_result_by_id(result_id)
|
||||
if query_result:
|
||||
self.set_header("Content-Type", "text/csv; charset=UTF-8")
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(query_result.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
for k, v in row.iteritems():
|
||||
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
|
||||
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
|
||||
|
||||
writer.writerow(row)
|
||||
|
||||
self.write(s.getvalue())
|
||||
else:
|
||||
self.send_error(404)
|
||||
|
||||
|
||||
class JobsHandler(BaseAuthenticatedHandler):
|
||||
def get(self, job_id=None):
|
||||
if job_id:
|
||||
# TODO: if finished, include the query result
|
||||
job = data.Job.load(self.data_manager.redis_connection, job_id)
|
||||
self.write({'job': job.to_dict()})
|
||||
else:
|
||||
raise NotImplemented
|
||||
|
||||
def delete(self, job_id):
|
||||
job = data.Job.load(self.data_manager.redis_connection, job_id)
|
||||
job.cancel()
|
||||
|
||||
|
||||
def get_application(static_path, is_debug, redis_connection, data_manager):
|
||||
return tornado.web.Application([(r"/", MainHandler),
|
||||
(r"/ping", PingHandler),
|
||||
(r"/api/queries/([0-9]*)/results(?:/([0-9]*))?.csv", CsvQueryResultsHandler),
|
||||
(r"/api/queries/format", QueryFormatHandler),
|
||||
(r"/api/queries(?:/([0-9]*))?", QueriesHandler),
|
||||
(r"/api/query_results(?:/([0-9]*))?", QueryResultsHandler),
|
||||
(r"/api/jobs/(.*)", JobsHandler),
|
||||
(r"/api/widgets(?:/([0-9]*))?", WidgetsHandler),
|
||||
(r"/api/dashboards(?:/(.*))?", DashboardHandler),
|
||||
(r"/admin/(.*)", MainHandler),
|
||||
(r"/dashboard/(.*)", MainHandler),
|
||||
(r"/queries(.*)", MainHandler),
|
||||
(r"/login", GoogleLoginHandler),
|
||||
(r"/status.json", StatusHandler),
|
||||
(r"/(.*)", tornado.web.StaticFileHandler,
|
||||
{"path": static_path})],
|
||||
template_path=static_path,
|
||||
static_path=static_path,
|
||||
debug=is_debug,
|
||||
login_url="/login",
|
||||
cookie_secret=settings.COOKIE_SECRET,
|
||||
redis_connection=redis_connection,
|
||||
data_manager=data_manager)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
tornado.options.define("port", default=8888, type=int)
|
||||
tornado.options.define("debug", default=False, type=bool)
|
||||
tornado.options.define("static", default=settings.STATIC_ASSETS_PATH, type=str)
|
||||
|
||||
tornado.options.parse_command_line()
|
||||
|
||||
root_path = os.path.dirname(__file__)
|
||||
static_path = os.path.abspath(os.path.join(root_path, tornado.options.options.static))
|
||||
|
||||
url = urlparse.urlparse(settings.REDIS_URL)
|
||||
redis_connection = redis.StrictRedis(host=url.hostname, port=url.port, db=0, password=url.password)
|
||||
data_manager = data.Manager(redis_connection, settings.INTERNAL_DB_CONNECTION_STRING,
|
||||
settings.MAX_CONNECTIONS)
|
||||
|
||||
logging.info("re:dash web server stating on port: %d...", tornado.options.options.port)
|
||||
logging.info("UI assets path: %s...", static_path)
|
||||
|
||||
application = get_application(static_path, tornado.options.options.debug,
|
||||
redis_connection, data_manager)
|
||||
|
||||
application.listen(tornado.options.options.port)
|
||||
tornado.ioloop.IOLoop.instance().start()
|
||||
@@ -1,36 +0,0 @@
|
||||
"""
|
||||
Example settings module. You should make your own copy as settings.py and enter the real settings.
|
||||
"""
|
||||
|
||||
import django.conf
|
||||
|
||||
REDIS_URL = "redis://localhost:6379"
|
||||
# Connection string for the database that is used to run queries against
|
||||
CONNECTION_STRING = "user= password= host= port=5439 dbname="
|
||||
# Connection string for the operational databases (where we store the queries, results, etc)
|
||||
INTERNAL_DB_CONNECTION_STRING = "dbname=postgres"
|
||||
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
|
||||
# access
|
||||
GOOGLE_APPS_DOMAIN = ""
|
||||
# Email addresses of specific users not from the above set Google Apps Domain, that you want to
|
||||
# allow access to re:dash
|
||||
ALLOWED_USERS = []
|
||||
# Email addresses of admin users
|
||||
ADMINS = []
|
||||
STATIC_ASSETS_PATH = "../rd_ui/dist/"
|
||||
WORKERS_COUNT = 2
|
||||
MAX_CONNECTIONS = 3
|
||||
COOKIE_SECRET = "c292a0a3aa32397cdb050e233733900f"
|
||||
LOG_LEVEL = "INFO"
|
||||
ANALYTICS = ""
|
||||
|
||||
# Configuration of the operational database for the Django models
|
||||
django.conf.settings.configure(DATABASES = { 'default': {
|
||||
'ENGINE': 'dbpool.db.backends.postgresql_psycopg2',
|
||||
'OPTIONS': {'MAX_CONNS': 10, 'MIN_CONNS': 1},
|
||||
'NAME': 'postgres',
|
||||
'USER': '',
|
||||
'PASSWORD': '',
|
||||
'HOST': '',
|
||||
'PORT': '',
|
||||
},}, TIME_ZONE = 'UTC')
|
||||
@@ -31,6 +31,7 @@
|
||||
</button>
|
||||
<a class="navbar-brand" href="/"><strong>re:dash</strong></a>
|
||||
</div>
|
||||
{% raw %}
|
||||
<div class="collapse navbar-collapse navbar-ex1-collapse">
|
||||
<ul class="nav navbar-nav">
|
||||
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
|
||||
@@ -42,13 +43,13 @@
|
||||
<a href="#" ng-bind="name"></a>
|
||||
<ul class="dropdown-menu">
|
||||
<li ng-repeat="dashboard in group" role="presentation">
|
||||
<a role="menu-item" ng-href="/dashboard/{{!dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
<a role="menu-item" ng-href="/dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</span>
|
||||
<li ng-repeat="dashboard in otherDashboards">
|
||||
<a role="menu-item" ng-href="/dashboard/{{!dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
<a role="menu-item" ng-href="/dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
</li>
|
||||
<li class="divider"></li>
|
||||
<li><a data-toggle="modal" href="#new_dashboard_dialog">New Dashboard</a></li>
|
||||
@@ -64,10 +65,11 @@
|
||||
</ul>
|
||||
<ul class="nav navbar-nav navbar-right">
|
||||
<p class="navbar-text avatar">
|
||||
<img ng-src="{{!currentUser.gravatar_url}}" class="img-circle" alt="{{!currentUser.name}}" width="40" height="40"/>
|
||||
<img ng-src="{{currentUser.gravatar_url}}" class="img-circle" alt="{{currentUser.name}}" width="40" height="40"/>
|
||||
</p>
|
||||
</ul>
|
||||
</div>
|
||||
{% endraw %}
|
||||
|
||||
</div>
|
||||
</nav>
|
||||
@@ -119,13 +121,13 @@
|
||||
<!-- endbuild -->
|
||||
|
||||
<script>
|
||||
var currentUser = {% raw user %};
|
||||
var currentUser = {{ user|safe }};
|
||||
|
||||
currentUser.canEdit = function(object) {
|
||||
return object.user && (object.user.indexOf(currentUser.name) != -1);
|
||||
};
|
||||
|
||||
{% raw analytics %}
|
||||
{{ analytics|safe }}
|
||||
</script>
|
||||
|
||||
</body>
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
var WidgetCtrl = function ($scope, $http, $location, Query) {
|
||||
$scope.deleteWidget = function() {
|
||||
if (!confirm('Are you sure you want to remove "' + $scope.widget.query.name + '" from the dashboard?')) {
|
||||
if (!confirm('Are you sure you want to remove "' + $scope.widget.visualization.name + '" from the dashboard?')) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -20,11 +20,12 @@
|
||||
});
|
||||
};
|
||||
|
||||
$scope.open = function(query) {
|
||||
$scope.open = function(query, visualization) {
|
||||
$location.path('/queries/' + query.id);
|
||||
$location.hash(visualization.id);
|
||||
}
|
||||
|
||||
$scope.query = new Query($scope.widget.query);
|
||||
$scope.query = new Query($scope.widget.visualization.query);
|
||||
$scope.queryResult = $scope.query.getQueryResult();
|
||||
|
||||
$scope.updateTime = (new Date($scope.queryResult.getUpdatedAt())).toISOString();
|
||||
@@ -33,12 +34,14 @@
|
||||
$scope.updateTime = '';
|
||||
}
|
||||
|
||||
var QueryFiddleCtrl = function ($scope, $window, $routeParams, $http, $location, growl, notifications, Query) {
|
||||
var QueryFiddleCtrl = function ($scope, $window, $location, $routeParams, $http, $location, growl, notifications, Query, Visualization) {
|
||||
var DEFAULT_TAB = 'table';
|
||||
var pristineHash = null;
|
||||
$scope.dirty = undefined;
|
||||
|
||||
var leavingPageText = "You will lose your changes if you leave";
|
||||
|
||||
$scope.dirty = undefined;
|
||||
$scope.newVisualization = undefined;
|
||||
|
||||
$window.onbeforeunload = function(){
|
||||
if (currentUser.canEdit($scope.query) && $scope.dirty) {
|
||||
return leavingPageText;
|
||||
@@ -72,8 +75,9 @@
|
||||
|
||||
$scope.$parent.pageTitle = "Query Fiddle";
|
||||
|
||||
$scope.tabs = [{'key': 'table', 'name': 'Table'}, {'key': 'chart', 'name': 'Chart'},
|
||||
{'key': 'pivot', 'name': 'Pivot Table'}, {'key': 'cohort', 'name': 'Cohort'}];
|
||||
$scope.$watch(function() {return $location.hash()}, function(hash) {
|
||||
$scope.selectedTab = hash || DEFAULT_TAB;
|
||||
});
|
||||
|
||||
$scope.lockButton = function (lock) {
|
||||
$scope.queryExecuting = lock;
|
||||
@@ -109,6 +113,9 @@
|
||||
} else {
|
||||
// TODO: replace this with a safer method
|
||||
$location.path($location.path().replace(oldId, q.id)).replace();
|
||||
|
||||
// Reset visualizations tab to table after duplicating a query:
|
||||
$location.hash('table');
|
||||
}
|
||||
}
|
||||
}, function(httpResponse) {
|
||||
@@ -211,13 +218,26 @@
|
||||
$scope.queryResult = $scope.query.getQueryResult(0);
|
||||
$scope.lockButton(true);
|
||||
$scope.cancelling = false;
|
||||
}
|
||||
};
|
||||
|
||||
$scope.cancelExecution = function() {
|
||||
$scope.cancelling = true;
|
||||
$scope.queryResult.cancelExecution();
|
||||
}
|
||||
};
|
||||
|
||||
$scope.deleteVisualization = function($e, vis) {
|
||||
$e.preventDefault();
|
||||
if (confirm('Are you sure you want to delete ' + vis.name + ' ?')) {
|
||||
Visualization.delete(vis);
|
||||
if ($scope.selectedTab == vis.id) {
|
||||
$scope.selectedTab = DEFAULT_TAB;
|
||||
}
|
||||
$scope.query.visualizations =
|
||||
$scope.query.visualizations.filter(function(v) {
|
||||
return vis.id !== v.id;
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
var QueriesCtrl = function($scope, $http, $location, $filter, Query) {
|
||||
@@ -373,7 +393,7 @@
|
||||
.controller('DashboardCtrl', ['$scope', '$routeParams', '$http', 'Dashboard', DashboardCtrl])
|
||||
.controller('WidgetCtrl', ['$scope', '$http', '$location', 'Query', WidgetCtrl])
|
||||
.controller('QueriesCtrl', ['$scope', '$http', '$location', '$filter', 'Query', QueriesCtrl])
|
||||
.controller('QueryFiddleCtrl', ['$scope', '$window', '$routeParams', '$http', '$location', 'growl', 'notifications', 'Query', QueryFiddleCtrl])
|
||||
.controller('QueryFiddleCtrl', ['$scope', '$window', '$location', '$routeParams', '$http', '$location', 'growl', 'notifications', 'Query', 'Visualization', QueryFiddleCtrl])
|
||||
.controller('IndexCtrl', ['$scope', 'Dashboard', IndexCtrl])
|
||||
.controller('MainCtrl', ['$scope', 'Dashboard', 'notifications', MainCtrl]);
|
||||
})();
|
||||
|
||||
@@ -1,239 +1,402 @@
|
||||
var directives = angular.module('redash.directives', []);
|
||||
directives.directive('rdTabs', ['$location', '$rootScope', function($location, $rootScope) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
tabsCollection: '=',
|
||||
selectedTab: '='
|
||||
},
|
||||
template: '<ul class="nav nav-tabs"><li ng-class="{active: tab==selectedTab}" ng-repeat="tab in tabsCollection"><a href="#{{tab.key}}">{{tab.name}}</a></li></ul>',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.selectTab = function(tabKey) {
|
||||
$scope.selectedTab = _.find($scope.tabsCollection, function(tab) { return tab.key == tabKey; });
|
||||
}
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
$scope.$watch(function() { return $location.hash()}, function(hash) {
|
||||
if (hash) {
|
||||
$scope.selectTab($location.hash());
|
||||
} else {
|
||||
$scope.selectTab($scope.tabsCollection[0].key);
|
||||
}
|
||||
});
|
||||
var directives = angular.module('redash.directives', []);
|
||||
|
||||
directives.directive('rdTab', ['$location', function($location) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
'id': '@',
|
||||
'name': '@'
|
||||
},
|
||||
transclude: true,
|
||||
template: '<li class="rd-tab" ng-class="{active: id==selectedTab}"><a href="#{{id}}">{{name}}<span ng-transclude></span></a></li>',
|
||||
replace: true,
|
||||
link: function(scope) {
|
||||
scope.$watch(function(){return scope.$parent.selectedTab}, function(tab) {
|
||||
scope.selectedTab = tab;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}])
|
||||
}]);
|
||||
|
||||
directives.directive('editDashboardForm', ['$http', '$location', '$timeout', 'Dashboard', function($http, $location, $timeout, Dashboard) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/edit_dashboard.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.$watch('dashboard.widgets', function() {
|
||||
if ($scope.dashboard.widgets) {
|
||||
$scope.layout = [];
|
||||
_.each($scope.dashboard.widgets, function(row, rowIndex) {
|
||||
_.each(row, function(widget, colIndex) {
|
||||
$scope.layout.push({
|
||||
id: widget.id,
|
||||
col: colIndex+1,
|
||||
row: rowIndex+1,
|
||||
ySize: 1,
|
||||
xSize: widget.width,
|
||||
name: widget.query.name
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
$timeout(function () {
|
||||
$(".gridster ul").gridster({
|
||||
widget_margins: [5, 5],
|
||||
widget_base_dimensions: [260, 100],
|
||||
min_cols: 2,
|
||||
max_cols: 2,
|
||||
serialize_params: function ($w, wgd) {
|
||||
return { col: wgd.col, row: wgd.row, id: $w.data('widget-id') }
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
$scope.saveDashboard = function() {
|
||||
$scope.saveInProgress = true;
|
||||
// TODO: we should use the dashboard service here.
|
||||
if ($scope.dashboard.id) {
|
||||
var positions = $(element).find('.gridster ul').data('gridster').serialize();
|
||||
var layout = [];
|
||||
_.each(_.sortBy(positions, function (pos) {
|
||||
return pos.row * 10 + pos.col;
|
||||
}), function (pos) {
|
||||
var row = pos.row - 1;
|
||||
var col = pos.col - 1;
|
||||
layout[row] = layout[row] || [];
|
||||
if (col > 0 && layout[row][col - 1] == undefined) {
|
||||
layout[row][col - 1] = pos.id;
|
||||
} else {
|
||||
layout[row][col] = pos.id;
|
||||
}
|
||||
|
||||
});
|
||||
$scope.dashboard.layout = layout;
|
||||
|
||||
layout = JSON.stringify(layout);
|
||||
$http.post('/api/dashboards/' + $scope.dashboard.id, {'name': $scope.dashboard.name, 'layout': layout}).success(function(response) {
|
||||
$scope.dashboard = new Dashboard(response);
|
||||
$scope.saveInProgress = false;
|
||||
$(element).modal('hide');
|
||||
})
|
||||
} else {
|
||||
$http.post('/api/dashboards', {'name': $scope.dashboard.name}).success(function(response) {
|
||||
$(element).modal('hide');
|
||||
$location.path('/dashboard/' + response.slug).replace();
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}])
|
||||
|
||||
|
||||
directives.directive('newWidgetForm', ['$http', function($http) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/new_widget_form.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.widgetTypes = [{name: 'Chart', value: 'chart'}, {name: 'Table', value: 'grid'}, {name: 'Cohort', value: 'cohort'}];
|
||||
$scope.widgetSizes = [{name: 'Regular Size', value: 1}, {name: 'Double Size', value: 2}];
|
||||
|
||||
var reset = function() {
|
||||
$scope.saveInProgress = false;
|
||||
$scope.widgetType = 'chart';
|
||||
$scope.widgetSize = 1;
|
||||
$scope.queryId = null;
|
||||
}
|
||||
|
||||
reset();
|
||||
|
||||
$scope.saveWidget = function() {
|
||||
$scope.saveInProgress = true;
|
||||
|
||||
var widget = {
|
||||
'query_id': $scope.queryId,
|
||||
'dashboard_id': $scope.dashboard.id,
|
||||
'type': $scope.widgetType,
|
||||
'options': {},
|
||||
'width': $scope.widgetSize
|
||||
directives.directive('rdTabs', ['$location', '$rootScope', function($location, $rootScope) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
tabsCollection: '=',
|
||||
selectedTab: '='
|
||||
},
|
||||
template: '<ul class="nav nav-tabs"><li ng-class="{active: tab==selectedTab}" ng-repeat="tab in tabsCollection"><a href="#{{tab.key}}">{{tab.name}}</a></li></ul>',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.selectTab = function(tabKey) {
|
||||
$scope.selectedTab = _.find($scope.tabsCollection, function(tab) { return tab.key == tabKey; });
|
||||
}
|
||||
|
||||
$http.post('/api/widgets', widget).success(function(response) {
|
||||
// update dashboard layout
|
||||
$scope.dashboard.layout = response['layout'];
|
||||
if (response['new_row']) {
|
||||
$scope.dashboard.widgets.push([response['widget']]);
|
||||
$scope.$watch(function() { return $location.hash()}, function(hash) {
|
||||
if (hash) {
|
||||
$scope.selectTab($location.hash());
|
||||
} else {
|
||||
$scope.dashboard.widgets[$scope.dashboard.widgets.length-1].push(response['widget']);
|
||||
$scope.selectTab($scope.tabsCollection[0].key);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
directives.directive('editVisulatizationForm', ['Visualization', 'growl', function(Visualization, growl) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/edit_visualization.html',
|
||||
replace: true,
|
||||
scope: {
|
||||
query: '=',
|
||||
vis: '=?'
|
||||
},
|
||||
link: function(scope, element, attrs) {
|
||||
scope.advancedMode = false;
|
||||
scope.visTypes = {
|
||||
'Chart': Visualization.prototype.TYPES.CHART,
|
||||
'Cohort': Visualization.prototype.TYPES.COHORT
|
||||
};
|
||||
scope.seriesTypes = {
|
||||
'Line': 'line',
|
||||
'Column': 'column',
|
||||
'Area': 'area',
|
||||
'Scatter': 'scatter',
|
||||
'Pie': 'pie'
|
||||
};
|
||||
|
||||
if (!scope.vis) {
|
||||
// create new visualization
|
||||
// wait for query to load to populate with defaults
|
||||
var unwatch = scope.$watch('query', function(q) {
|
||||
if (q && q.id) {
|
||||
unwatch();
|
||||
scope.vis = {
|
||||
'query_id': q.id,
|
||||
'type': Visualization.prototype.TYPES.CHART,
|
||||
'name': '',
|
||||
'description': q.description || '',
|
||||
'options': newOptions(Visualization.prototype.TYPES.CHART)
|
||||
};
|
||||
}
|
||||
}, true);
|
||||
}
|
||||
|
||||
function newOptions(chartType) {
|
||||
if (chartType === Visualization.prototype.TYPES.CHART) {
|
||||
return {
|
||||
'series': {
|
||||
'type': 'column'
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// close the dialog
|
||||
$('#add_query_dialog').modal('hide');
|
||||
reset();
|
||||
return {};
|
||||
}
|
||||
|
||||
scope.$watch('vis.type', function(type) {
|
||||
// if not edited by user, set name to match type
|
||||
if (type && scope.vis && !scope.visForm.name.$dirty) {
|
||||
// poor man's titlecase
|
||||
scope.vis.name = scope.vis.type[0] + scope.vis.type.slice(1).toLowerCase();
|
||||
}
|
||||
});
|
||||
|
||||
scope.toggleAdvancedMode = function() {
|
||||
scope.advancedMode = !scope.advancedMode;
|
||||
};
|
||||
|
||||
scope.typeChanged = function() {
|
||||
scope.vis.options = newOptions();
|
||||
};
|
||||
|
||||
scope.submit = function() {
|
||||
Visualization.save(scope.vis, function success(result) {
|
||||
growl.addSuccessMessage("Visualization saved");
|
||||
|
||||
scope.vis = result;
|
||||
|
||||
var visIds = _.pluck(scope.query.visualizations, 'id');
|
||||
var index = visIds.indexOf(result.id);
|
||||
if (index > -1) {
|
||||
scope.query.visualizations[index] = result;
|
||||
} else {
|
||||
scope.query.visualizations.push(result);
|
||||
}
|
||||
}, function error() {
|
||||
growl.addErrorMessage("Visualization could not be saved");
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
directives.directive('editDashboardForm', ['$http', '$location', '$timeout', 'Dashboard', function($http, $location, $timeout, Dashboard) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/edit_dashboard.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
var gridster = element.find(".gridster ul").gridster({
|
||||
widget_margins: [5, 5],
|
||||
widget_base_dimensions: [260, 100],
|
||||
min_cols: 2,
|
||||
max_cols: 2,
|
||||
serialize_params: function($w, wgd) {
|
||||
return {
|
||||
col: wgd.col,
|
||||
row: wgd.row,
|
||||
id: $w.data('widget-id')
|
||||
}
|
||||
}
|
||||
}).data('gridster');
|
||||
|
||||
var gsItemTemplate = '<li data-widget-id="{id}" class="widget panel panel-default gs-w">' +
|
||||
'<div class="panel-heading">{name}' +
|
||||
'</div></li>';
|
||||
|
||||
$scope.$watch('dashboard.widgets', function(widgets) {
|
||||
$timeout(function () {
|
||||
gridster.remove_all_widgets();
|
||||
|
||||
if (widgets && widgets.length) {
|
||||
var layout = [];
|
||||
|
||||
_.each(widgets, function(row, rowIndex) {
|
||||
_.each(row, function(widget, colIndex) {
|
||||
layout.push({
|
||||
id: widget.id,
|
||||
col: colIndex+1,
|
||||
row: rowIndex+1,
|
||||
ySize: 1,
|
||||
xSize: widget.width,
|
||||
name: widget.visualization.query.name
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
_.each(layout, function(item) {
|
||||
var el = gsItemTemplate.replace('{id}', item.id).replace('{name}', item.name);
|
||||
gridster.add_widget(el, item.xSize, item.ySize, item.col, item.row);
|
||||
|
||||
});
|
||||
}
|
||||
});
|
||||
}, true);
|
||||
|
||||
$scope.saveDashboard = function() {
|
||||
$scope.saveInProgress = true;
|
||||
// TODO: we should use the dashboard service here.
|
||||
if ($scope.dashboard.id) {
|
||||
var positions = $(element).find('.gridster ul').data('gridster').serialize();
|
||||
var layout = [];
|
||||
_.each(_.sortBy(positions, function (pos) {
|
||||
return pos.row * 10 + pos.col;
|
||||
}), function (pos) {
|
||||
var row = pos.row - 1;
|
||||
var col = pos.col - 1;
|
||||
layout[row] = layout[row] || [];
|
||||
if (col > 0 && layout[row][col - 1] == undefined) {
|
||||
layout[row][col - 1] = pos.id;
|
||||
} else {
|
||||
layout[row][col] = pos.id;
|
||||
}
|
||||
|
||||
});
|
||||
$scope.dashboard.layout = layout;
|
||||
|
||||
layout = JSON.stringify(layout);
|
||||
$http.post('/api/dashboards/' + $scope.dashboard.id, {'name': $scope.dashboard.name, 'layout': layout}).success(function(response) {
|
||||
$scope.dashboard = new Dashboard(response);
|
||||
$scope.saveInProgress = false;
|
||||
$(element).modal('hide');
|
||||
})
|
||||
} else {
|
||||
$http.post('/api/dashboards', {'name': $scope.dashboard.name}).success(function(response) {
|
||||
$(element).modal('hide');
|
||||
$location.path('/dashboard/' + response.slug).replace();
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
directives.directive('newWidgetForm', ['$http', 'Query', function($http, Query) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
dashboard: '='
|
||||
},
|
||||
templateUrl: '/views/new_widget_form.html',
|
||||
replace: true,
|
||||
link: function($scope, element, attrs) {
|
||||
$scope.widgetSizes = [{name: 'Regular', value: 1}, {name: 'Double', value: 2}];
|
||||
|
||||
var reset = function() {
|
||||
$scope.saveInProgress = false;
|
||||
$scope.widgetSize = 1;
|
||||
$scope.queryId = null;
|
||||
$scope.selectedVis = null;
|
||||
|
||||
}
|
||||
|
||||
reset();
|
||||
|
||||
$scope.loadVisualizations = function() {
|
||||
if (!$scope.queryId) {
|
||||
return;
|
||||
}
|
||||
|
||||
Query.get({
|
||||
id: $scope.queryId
|
||||
}, function(query) {
|
||||
if (query) {
|
||||
$scope.query = query;
|
||||
if(query.visualizations.length) {
|
||||
$scope.selectedVis = query.visualizations[0];
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
$scope.saveWidget = function() {
|
||||
$scope.saveInProgress = true;
|
||||
|
||||
var widget = {
|
||||
'visualization_id': $scope.selectedVis.id,
|
||||
'dashboard_id': $scope.dashboard.id,
|
||||
'options': {},
|
||||
'width': $scope.widgetSize
|
||||
}
|
||||
|
||||
$http.post('/api/widgets', widget).success(function(response) {
|
||||
// update dashboard layout
|
||||
$scope.dashboard.layout = response['layout'];
|
||||
if (response['new_row']) {
|
||||
$scope.dashboard.widgets.push([response['widget']]);
|
||||
} else {
|
||||
$scope.dashboard.widgets[$scope.dashboard.widgets.length-1].push(response['widget']);
|
||||
}
|
||||
|
||||
// close the dialog
|
||||
$('#add_query_dialog').modal('hide');
|
||||
reset();
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}])
|
||||
|
||||
// From: http://jsfiddle.net/joshdmiller/NDFHg/
|
||||
directives.directive('editInPlace', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
value: '=',
|
||||
ignoreBlanks: '=',
|
||||
editable: '='
|
||||
},
|
||||
template: function(tElement, tAttrs) {
|
||||
var elType = tAttrs.editor || 'input';
|
||||
var placeholder = tAttrs.placeholder || 'Click to edit';
|
||||
return '<span ng-click="editable && edit()" ng-bind="value" ng-class="{editable: editable}"></span>' +
|
||||
'<span ng-click="editable && edit()" ng-show="editable && !value" ng-class="{editable: editable}">' + placeholder + '</span>' +
|
||||
'<{elType} ng-model="value" class="form-control" rows="2"></{elType}>'.replace('{elType}', elType);
|
||||
},
|
||||
link: function ($scope, element, attrs) {
|
||||
// Let's get a reference to the input element, as we'll want to reference it.
|
||||
var inputElement = angular.element(element.children()[2]);
|
||||
|
||||
// This directive should have a set class so we can style it.
|
||||
element.addClass('edit-in-place');
|
||||
|
||||
// Initially, we're not editing.
|
||||
$scope.editing = false;
|
||||
|
||||
// ng-click handler to activate edit-in-place
|
||||
$scope.edit = function () {
|
||||
if ($scope.ignoreBlanks) {
|
||||
$scope.oldValue = $scope.value;
|
||||
}
|
||||
|
||||
$scope.editing = true;
|
||||
|
||||
// We control display through a class on the directive itself. See the CSS.
|
||||
element.addClass('active');
|
||||
|
||||
// And we must focus the element.
|
||||
// `angular.element()` provides a chainable array, like jQuery so to access a native DOM function,
|
||||
// we have to reference the first element in the array.
|
||||
inputElement[0].focus();
|
||||
};
|
||||
|
||||
$(inputElement).blur(function() {
|
||||
if ($scope.ignoreBlanks && _.isEmpty($scope.value)) {
|
||||
$scope.value = $scope.oldValue;
|
||||
}
|
||||
$scope.editing = false;
|
||||
element.removeClass('active');
|
||||
})
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
}
|
||||
}
|
||||
}])
|
||||
// http://stackoverflow.com/a/17904092/1559840
|
||||
directives.directive('jsonText', function() {
|
||||
return {
|
||||
restrict: 'A',
|
||||
require: 'ngModel',
|
||||
link: function(scope, element, attr, ngModel) {
|
||||
function into(input) {
|
||||
return JSON.parse(input);
|
||||
}
|
||||
function out(data) {
|
||||
return JSON.stringify(data, undefined, 2);
|
||||
}
|
||||
ngModel.$parsers.push(into);
|
||||
ngModel.$formatters.push(out);
|
||||
|
||||
    }
  };
});

directives.directive('rdTimer', ['$timeout', function ($timeout) {
  return {
    restrict: 'E',
    scope: { timestamp: '=' },
    template: '{{currentTime}}',
    controller: ['$scope', function ($scope) {
      $scope.currentTime = "00:00:00";
      var currentTimeout = null;

      var updateTime = function() {
        $scope.currentTime = moment(moment() - moment($scope.timestamp)).utc().format("HH:mm:ss");
        currentTimeout = $timeout(updateTime, 1000);
      };

      var cancelTimer = function() {
        if (currentTimeout) {
          $timeout.cancel(currentTimeout);
          currentTimeout = null;
        }
      };

      updateTime();

      $scope.$on('$destroy', function () {
        cancelTimer();
      });
    }]
  };
}]);
|
||||
})();
|
||||
|
||||
@@ -1,83 +1,272 @@
|
||||
(function () {
  'use strict';
|
||||
var defaultOptions = {
|
||||
title: {
|
||||
"text": null
|
||||
},
|
||||
xAxis: {
|
||||
type: 'datetime'
|
||||
},
|
||||
yAxis: {
|
||||
title: {
|
||||
text: null
|
||||
}
|
||||
},
|
||||
tooltip: {
|
||||
valueDecimals: 2,
|
||||
formatter: function () {
|
||||
if (!this.points) {
|
||||
this.points = [this.point];
|
||||
};
|
||||
|
||||
      if (moment.isMoment(this.x)) {
        var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
            pointsCount = this.points.length;

        $.each(this.points, function (i, point) {
          s += '<br/><span style="color:' + point.series.color + '">' + point.series.name + '</span>: ' +
               Highcharts.numberFormat(point.y);

          if (pointsCount > 1 && point.percentage) {
            s += " (" + Highcharts.numberFormat(point.percentage) + "%)";
          }
        });
      } else {
        var points = this.points;
        var name = points[0].key || points[0].name;

        var s = "<b>" + name + "</b>";

        $.each(points, function (i, point) {
          if (points.length > 1) {
            s += '<br/><span style="color:' + point.series.color + '">' + point.series.name + '</span>: ' + Highcharts.numberFormat(point.y);
          } else {
            s += ": " + Highcharts.numberFormat(point.y);
            if (point.percentage < 100) {
              s += ' (' + Highcharts.numberFormat(point.percentage) + '%)';
            }
          }
        });
      }

      return s;
    },
    shared: true
  },
  exporting: {
    chartOptions: {
      title: {
        text: ''
      }
    },
    buttons: {
      contextButton: {
        menuItems: [
          {
            text: 'Toggle % Stacking',
            onclick: function () {
              var newStacking = "normal";
              if (this.series[0].options.stacking == "normal") {
                newStacking = "percent";
              }

              _.each(this.series, function (series) {
                series.update({stacking: newStacking}, true);
              });
            }
          }
        ]
      }
    }
  },
|
||||
credits: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
area: {
|
||||
marker: {
|
||||
enabled: false,
|
||||
symbol: 'circle',
|
||||
radius: 2,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
column: {
|
||||
stacking: "normal",
|
||||
pointPadding: 0,
|
||||
borderWidth: 1,
|
||||
groupPadding: 0,
|
||||
shadow: false
|
||||
},
|
||||
line: {
|
||||
marker: {
|
||||
radius: 1
|
||||
},
|
||||
lineWidth: 2,
|
||||
states: {
|
||||
hover: {
|
||||
lineWidth: 2,
|
||||
marker: {
|
||||
radius: 3
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
pie: {
|
||||
allowPointSelect: true,
|
||||
cursor: 'pointer',
|
||||
dataLabels: {
|
||||
enabled: true,
|
||||
color: '#000000',
|
||||
connectorColor: '#000000',
|
||||
format: '<b>{point.name}</b>: {point.percentage:.1f} %'
|
||||
}
|
||||
},
|
||||
scatter: {
|
||||
marker: {
|
||||
radius: 5,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true,
|
||||
lineColor: 'rgb(100,100,100)'
|
||||
}
|
||||
}
|
||||
},
|
||||
tooltip: {
|
||||
headerFormat: '<b>{series.name}</b><br>',
|
||||
pointFormat: '{point.x}, {point.y}'
|
||||
}
|
||||
}
|
||||
},
|
||||
series: []
|
||||
};
|
||||
|
||||
|
||||
angular.module('highchart', [])
|
||||
.directive('chart', ['$timeout', function ($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<div></div>',
|
||||
scope: {
|
||||
options: "=options",
|
||||
series: "=series"
|
||||
},
|
||||
transclude: true,
|
||||
replace: true,
|
||||
|
||||
link: function (scope, element, attrs) {
|
||||
var chartsDefaults = {
|
||||
chart: {
|
||||
renderTo: element[0],
|
||||
type: attrs.type || null,
|
||||
height: attrs.height || null,
|
||||
width: attrs.width || null
|
||||
}
|
||||
};
|
||||
|
||||
var chartOptions = $.extend(true, {}, defaultOptions, chartsDefaults);
|
||||
|
||||
// $timeout makes sure that this function is invoked after the DOM is ready. When draw/init
// run after the DOM is ready, we first see empty HighCharts objects and later
// they get filled up, which gives the feeling that the charts load faster (otherwise
// we stare at an empty screen until the HighCharts object is ready).
|
||||
$timeout(function(){
|
||||
// Update when options change
|
||||
scope.$watch('options', function (newOptions) {
|
||||
initChart(newOptions);
|
||||
}, true);
|
||||
|
||||
//Update when charts data changes
|
||||
scope.$watch(function () {
|
||||
// TODO: this might be an issue in case the series change, but they stay
|
||||
// with the same length
|
||||
return (scope.series && scope.series.length) || 0;
|
||||
}, function (length) {
|
||||
if (!length || length == 0) {
|
||||
scope.chart.showLoading();
|
||||
} else {
|
||||
drawChart();
|
||||
};
|
||||
}, true);
|
||||
});
|
||||
|
||||
function initChart(options) {
|
||||
if (scope.chart) {
|
||||
scope.chart.destroy();
|
||||
};
|
||||
|
||||
$.extend(true, chartOptions, options);
|
||||
|
||||
scope.chart = new Highcharts.Chart(chartOptions);
|
||||
drawChart();
|
||||
}
|
||||
|
||||
function drawChart() {
|
||||
while (scope.chart.series.length > 0) {
|
||||
scope.chart.series[0].remove(false);
|
||||
};
|
||||
|
||||
if (_.some(scope.series[0].data, function (p) {
|
||||
return angular.isString(p.x)
|
||||
})) {
|
||||
scope.chart.xAxis[0].update({type: 'category'});
|
||||
|
||||
// We need to make sure that for each category, each series has a value.
|
||||
var categories = _.union.apply(this, _.map(scope.series, function (s) {
|
||||
return _.pluck(s.data, 'x')
|
||||
}));
|
||||
|
||||
_.each(scope.series, function (s) {
|
||||
// TODO: move this logic to Query#getChartData
|
||||
var yValues = _.groupBy(s.data, 'x');
|
||||
|
||||
var newData = _.sortBy(_.map(categories, function (category) {
|
||||
return {
|
||||
name: category,
|
||||
y: yValues[category] && yValues[category][0].y
|
||||
}
|
||||
}), 'name');
|
||||
|
||||
s.data = newData;
|
||||
});
|
||||
} else {
|
||||
scope.chart.xAxis[0].update({type: 'datetime'});
|
||||
}
|
||||
|
||||
scope.chart.counters.color = 0;
|
||||
|
||||
_.each(scope.series, function (s) {
|
||||
// here we override the series with the visualization config
|
||||
s = _.extend(s, chartOptions['series']);
|
||||
|
||||
if (s.type == 'area') {
|
||||
_.each(s.data, function (p) {
|
||||
// This is an insane hack: somewhere deep in HighChart's code,
|
||||
// when you stack areas, it tries to convert the string representation
|
||||
// of point's x into a number. With the default implementation of toString
|
||||
// it fails....
|
||||
|
||||
if (moment.isMoment(p.x)) {
|
||||
p.x.toString = function () {
|
||||
return String(this.toDate().getTime());
|
||||
};
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
scope.chart.addSeries(s, false);
|
||||
});
|
||||
|
||||
scope.chart.redraw();
|
||||
scope.chart.hideLoading();
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
}]);
|
||||
})();
|
||||
@@ -1,83 +1,20 @@
|
||||
var renderers = angular.module('redash.renderers', []);
|
||||
var defaultChartOptions = {
|
||||
"title": {
|
||||
"text": null
|
||||
},
|
||||
"tooltip": {
|
||||
valueDecimals: 2,
|
||||
formatter: function () {
|
||||
if (moment.isMoment(this.x)) {
|
||||
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
|
||||
pointsCount = this.points.length;
|
||||
|
||||
$.each(this.points, function (i, point) {
|
||||
s += '<br/><span style="color:'+point.series.color+'">' + point.series.name + '</span>: ' +
|
||||
Highcharts.numberFormat(point.y);
|
||||
|
||||
if (pointsCount > 1 && point.percentage) {
|
||||
s += " (" + Highcharts.numberFormat(point.percentage) + "%)";
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var s = "<b>" + this.points[0].key + "</b>";
|
||||
$.each(this.points, function (i, point) {
|
||||
s+= '<br/><span style="color:'+point.series.color+'">' + point.series.name + '</span>: ' +
|
||||
Highcharts.numberFormat(point.y);
|
||||
});
|
||||
}
|
||||
|
||||
return s;
|
||||
renderers.directive('visualizationRenderer', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
visualization: '=',
|
||||
queryResult: '='
|
||||
},
|
||||
shared: true
|
||||
},
|
||||
xAxis: {
|
||||
type: 'datetime'
|
||||
},
|
||||
yAxis: {
|
||||
title: {
|
||||
text: null
|
||||
}
|
||||
},
|
||||
exporting: {
|
||||
chartOptions: {
|
||||
title: {
|
||||
text: this.description
|
||||
}
|
||||
},
|
||||
buttons: {
|
||||
contextButton: {
|
||||
menuItems: [
|
||||
{
|
||||
text: 'Toggle % Stacking',
|
||||
onclick: function () {
|
||||
var newStacking = "normal";
|
||||
if (this.series[0].options.stacking == "normal") {
|
||||
newStacking = "percent";
|
||||
}
|
||||
|
||||
_.each(this.series, function (series) {
|
||||
series.update({stacking: newStacking}, true);
|
||||
});
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
credits: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
"column": {
|
||||
"stacking": "normal",
|
||||
"pointPadding": 0,
|
||||
"borderWidth": 1,
|
||||
"groupPadding": 0,
|
||||
"shadow": false
|
||||
}
|
||||
},
|
||||
"series": []
|
||||
};
|
||||
template: '<div ng-switch on="visualization.type">' +
|
||||
'<grid-renderer ng-switch-when="TABLE" options="visualization.options" query-result="queryResult"></grid-renderer>' +
|
||||
'<chart-renderer ng-switch-when="CHART" options="visualization.options" query-result="queryResult"></chart-renderer>' +
|
||||
'<cohort-renderer ng-switch-when="COHORT" options="visualization.options" query-result="queryResult"></cohort-renderer>' +
|
||||
'</div>',
|
||||
replace: false
|
||||
}
|
||||
});
|
||||
|
||||
renderers.directive('chartRenderer', function () {
|
||||
return {
|
||||
@@ -90,8 +27,13 @@ renderers.directive('chartRenderer', function () {
|
||||
replace: false,
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.chartSeries = [];
|
||||
$scope.chartOptions = defaultChartOptions;
|
||||
$scope.chartOptions = {};
|
||||
|
||||
$scope.$watch('options', function(chartOptions) {
|
||||
if (chartOptions) {
|
||||
$scope.chartOptions = chartOptions;
|
||||
}
|
||||
});
|
||||
$scope.$watch('queryResult && queryResult.getData()', function (data) {
|
||||
if (!data || $scope.queryResult.getData() == null) {
|
||||
$scope.chartSeries.splice(0, $scope.chartSeries.length);
|
||||
@@ -99,7 +41,7 @@ renderers.directive('chartRenderer', function () {
|
||||
$scope.chartSeries.splice(0, $scope.chartSeries.length);
|
||||
|
||||
_.each($scope.queryResult.getChartData(), function (s) {
|
||||
$scope.chartSeries.push(_.extend(s, {'stacking': 'normal'}, $scope.options));
|
||||
$scope.chartSeries.push(_.extend(s, {'stacking': 'normal'}));
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -142,13 +84,7 @@ renderers.directive('gridRenderer', function () {
|
||||
var gridData = _.map($scope.queryResult.getData(), function (row) {
|
||||
var newRow = {};
|
||||
_.each(row, function (val, key) {
|
||||
// TODO: hack to detect date fields, needed only for backward compatibility
|
||||
if (val > 1000 * 1000 * 1000 * 100) {
|
||||
newRow[$scope.queryResult.getColumnCleanName(key)] = moment(val);
|
||||
} else {
|
||||
newRow[$scope.queryResult.getColumnCleanName(key)] = val;
|
||||
}
|
||||
|
||||
newRow[$scope.queryResult.getColumnCleanName(key)] = val;
|
||||
})
|
||||
return newRow;
|
||||
});
|
||||
@@ -264,4 +200,4 @@ renderers.directive('cohortRenderer', function() {
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -253,7 +253,7 @@
|
||||
}
|
||||
|
||||
return QueryResult;
|
||||
}
|
||||
};
|
||||
|
||||
var Query = function ($resource, QueryResult) {
|
||||
var Query = $resource('/api/queries/:id', {id: '@id'});
|
||||
@@ -263,6 +263,7 @@
|
||||
ttl = this.ttl;
|
||||
}
|
||||
|
||||
|
||||
var queryResult = null;
|
||||
if (this.latest_query_data && ttl != 0) {
|
||||
queryResult = new QueryResult({'query_result': this.latest_query_data});
|
||||
@@ -273,17 +274,32 @@
|
||||
}
|
||||
|
||||
return queryResult;
|
||||
}
|
||||
};
|
||||
|
||||
Query.prototype.getHash = function() {
|
||||
return [this.name, this.description, this.query].join('!#');
|
||||
}
|
||||
};
|
||||
|
||||
return Query;
|
||||
}
|
||||
};
|
||||
|
||||
var Visualization = function($resource) {
|
||||
var Visualization = $resource('/api/visualizations/:id', {id: '@id'});
|
||||
|
||||
Visualization.prototype = {
|
||||
TYPES: {
|
||||
'CHART': 'CHART',
|
||||
'COHORT': 'COHORT',
|
||||
'TABLE': 'TABLE'
|
||||
}
|
||||
};
|
||||
|
||||
return Visualization;
|
||||
};
|
||||
|
||||
angular.module('redash.services', [])
|
||||
.factory('QueryResult', ['$resource', '$timeout', QueryResult])
|
||||
.factory('Query', ['$resource', 'QueryResult', Query])
|
||||
.factory('Visualization', ['$resource', Visualization])
|
||||
|
||||
})();
|
||||
|
||||
@@ -2,6 +2,10 @@ body {
|
||||
padding-top: 70px;
|
||||
}
|
||||
|
||||
a.link {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
a.page-title {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
@@ -56,6 +60,10 @@ a.navbar-brand {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
.panel-heading > a {
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
/* angular-growl */
|
||||
.growl {
|
||||
position: fixed;
|
||||
@@ -193,4 +201,16 @@ to add those CSS styles here. */
|
||||
-webkit-border-radius: 6px 0 6px 6px;
|
||||
-moz-border-radius: 6px 0 6px 6px;
|
||||
border-radius: 6px 0 6px 6px;
|
||||
}
|
||||
|
||||
.rd-tab .remove {
|
||||
cursor: pointer;
|
||||
color: #A09797;
|
||||
padding: 0 3px 1px 4px;
|
||||
font-size: 11px;
|
||||
}
|
||||
.rd-tab .remove:hover {
|
||||
color: white;
|
||||
background-color: #FF8080;
|
||||
border-radius: 50%;
|
||||
}
|
||||
@@ -8,7 +8,7 @@
|
||||
<button type="button" class="btn btn-default btn-xs" data-toggle="modal" href="#edit_dashboard_dialog" tooltip="Edit Dashboard (Name/Layout)"><span
|
||||
class="glyphicon glyphicon-cog"></span></button>
|
||||
<button type="button" class="btn btn-default btn-xs" data-toggle="modal"
|
||||
href="#add_query_dialog" tooltip="Add Widget (Chart/Table)"><span class="glyphicon glyphicon-import"></span>
|
||||
href="#add_query_dialog" tooltip="Add Widget (Chart/Table)"><span class="glyphicon glyphicon-plus"></span>
|
||||
</button>
|
||||
</span>
|
||||
</h2>
|
||||
@@ -21,7 +21,7 @@
|
||||
|
||||
<div class="panel panel-default">
|
||||
<div class="panel-heading">
|
||||
<h3 class="panel-title" style="cursor: pointer;" ng-click="open(query)">
|
||||
<h3 class="panel-title" style="cursor: pointer;" ng-click="open(query, widget.visualization)">
|
||||
<p>
|
||||
<span ng-bind="query.name"></span>
|
||||
</p>
|
||||
@@ -29,18 +29,15 @@
|
||||
</h3>
|
||||
</div>
|
||||
|
||||
<div ng-switch on="widget.type" class="panel-body">
|
||||
<chart-renderer ng-switch-when="chart" query-result="queryResult" options="widget.options"></chart-renderer>
|
||||
<grid-renderer ng-switch-when="grid" query-result="queryResult"></grid-renderer>
|
||||
<cohort-renderer ng-switch-when="cohort" query-result="queryResult"></cohort-renderer>
|
||||
</div>
|
||||
<visualization-renderer visualization="widget.visualization" query-result="queryResult"></visualization-renderer class="panel-body">
|
||||
|
||||
<div class="panel-footer">
|
||||
<span class="label label-default"
|
||||
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
|
||||
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>
|
||||
|
||||
<span class="pull-right">
|
||||
<a class="btn btn-default btn-xs" ng-href="/queries/{{query.id}}"><span class="glyphicon glyphicon-link"></span></a>
|
||||
<a class="btn btn-default btn-xs" ng-href="/queries/{{query.id}}#{{widget.visualization.id}}"><span class="glyphicon glyphicon-link"></span></a>
|
||||
<button type="button" class="btn btn-default btn-xs" ng-show="dashboard.canEdit()" ng-click="deleteWidget()" title="Remove Widget"><span class="glyphicon glyphicon-trash"></span></button>
|
||||
</span>
|
||||
|
||||
|
||||
@@ -10,17 +10,9 @@
|
||||
<input type="text" class="form-control" placeholder="Dashboard Name" ng-model="dashboard.name">
|
||||
</p>
|
||||
|
||||
<p ng-show="layout!='null'">
|
||||
<div class="gridster">
|
||||
<ul>
|
||||
<li ng-repeat="widget in layout" data-row="{{widget.row}}" data-col="{{widget.col}}"
|
||||
data-sizey="{{widget.ySize}}" data-sizex="{{widget.xSize}}" data-widget-id="{{widget.id}}"
|
||||
class="widget panel panel-default">
|
||||
<div class="panel-heading">{{widget.name}}</div>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</p>
|
||||
<div class="gridster">
|
||||
<ul></ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-default" ng-disabled="saveInProgress" data-dismiss="modal">Close</button>
|
||||
|
||||
rd_ui/app/views/edit_visualization.html (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
<form role="form" name="visForm" ng-submit="submit()">
|
||||
<div class="form-group">
|
||||
<label class="control-label">Name</label>
|
||||
<input name="name" type="text" class="form-control" ng-model="vis.name" placeholder="{{vis.type}}">
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label class="control-label">Visualization Type</label>
|
||||
<select required ng-model="vis.type" ng-options="value as key for (key, value) in visTypes" class="form-control" ng-change="typeChanged()"></select>
|
||||
</div>
|
||||
|
||||
<div class="form-group" ng-show="vis.type == visTypes.Chart">
|
||||
<label class="control-label">Chart Type</label>
|
||||
<select required ng-model="vis.options.series.type" ng-options="value as key for (key, value) in seriesTypes" class="form-control"></select>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<button type="submit" class="btn btn-primary">Save</button>
|
||||
</div>
|
||||
|
||||
</form>
|
||||
@@ -7,15 +7,27 @@
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<p>
|
||||
<input type="text" class="form-control" placeholder="Query Id" ng-model="queryId">
|
||||
<form class="form-inline" role="form" ng-submit="loadVisualizations()">
|
||||
<div class="form-group">
|
||||
<input class="form-control" placeholder="Query Id" ng-model="queryId">
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary" ng-disabled="!queryId">
|
||||
<span class="glyphicon glyphicon-refresh"></span> Load
|
||||
</button>
|
||||
</form>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<select class="form-control" ng-model="widgetType" ng-options="c.value as c.name for c in widgetTypes"></select>
|
||||
</p>
|
||||
<p>
|
||||
<select class="form-control" ng-model="widgetSize" ng-options="c.value as c.name for c in widgetSizes"></select>
|
||||
</p>
|
||||
<div ng-show="query">
|
||||
<div class="form-group">
|
||||
<label for="">Choose Visualation</label>
|
||||
<select ng-model="selectedVis" ng-options="vis as vis.name group by vis.type for vis in query.visualizations" class="form-control"></select>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="">Widget Size</label>
|
||||
<select class="form-control" ng-model="widgetSize" ng-options="c.value as c.name for c in widgetSizes"></select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-default" ng-disabled="saveInProgress" data-dismiss="modal">Close</button>
|
||||
|
||||
@@ -51,22 +51,52 @@
|
||||
</div>
|
||||
|
||||
<div class="row" ng-show="queryResult.getStatus() == 'done'">
|
||||
<rd-tabs tabs-collection='tabs' selected-tab='selectedTab'></rd-tabs>
|
||||
<ul class="nav nav-tabs">
|
||||
<rd-tab id="table" name="Table"></rd-tab>
|
||||
<rd-tab id="pivot" name="Pivot Table"></rd-tab>
|
||||
<!-- hide the table visualization -->
|
||||
<rd-tab id="{{vis.id}}" name="{{vis.name}}" ng-hide="vis.type=='TABLE'" ng-repeat="vis in query.visualizations">
|
||||
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="currentUser.canEdit(query)"> ×</span>
|
||||
</rd-tab>
|
||||
<rd-tab id="add" name="+New" removeable="true" ng-show="currentUser.canEdit(query)"></rd-tab>
|
||||
</ul>
|
||||
|
||||
<div ng-show="selectedTab.key == 'chart'" class="col-lg-12">
|
||||
<chart-renderer query-result="queryResult"></chart-renderer>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab.key == 'table'">
|
||||
<div class="col-lg-12" ng-show="selectedTab == 'table'">
|
||||
<grid-renderer query-result="queryResult" items-per-page="50"></grid-renderer>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab.key == 'pivot'">
|
||||
<div class="col-lg-12" ng-show="selectedTab == 'pivot'">
|
||||
<pivot-table-renderer query-result="queryResult"></pivot-table-renderer>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab.key == 'cohort'">
|
||||
<cohort-renderer query-result="queryResult"></cohort-renderer>
|
||||
<div class="col-lg-12" ng-show="selectedTab == vis.id" ng-repeat="vis in query.visualizations">
|
||||
<div class="row" ng-show="currentUser.canEdit(query)">
|
||||
<p>
|
||||
<div class="col-lg-12">
|
||||
<edit-visulatization-form vis="vis" query="query"></edit-visulatization-form>
|
||||
</div>
|
||||
</p>
|
||||
</div>
|
||||
<div class="row">
|
||||
<p>
|
||||
<div class="col-lg-12">
|
||||
<visualization-renderer visualization="vis" query-result="queryResult"></visualization-renderer>
|
||||
</div>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="col-lg-12" ng-show="selectedTab == 'add'">
|
||||
<div class="row">
|
||||
<p>
|
||||
<div class="col-lg-6">
|
||||
<edit-visulatization-form vis="newVisualization" query="query"></edit-visulatization-form>
|
||||
</div>
|
||||
<div class="col-lg-6">
|
||||
<visualization-renderer visualization="newVisualization" query-result="queryResult"></visualization-renderer>
|
||||
</div>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
redash/__init__.py (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
import json
|
||||
import urlparse
|
||||
from flask import Flask, make_response
|
||||
from flask.ext.restful import Api
|
||||
from flask_peewee.db import Database
|
||||
|
||||
import redis
|
||||
from redash import settings, utils
|
||||
|
||||
__version__ = '0.3.2'
|
||||
|
||||
app = Flask(__name__,
|
||||
template_folder=settings.STATIC_ASSETS_PATH,
|
||||
static_folder=settings.STATIC_ASSETS_PATH,
|
||||
static_path='/static')
|
||||
|
||||
|
||||
api = Api(app)
|
||||
|
||||
# configure our database
|
||||
settings.DATABASE_CONFIG.update({'threadlocals': True})
|
||||
app.config['DATABASE'] = settings.DATABASE_CONFIG
|
||||
db = Database(app)
|
||||
|
||||
from redash.authentication import setup_authentication
|
||||
auth = setup_authentication(app)
|
||||
|
||||
@api.representation('application/json')
|
||||
def json_representation(data, code, headers=None):
|
||||
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
|
||||
resp.headers.extend(headers or {})
|
||||
return resp
|
||||
|
||||
|
||||
redis_url = urlparse.urlparse(settings.REDIS_URL)
|
||||
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
|
||||
|
||||
from redash import data
|
||||
data_manager = data.Manager(redis_connection, db)
|
||||
|
||||
from redash import controllers
|
||||
redash/authentication.py (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
import functools
|
||||
import hashlib
|
||||
import hmac
|
||||
from flask import request, make_response
|
||||
from flask.ext.googleauth import GoogleFederated
|
||||
import time
|
||||
from werkzeug.contrib.fixers import ProxyFix
|
||||
import werkzeug.wrappers
|
||||
from redash import models, settings
|
||||
|
||||
|
||||
def sign(key, path, expires):
|
||||
if not key:
|
||||
return None
|
||||
|
||||
h = hmac.new(str(key), msg=path, digestmod=hashlib.sha1)
|
||||
h.update(str(expires))
|
||||
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
class HMACAuthentication(object):
|
||||
def __init__(self, auth):
|
||||
self.auth = auth
|
||||
|
||||
def required(self, fn):
|
||||
wrapped_fn = self.auth.required(fn)
|
||||
|
||||
@functools.wraps(fn)
|
||||
def decorated(*args, **kwargs):
|
||||
signature = request.args.get('signature')
|
||||
expires = float(request.args.get('expires') or 0)
|
||||
query_id = request.view_args.get('query_id', None)
|
||||
|
||||
# TODO: 3600 should be a setting
|
||||
if signature and query_id and time.time() < expires <= time.time() + 3600:
|
||||
query = models.Query.get(models.Query.id == query_id)
|
||||
calculated_signature = sign(query.api_key, request.path, expires)
|
||||
|
||||
if query.api_key and signature == calculated_signature:
|
||||
return fn(*args, **kwargs)
|
||||
|
||||
# Workaround: flask-restful checks only for flask.wrappers.Response instead of
# werkzeug.wrappers.Response, so wrap the latter explicitly.
|
||||
resp = wrapped_fn(*args, **kwargs)
|
||||
if isinstance(resp, werkzeug.wrappers.Response):
|
||||
resp = make_response(resp)
|
||||
|
||||
return resp
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
def setup_authentication(app):
|
||||
openid_auth = GoogleFederated(settings.GOOGLE_APPS_DOMAIN, app)
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app)
|
||||
app.secret_key = settings.COOKIE_SECRET
|
||||
|
||||
return HMACAuthentication(openid_auth)
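
For illustration only, a client could build a URL that passes the signature check above roughly as follows (the query id and api_key are invented; expires has to be in the future and at most an hour ahead):

# Illustrative client-side signing; query id 42 and api_key "secret" are made up.
import time
from redash.authentication import sign

path = "/api/queries/42/results.csv"
expires = time.time() + 1800          # in the future, no more than 3600 seconds ahead
signature = sign("secret", path, expires)
url = "%s?signature=%s&expires=%s" % (path, signature, expires)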
|
||||
redash/controllers.py (new file, 338 lines)
@@ -0,0 +1,338 @@
|
||||
"""
|
||||
Flask-restful based API implementation for re:dash.
|
||||
|
||||
Currently the Flask server is used to serve the static assets (and the Angular.js app),
|
||||
but this is temporary and only due to configuration issues.
|
||||
"""
|
||||
import csv
|
||||
import hashlib
|
||||
import json
|
||||
import numbers
|
||||
import cStringIO
|
||||
import datetime
|
||||
|
||||
from flask import g, render_template, send_from_directory, make_response, request, jsonify
|
||||
from flask.ext.restful import Resource, abort
|
||||
|
||||
import sqlparse
|
||||
from redash import settings, utils
|
||||
from redash import data
|
||||
|
||||
from redash import app, auth, api, redis_connection, data_manager
|
||||
from redash import models
|
||||
|
||||
|
||||
@app.route('/ping', methods=['GET'])
|
||||
def ping():
|
||||
return 'PONG.'
|
||||
|
||||
|
||||
@app.route('/admin/<anything>')
|
||||
@app.route('/dashboard/<anything>')
|
||||
@app.route('/queries')
|
||||
@app.route('/queries/<anything>')
|
||||
@app.route('/')
|
||||
@auth.required
|
||||
def index(anything=None):
|
||||
email_md5 = hashlib.md5(g.user['email'].lower()).hexdigest()
|
||||
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
user = {
|
||||
'gravatar_url': gravatar_url,
|
||||
'is_admin': g.user['email'] in settings.ADMINS,
|
||||
'name': g.user['email']
|
||||
}
|
||||
|
||||
return render_template("index.html", user=json.dumps(user), analytics=settings.ANALYTICS)
|
||||
|
||||
|
||||
@app.route('/status.json')
|
||||
@auth.required
|
||||
def status_api():
|
||||
status = {}
|
||||
info = redis_connection.info()
|
||||
status['redis_used_memory'] = info['used_memory_human']
|
||||
|
||||
status['queries_count'] = models.Query.select().count()
|
||||
status['query_results_count'] = models.QueryResult.select().count()
|
||||
status['dashboards_count'] = models.Dashboard.select().count()
|
||||
status['widgets_count'] = models.Widget.select().count()
|
||||
|
||||
status['workers'] = [redis_connection.hgetall(w)
|
||||
for w in redis_connection.smembers('workers')]
|
||||
|
||||
manager_status = redis_connection.hgetall('manager:status')
|
||||
status['manager'] = manager_status
|
||||
status['manager']['queue_size'] = redis_connection.zcard('jobs')
|
||||
|
||||
return jsonify(status)
|
||||
|
||||
|
||||
@app.route('/api/queries/format', methods=['POST'])
|
||||
@auth.required
|
||||
def format_sql_query():
|
||||
arguments = request.get_json(force=True)
|
||||
query = arguments.get("query", "")
|
||||
|
||||
return sqlparse.format(query, reindent=True, keyword_case='upper')
|
||||
|
||||
|
||||
class BaseResource(Resource):
|
||||
decorators = [auth.required]
|
||||
|
||||
@property
|
||||
def current_user(self):
|
||||
return g.user['email']
|
||||
|
||||
|
||||
class DashboardListAPI(BaseResource):
|
||||
def get(self):
|
||||
dashboards = [d.to_dict() for d in
|
||||
models.Dashboard.select().where(models.Dashboard.is_archived==False)]
|
||||
|
||||
return dashboards
|
||||
|
||||
def post(self):
|
||||
dashboard_properties = request.get_json(force=True)
|
||||
dashboard = models.Dashboard(name=dashboard_properties['name'],
|
||||
user=self.current_user,
|
||||
layout='[]')
|
||||
dashboard.save()
|
||||
return dashboard.to_dict()
|
||||
|
||||
|
||||
class DashboardAPI(BaseResource):
|
||||
def get(self, dashboard_slug=None):
|
||||
try:
|
||||
dashboard = models.Dashboard.get_by_slug(dashboard_slug)
|
||||
except models.Dashboard.DoesNotExist:
|
||||
abort(404)
|
||||
|
||||
return dashboard.to_dict(with_widgets=True)
|
||||
|
||||
def post(self, dashboard_slug):
|
||||
# TODO: either convert all requests to use slugs or ids
|
||||
dashboard_properties = request.get_json(force=True)
|
||||
dashboard = models.Dashboard.get(models.Dashboard.id == dashboard_slug)
|
||||
dashboard.layout = dashboard_properties['layout']
|
||||
dashboard.name = dashboard_properties['name']
|
||||
dashboard.save()
|
||||
|
||||
return dashboard.to_dict(with_widgets=True)
|
||||
|
||||
def delete(self, dashboard_slug):
|
||||
dashboard = models.Dashboard.get_by_slug(dashboard_slug)
|
||||
dashboard.is_archived = True
|
||||
dashboard.save()
|
||||
|
||||
api.add_resource(DashboardListAPI, '/api/dashboards', endpoint='dashboards')
|
||||
api.add_resource(DashboardAPI, '/api/dashboards/<dashboard_slug>', endpoint='dashboard')
|
||||
|
||||
|
||||
class WidgetListAPI(BaseResource):
|
||||
def post(self):
|
||||
widget_properties = request.get_json(force=True)
|
||||
widget_properties['options'] = json.dumps(widget_properties['options'])
|
||||
widget_properties.pop('id', None)
|
||||
widget_properties['dashboard'] = widget_properties.pop('dashboard_id')
|
||||
widget_properties['visualization'] = widget_properties.pop('visualization_id')
|
||||
widget = models.Widget(**widget_properties)
|
||||
widget.save()
|
||||
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
new_row = True
|
||||
|
||||
if len(layout) == 0 or widget.width == 2:
|
||||
layout.append([widget.id])
|
||||
elif len(layout[-1]) == 1:
|
||||
neighbour_widget = models.Widget.get(models.Widget.id == layout[-1][0])
|
||||
if neighbour_widget.width == 1:
|
||||
layout[-1].append(widget.id)
|
||||
new_row = False
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
else:
|
||||
layout.append([widget.id])
|
||||
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
return {'widget': widget.to_dict(), 'layout': layout, 'new_row': new_row}
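
To make the row-packing rules above concrete, here is a small standalone sketch that mirrors the same logic (the widget ids and widths are invented):

# Standalone sketch of the placement rules implemented above; ids/widths are invented.
def place(layout, widget_id, width, widths):
    # an empty dashboard or a double-width widget always starts a new row
    if len(layout) == 0 or width == 2:
        layout.append([widget_id])
        return True
    # a row holding a single regular-width widget can take one more
    if len(layout[-1]) == 1 and widths[layout[-1][0]] == 1:
        layout[-1].append(widget_id)
        return False
    layout.append([widget_id])
    return True

widths = {1: 1, 2: 1, 3: 2}
layout = []
for wid in (1, 2, 3):
    place(layout, wid, widths[wid], widths)
# layout is now [[1, 2], [3]]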
|
||||
|
||||
|
||||
class WidgetAPI(BaseResource):
|
||||
def delete(self, widget_id):
|
||||
widget = models.Widget.get(models.Widget.id == widget_id)
|
||||
# TODO: reposition existing ones
|
||||
layout = json.loads(widget.dashboard.layout)
|
||||
layout = map(lambda row: filter(lambda w: w != widget_id, row), layout)
|
||||
layout = filter(lambda row: len(row) > 0, layout)
|
||||
widget.dashboard.layout = json.dumps(layout)
|
||||
widget.dashboard.save()
|
||||
|
||||
widget.delete_instance()
|
||||
|
||||
api.add_resource(WidgetListAPI, '/api/widgets', endpoint='widgets')
|
||||
api.add_resource(WidgetAPI, '/api/widgets/<int:widget_id>', endpoint='widget')
|
||||
|
||||
|
||||
class QueryListAPI(BaseResource):
|
||||
def post(self):
|
||||
query_def = request.get_json(force=True)
|
||||
# id, created_at, api_key
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
query_def['user'] = self.current_user
|
||||
query = models.Query(**query_def)
|
||||
query.save()
|
||||
|
||||
query.create_default_visualizations()
|
||||
|
||||
return query.to_dict(with_result=False)
|
||||
|
||||
def get(self):
|
||||
return [q.to_dict(with_result=False, with_stats=True) for q in models.Query.all_queries()]
|
||||
|
||||
|
||||
class QueryAPI(BaseResource):
|
||||
def post(self, query_id):
|
||||
query_def = request.get_json(force=True)
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
if 'latest_query_data_id' in query_def:
|
||||
query_def['latest_query_data'] = query_def.pop('latest_query_data_id')
|
||||
|
||||
models.Query.update_instance(query_id, **query_def)
|
||||
|
||||
query = models.Query.get_by_id(query_id)
|
||||
|
||||
return query.to_dict(with_result=False, with_visualizations=True)
|
||||
|
||||
def get(self, query_id):
|
||||
q = models.Query.get(models.Query.id == query_id)
|
||||
if q:
|
||||
return q.to_dict(with_visualizations=True)
|
||||
else:
|
||||
abort(404, message="Query not found.")
|
||||
|
||||
api.add_resource(QueryListAPI, '/api/queries', endpoint='queries')
|
||||
api.add_resource(QueryAPI, '/api/queries/<query_id>', endpoint='query')
|
||||
|
||||
|
||||
class VisualizationListAPI(BaseResource):
|
||||
def post(self):
|
||||
kwargs = request.get_json(force=True)
|
||||
kwargs['options'] = json.dumps(kwargs['options'])
|
||||
kwargs['query'] = kwargs.pop('query_id')
|
||||
|
||||
vis = models.Visualization(**kwargs)
|
||||
vis.save()
|
||||
|
||||
return vis.to_dict(with_query=False)
|
||||
|
||||
|
||||
class VisualizationAPI(BaseResource):
|
||||
def post(self, visualization_id):
|
||||
kwargs = request.get_json(force=True)
|
||||
if 'options' in kwargs:
|
||||
kwargs['options'] = json.dumps(kwargs['options'])
|
||||
kwargs.pop('id', None)
|
||||
|
||||
update = models.Visualization.update(**kwargs).where(models.Visualization.id == visualization_id)
|
||||
update.execute()
|
||||
|
||||
vis = models.Visualization.get_by_id(visualization_id)
|
||||
|
||||
return vis.to_dict(with_query=False)
|
||||
|
||||
def delete(self, visualization_id):
|
||||
vis = models.Visualization.get(models.Visualization.id == visualization_id)
|
||||
vis.delete_instance()
|
||||
|
||||
api.add_resource(VisualizationListAPI, '/api/visualizations', endpoint='visualizations')
|
||||
api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', endpoint='visualization')
|
||||
|
||||
|
||||
class QueryResultListAPI(BaseResource):
|
||||
def post(self):
|
||||
params = request.json
|
||||
|
||||
if params['ttl'] == 0:
|
||||
query_result = None
|
||||
else:
|
||||
query_result = data_manager.get_query_result(params['query'], int(params['ttl']))
|
||||
|
||||
if query_result:
|
||||
return {'query_result': query_result.to_dict(parse_data=True)}
|
||||
else:
|
||||
job = data_manager.add_job(params['query'], data.Job.HIGH_PRIORITY)
|
||||
return {'job': job.to_dict()}
|
||||
|
||||
|
||||
class QueryResultAPI(BaseResource):
|
||||
def get(self, query_result_id):
|
||||
query_result = data_manager.get_query_result_by_id(query_result_id)
|
||||
if query_result:
|
||||
return {'query_result': query_result.to_dict(parse_data=True)}
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
|
||||
class CsvQueryResultsAPI(BaseResource):
|
||||
def get(self, query_id, query_result_id=None):
|
||||
if not query_result_id:
|
||||
query = models.Query.get(models.Query.id == query_id)
|
||||
if query:
|
||||
query_result_id = query._data['latest_query_data']
|
||||
|
||||
query_result = query_result_id and data_manager.get_query_result_by_id(query_result_id)
|
||||
if query_result:
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(query_result.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
for k, v in row.iteritems():
|
||||
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
|
||||
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
|
||||
|
||||
writer.writerow(row)
|
||||
|
||||
return make_response(s.getvalue(), 200, {'Content-Type': "text/csv; charset=UTF-8"})
|
||||
else:
|
||||
abort(404)
|
||||
|
||||
api.add_resource(CsvQueryResultsAPI, '/api/queries/<query_id>/results/<query_result_id>.csv',
|
||||
'/api/queries/<query_id>/results.csv',
|
||||
endpoint='csv_query_results')
|
||||
api.add_resource(QueryResultListAPI, '/api/query_results', endpoint='query_results')
|
||||
api.add_resource(QueryResultAPI, '/api/query_results/<query_result_id>', endpoint='query_result')
|
||||
|
||||
|
||||
class JobAPI(BaseResource):
|
||||
def get(self, job_id):
|
||||
# TODO: if finished, include the query result
|
||||
job = data.Job.load(data_manager.redis_connection, job_id)
|
||||
return {'job': job.to_dict()}
|
||||
|
||||
def delete(self, job_id):
|
||||
job = data.Job.load(data_manager.redis_connection, job_id)
|
||||
job.cancel()
|
||||
|
||||
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
|
||||
|
||||
@app.route('/<path:filename>')
|
||||
@auth.required
|
||||
def send_static(filename):
|
||||
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(debug=True)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,2 @@
|
||||
from manager import Manager
|
||||
from worker import Job
|
||||
import models
|
||||
import utils
|
||||
@@ -1,18 +1,17 @@
|
||||
"""
|
||||
Data manager. Used to manage and coordinate execution of queries.
|
||||
"""
|
||||
import collections
|
||||
from contextlib import contextmanager
|
||||
import collections
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
import psycopg2
|
||||
import qr
|
||||
import redis
|
||||
import time
|
||||
import query_runner
|
||||
import worker
|
||||
from utils import gen_query_hash
|
||||
|
||||
from redash import settings
|
||||
from redash.data import worker
|
||||
from redash.utils import gen_query_hash
|
||||
|
||||
class QueryResult(collections.namedtuple('QueryData', 'id query data runtime retrieved_at query_hash')):
|
||||
def to_dict(self, parse_data=False):
|
||||
@@ -25,10 +24,10 @@ class QueryResult(collections.namedtuple('QueryData', 'id query data runtime ret
|
||||
|
||||
|
||||
class Manager(object):
|
||||
def __init__(self, redis_connection, db_connection_string, db_max_connections):
|
||||
def __init__(self, redis_connection, db):
|
||||
self.redis_connection = redis_connection
|
||||
self.db = db
|
||||
self.workers = []
|
||||
self.db_connection_string = db_connection_string
|
||||
self.queue = qr.PriorityQueue("jobs", **self.redis_connection.connection_pool.connection_kwargs)
|
||||
self.max_retries = 5
|
||||
self.status = {
|
||||
@@ -150,11 +149,19 @@ class Manager(object):
|
||||
|
||||
return data
|
||||
|
||||
def start_workers(self, workers_count, connection_string):
|
||||
def start_workers(self, workers_count, connection_type, connection_string):
|
||||
if self.workers:
|
||||
return self.workers
|
||||
|
||||
runner = query_runner.redshift(connection_string)
|
||||
|
||||
if connection_type == 'mysql':
|
||||
from redash.data import query_runner_mysql
|
||||
runner = query_runner_mysql.mysql(connection_string)
|
||||
elif connection_type == 'graphite':
|
||||
from redash.data import query_runner_graphite
|
||||
runner = query_runner_graphite.graphite(connection_string)
|
||||
else:
|
||||
from redash.data import query_runner
|
||||
runner = query_runner.redshift(connection_string)
|
||||
|
||||
redis_connection_params = self.redis_connection.connection_pool.connection_kwargs
|
||||
self.workers = [worker.Worker(self, redis_connection_params, runner)
|
||||
@@ -171,17 +178,18 @@ class Manager(object):
|
||||
|
||||
@contextmanager
|
||||
def db_transaction(self):
|
||||
connection = psycopg2.connect(self.db_connection_string)
|
||||
cursor = connection.cursor()
|
||||
self.db.connect_db()
|
||||
|
||||
cursor = self.db.database.get_cursor()
|
||||
try:
|
||||
yield cursor
|
||||
except:
|
||||
connection.rollback()
|
||||
self.db.database.rollback()
|
||||
raise
|
||||
else:
|
||||
connection.commit()
|
||||
self.db.database.commit()
|
||||
finally:
|
||||
connection.close()
|
||||
self.db.close_db(None)
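
A hypothetical caller of this context manager (given a Manager instance and invented SQL/ids) would look like:

# Hypothetical usage; the SQL statement and ids are invented for illustration.
with manager.db_transaction() as cursor:
    cursor.execute("UPDATE queries SET latest_query_data_id = %s WHERE id = %s",
                   (query_result_id, query_id))
# commits on success, rolls back if an exception escapes the block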
|
||||
|
||||
def _save_status(self):
|
||||
self.redis_connection.hmset('manager:status', self.status)
|
||||
@@ -6,17 +6,19 @@ QueryRunner is the function that the workers use, to execute queries. This is th
|
||||
Because the worker just passes the query, this can be used with any data store that has some sort of
|
||||
query language (for example: HiveQL).
|
||||
"""
|
||||
import logging
|
||||
import json
|
||||
import psycopg2
|
||||
import sys
|
||||
import select
|
||||
from .utils import JSONEncoder
|
||||
|
||||
import psycopg2
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
def redshift(connection_string):
|
||||
def column_friendly_name(column_name):
|
||||
return column_name
|
||||
|
||||
|
||||
def wait(conn):
|
||||
while 1:
|
||||
state = conn.poll()
|
||||
@@ -28,24 +30,24 @@ def redshift(connection_string):
|
||||
select.select([conn.fileno()], [], [])
|
||||
else:
|
||||
raise psycopg2.OperationalError("poll() returned %s" % state)
|
||||
|
||||
|
||||
def query_runner(query):
|
||||
connection = psycopg2.connect(connection_string, async=True)
|
||||
wait(connection)
|
||||
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
wait(connection)
|
||||
|
||||
|
||||
column_names = [col.name for col in cursor.description]
|
||||
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
columns = [{'name': col.name,
|
||||
'friendly_name': column_friendly_name(col.name),
|
||||
'type': None} for col in cursor.description]
|
||||
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
@@ -61,7 +63,7 @@ def redshift(connection_string):
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
return query_runner
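
Any additional runner only has to follow the same contract: a factory that takes connection details and returns query_runner(query) -> (json_data, error). A minimal illustrative runner (not part of the codebase) could look like:

# Minimal illustrative runner satisfying the same contract; not a real adapter.
import json
from redash.utils import JSONEncoder

def echo(connection_string):
    def query_runner(query):
        data = {'columns': [{'name': 'query', 'friendly_name': 'query', 'type': None}],
                'rows': [{'query': query}]}
        return json.dumps(data, cls=JSONEncoder), None

    return query_runner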
|
||||
redash/data/query_runner_graphite.py (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
"""
|
||||
QueryRunner for Graphite.
|
||||
"""
|
||||
import json
|
||||
import datetime
|
||||
import requests
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
|
||||
def graphite(connection_params):
|
||||
def transform_result(response):
|
||||
columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
|
||||
rows = []
|
||||
|
||||
for series in response.json():
|
||||
for values in series['datapoints']:
|
||||
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
|
||||
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
return json.dumps(data, cls=JSONEncoder)
|
||||
|
||||
def query_runner(query):
|
||||
base_url = "%s/render?format=json&" % connection_params['url']
|
||||
url = "%s%s" % (base_url, "&".join(query.split("\n")))
|
||||
error = None
|
||||
data = None
|
||||
|
||||
try:
|
||||
response = requests.get(url, auth=connection_params['auth'],
|
||||
verify=connection_params['verify'])
|
||||
|
||||
if response.status_code == 200:
|
||||
data = transform_result(response)
|
||||
else:
|
||||
error = "Failed getting results (%d)" % response.status_code
|
||||
|
||||
except Exception, ex:
|
||||
data = None
|
||||
error = ex.message
|
||||
|
||||
return data, error
|
||||
|
||||
query_runner.annotate_query = False
|
||||
|
||||
return query_runner
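
Since query_runner splits the stored query text on newlines and joins the pieces with '&', the query is expected to be a list of Graphite render parameters, one per line. A hypothetical example:

# Hypothetical query text for this runner; the metric name is invented.
query = "target=stats.timers.requests.mean\nfrom=-12hours"
# resulting URL: <url>/render?format=json&target=stats.timers.requests.mean&from=-12hours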
|
||||
55
redash/data/query_runner_mysql.py
Normal file
55
redash/data/query_runner_mysql.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""
|
||||
QueryRunner is the function the workers use to execute queries. This is the MySQL
version; the Redshift (PostgreSQL) runner lives in query_runner.py, and further
databases can be supported the same way.

Because the worker just passes the query, this can be used with any data store that
has some sort of query language (for example: HiveQL).
|
||||
"""
|
||||
import logging
|
||||
import json
|
||||
import MySQLdb
|
||||
import sys
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
def mysql(connection_string):
|
||||
if connection_string.endswith(';'):
|
||||
connection_string = connection_string[0:-1]
|
||||
|
||||
def query_runner(query):
|
||||
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
|
||||
connection = MySQLdb.connect(*connections_params)
|
||||
cursor = connection.cursor()
|
||||
|
||||
logging.debug("mysql got query: %s", query)
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
|
||||
data = cursor.fetchall()
|
||||
|
||||
num_fields = len(cursor.description)
|
||||
column_names = [i[0] for i in cursor.description]
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in data]
|
||||
|
||||
columns = [{'name': col_name,
|
||||
'friendly_name': col_name,
|
||||
'type': None} for col_name in column_names]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
cursor.close()
|
||||
except MySQLdb.Error, e:
|
||||
json_data = None
|
||||
error = e.message
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
return query_runner
|
||||
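The MySQL runner expects the "Server=;User=;Pwd=;Database=" convention documented in redash/settings.py below: it keeps only the values and passes them positionally to MySQLdb.connect. An illustration with placeholder host and credentials:

# Illustration only: how the semicolon-delimited connection string is split
# into positional arguments for MySQLdb.connect (placeholder values).
connection_string = "Server=db.example.com;User=redash;Pwd=secret;Database=analytics"
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
print(connections_params)
# -> ['db.example.com', 'redash', 'secret', 'analytics']
# i.e. MySQLdb.connect('db.example.com', 'redash', 'secret', 'analytics')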
@@ -11,7 +11,7 @@ import time
import signal
import setproctitle
import redis
from utils import gen_query_hash
from redash.utils import gen_query_hash


class Job(object):
@@ -234,8 +234,11 @@ class Worker(threading.Thread):
        start_time = time.time()
        self.set_title("running query %s" % job_id)

        annotated_query = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
            (pid, job.id, job.query_hash, job.priority, job.query)
        if getattr(self.query_runner, 'annotate_query', True):
            annotated_query = "/* Pid: %s, Job Id: %s, Query hash: %s, Priority: %s */ %s" % \
                (pid, job.id, job.query_hash, job.priority, job.query)
        else:
            annotated_query = job.query

        # TODO: here's the part that needs to be forked, not all of the worker process...
        data, error = self.query_runner(annotated_query)
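With this change a runner can opt out of the SQL-comment annotation by setting an annotate_query attribute on the callable, which is exactly what the Graphite runner above does (a comment prefix is not valid in a Graphite render query). A sketch of the opt-out from the runner's side; the runner body here is a stub:

# Sketch: how a runner opts out of query annotation (stub body, for illustration).
def query_runner(query):
    return None, "not implemented"       # placeholder result

query_runner.annotate_query = False      # worker will pass job.query through untouched

# The worker side then chooses between the annotated and the raw query:
annotate = getattr(query_runner, 'annotate_query', True)   # -> False here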
263  redash/models.py  Normal file
@@ -0,0 +1,263 @@
import json
import hashlib
import time
import datetime
from flask.ext.peewee.utils import slugify
import peewee
from redash import db, utils


#class User(db.Model):
#    id = db.Column(db.Integer, primary_key=True)
#    name = db.Column(db.String(320))
#    email = db.Column(db.String(160), unique=True)
#
#    def __repr__(self):
#        return '<User %r, %r>' % (self.name, self.email)


class BaseModel(db.Model):
    @classmethod
    def get_by_id(cls, model_id):
        return cls.get(cls.id == model_id)


class QueryResult(db.Model):
    id = peewee.PrimaryKeyField()
    query_hash = peewee.CharField(max_length=32, index=True)
    query = peewee.TextField()
    data = peewee.TextField()
    runtime = peewee.FloatField()
    retrieved_at = peewee.DateTimeField()

    class Meta:
        db_table = 'query_results'

    def to_dict(self):
        return {
            'id': self.id,
            'query_hash': self.query_hash,
            'query': self.query,
            'data': json.loads(self.data),
            'runtime': self.runtime,
            'retrieved_at': self.retrieved_at
        }

    def __unicode__(self):
        return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)


class Query(BaseModel):
    id = peewee.PrimaryKeyField()
    latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
    name = peewee.CharField(max_length=255)
    description = peewee.CharField(max_length=4096)
    query = peewee.TextField()
    query_hash = peewee.CharField(max_length=32)
    api_key = peewee.CharField(max_length=40)
    ttl = peewee.IntegerField()
    user = peewee.CharField(max_length=360)
    created_at = peewee.DateTimeField(default=datetime.datetime.now)

    class Meta:
        db_table = 'queries'

    def create_default_visualizations(self):
        table_visualization = Visualization(query=self, name="Table",
                                            description='',
                                            type="TABLE", options="{}")
        table_visualization.save()

    def to_dict(self, with_result=True, with_stats=False, with_visualizations=False):
        d = {
            'id': self.id,
            'latest_query_data_id': self._data.get('latest_query_data', None),
            'name': self.name,
            'description': self.description,
            'query': self.query,
            'query_hash': self.query_hash,
            'ttl': self.ttl,
            'user': self.user,
            'api_key': self.api_key,
            'created_at': self.created_at,
        }

        if with_stats:
            d['avg_runtime'] = self.avg_runtime
            d['min_runtime'] = self.min_runtime
            d['max_runtime'] = self.max_runtime
            d['last_retrieved_at'] = self.last_retrieved_at
            d['times_retrieved'] = self.times_retrieved

        if with_visualizations:
            d['visualizations'] = [vis.to_dict(with_query=False)
                                   for vis in self.visualizations]

        if with_result and self.latest_query_data:
            d['latest_query_data'] = self.latest_query_data.to_dict()

        return d

    @classmethod
    def all_queries(cls):
        query = """SELECT queries.*, query_stats.*
                   FROM queries
                   LEFT OUTER JOIN
                     (SELECT qu.query_hash,
                             count(0) AS "times_retrieved",
                             avg(runtime) AS "avg_runtime",
                             min(runtime) AS "min_runtime",
                             max(runtime) AS "max_runtime",
                             max(retrieved_at) AS "last_retrieved_at"
                      FROM queries qu
                      JOIN query_results qr ON qu.query_hash=qr.query_hash
                      GROUP BY qu.query_hash) query_stats ON query_stats.query_hash = queries.query_hash
                """
        return cls.raw(query)

    @classmethod
    def update_instance(cls, query_id, **kwargs):
        if 'query' in kwargs:
            kwargs['query_hash'] = utils.gen_query_hash(kwargs['query'])

        update = cls.update(**kwargs).where(cls.id == query_id)
        return update.execute()

    def save(self, *args, **kwargs):
        self.query_hash = utils.gen_query_hash(self.query)
        self._set_api_key()
        super(Query, self).save(*args, **kwargs)

    def _set_api_key(self):
        if not self.api_key:
            self.api_key = hashlib.sha1(
                u''.join([str(time.time()), self.query, self.user, self.name])).hexdigest()

    def __unicode__(self):
        return unicode(self.id)


class Dashboard(db.Model):
    id = peewee.PrimaryKeyField()
    slug = peewee.CharField(max_length=140, index=True)
    name = peewee.CharField(max_length=100)
    user = peewee.CharField(max_length=360)
    layout = peewee.TextField()
    is_archived = peewee.BooleanField(default=False, index=True)
    created_at = peewee.DateTimeField(default=datetime.datetime.now)

    class Meta:
        db_table = 'dashboards'

    def to_dict(self, with_widgets=False):
        layout = json.loads(self.layout)

        if with_widgets:
            widgets = Widget.select(Widget, Visualization, Query, QueryResult).\
                where(Widget.dashboard == self.id).join(Visualization).join(Query).join(QueryResult)
            widgets = {w.id: w.to_dict() for w in widgets}
            widgets_layout = map(lambda row: map(lambda widget_id: widgets.get(widget_id, None), row), layout)
        else:
            widgets_layout = None

        return {
            'id': self.id,
            'slug': self.slug,
            'name': self.name,
            'user': self.user,
            'layout': layout,
            'widgets': widgets_layout
        }

    @classmethod
    def get_by_slug(cls, slug):
        return cls.get(cls.slug==slug)

    def save(self, *args, **kwargs):
        if not self.slug:
            self.slug = slugify(self.name)

            tries = 1
            while self.select().where(Dashboard.slug == self.slug).first() is not None:
                self.slug = slugify(self.name) + "_{0}".format(tries)
                tries += 1

        super(Dashboard, self).save(*args, **kwargs)

    def __unicode__(self):
        return u"%s=%s" % (self.id, self.name)


class Visualization(BaseModel):
    id = peewee.PrimaryKeyField()
    type = peewee.CharField(max_length=100)
    query = peewee.ForeignKeyField(Query, related_name='visualizations')
    name = peewee.CharField(max_length=255)
    description = peewee.CharField(max_length=4096, null=True)
    options = peewee.TextField()

    class Meta:
        db_table = 'visualizations'

    def to_dict(self, with_query=True):
        d = {
            'id': self.id,
            'type': self.type,
            'name': self.name,
            'description': self.description,
            'options': json.loads(self.options),
        }

        if with_query:
            d['query'] = self.query.to_dict()

        return d

    def __unicode__(self):
        return u"%s %s" % (self.id, self.type)


class Widget(db.Model):
    id = peewee.PrimaryKeyField()
    visualization = peewee.ForeignKeyField(Visualization, related_name='widgets')

    width = peewee.IntegerField()
    options = peewee.TextField()
    dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
    created_at = peewee.DateTimeField(default=datetime.datetime.now)

    # unused; kept for backward compatability:
    type = peewee.CharField(max_length=100, null=True)
    query_id = peewee.IntegerField(null=True)

    class Meta:
        db_table = 'widgets'

    def to_dict(self):
        return {
            'id': self.id,
            'width': self.width,
            'options': json.loads(self.options),
            'visualization': self.visualization.to_dict(),
            'dashboard_id': self._data['dashboard']
        }

    def __unicode__(self):
        return u"%s" % self.id


all_models = (QueryResult, Query, Dashboard, Visualization, Widget)


def create_db(create_tables, drop_tables):
    db.connect_db()

    for model in all_models:
        if drop_tables and model.table_exists():
            # TODO: submit PR to peewee to allow passing cascade option to drop_table.
            db.database.execute_sql('DROP TABLE %s CASCADE' % model._meta.db_table)
            #model.drop_table()

        if create_tables:
            model.create_table()

    db.close_db(None)
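A rough sketch of how these models are used together; the names and values are placeholders, and the peewee database is assumed to be configured as in redash/settings.py below:

# Rough usage sketch of the models above (placeholder values; assumes the
# database connection from redash.settings is already configured).
from redash import models

query = models.Query(name='Signups per day', description='',
                     query='SELECT 1', ttl=3600, user='someone@example.com')
query.save()                            # save() derives query_hash and api_key
query.create_default_visualizations()   # adds the default "Table" visualization

dashboard = models.Dashboard(name='Growth', user='someone@example.com', layout='[]')
dashboard.save()                        # save() derives a unique slug from the name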
49  redash/settings.py  Normal file
@@ -0,0 +1,49 @@
import os
import urlparse


def parse_db_url(url):
    url_parts = urlparse.urlparse(url)
    connection = {
        'engine': 'peewee.PostgresqlDatabase',
    }

    if url_parts.hostname and not url_parts.path:
        connection['name'] = url_parts.hostname
    else:
        connection['name'] = url_parts.path[1:]
        connection['host'] = url_parts.hostname
        connection['port'] = url_parts.port
        connection['user'] = url_parts.username
        connection['password'] = url_parts.password

    return connection


def fix_assets_path(path):
    fullpath = os.path.join(os.path.dirname(__file__), path)
    return fullpath


REDIS_URL = os.environ.get('REDASH_REDIS_URL', "redis://localhost:6379")

# "pg", "graphite" or "mysql"
CONNECTION_ADAPTER = os.environ.get("REDASH_CONNECTION_ADAPTER", "pg")
# Connection string for the database that is used to run queries against. Examples:
# -- mysql: CONNECTION_STRING = "Server=;User=;Pwd=;Database="
# -- pg: CONNECTION_STRING = "user= password= host= port=5439 dbname="
# -- graphite: CONNECTION_STRING = {'url': 'https://graphite.yourcompany.com', 'auth': ('user', 'password'), 'verify': True}
CONNECTION_STRING = os.environ.get("REDASH_CONNECTION_STRING", "user= password= host= port=5439 dbname=")

# Connection settings for re:dash's own database (where we store the queries, results, etc)
DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", "postgresql://postgres"))

# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
# access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
# Email addresses of admin users (comma separated)
ADMINS = os.environ.get("REDASH_ADMINS", '').split(',')
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/dist/"))
WORKERS_COUNT = int(os.environ.get("REDASH_WORKERS_COUNT", "2"))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
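parse_db_url turns the REDASH_DATABASE_URL environment variable into the connection dict flask-peewee expects; its behaviour on the different URL shapes is pinned down by tests/test_settings.py further below. A quick illustration with a made-up URL:

# Illustration of parse_db_url on a made-up URL (matches tests/test_settings.py).
from redash import settings

config = settings.parse_db_url("postgresql://user:pass@localhost:5432/postgres")
# -> {'engine': 'peewee.PostgresqlDatabase', 'name': 'postgres',
#     'host': 'localhost', 'port': 5432, 'user': 'user', 'password': 'pass'}

config = settings.parse_db_url("postgresql://postgres")
# -> {'engine': 'peewee.PostgresqlDatabase', 'name': 'postgres'}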
@@ -36,6 +36,10 @@ class JSONEncoder(json.JSONEncoder):
        super(JSONEncoder, self).default(o)


def json_dumps(data):
    return json.dumps(data, cls=JSONEncoder)


class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
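json_dumps is a thin convenience wrapper so callers (the API tests above use it too) don't have to pass cls=JSONEncoder themselves. A minimal illustration, assuming the encoder's default() handles datetime values as the query-result code relies on:

# Minimal illustration of the json_dumps helper (assumes JSONEncoder's default()
# serializes datetime objects, which the query-result payloads depend on).
import datetime
from redash.utils import json_dumps

payload = {'columns': [{'name': 'Time::x'}],
           'rows': [{'Time::x': datetime.datetime(2014, 1, 1, 12, 0)}]}
print(json_dumps(payload))   # datetime handled by the custom encoder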
26  requirements.txt  Normal file
@@ -0,0 +1,26 @@
Flask==0.10.1
Flask-GoogleAuth==0.4
Flask-RESTful==0.2.10
Jinja2==2.7.2
MarkupSafe==0.18
WTForms==1.0.5
Werkzeug==0.9.4
aniso8601==0.82
atfork==0.1.2
blinker==1.3
flask-peewee==0.6.5
itsdangerous==0.23
peewee==2.2.0
psycopg2==2.5.1
python-dateutil==2.1
pytz==2013.9
qr==0.6.0
redis==2.7.5
requests==2.2.0
setproctitle==1.1.8
six==1.5.2
sqlparse==0.1.8
wsgiref==0.1.2
wtf-peewee==0.2.2
Flask-Script==0.6.6
honcho==0.5.0
24  tests/__init__.py  Normal file
@@ -0,0 +1,24 @@
from unittest import TestCase
from redash import settings, db, app
import redash.models

# TODO: this isn't pretty... :-)
settings.DATABASE_CONFIG = {
    'name': 'circle_test',
    'engine': 'peewee.PostgresqlDatabase',
    'threadlocals': True
}
app.config['DATABASE'] = settings.DATABASE_CONFIG
db.load_database()

for model in redash.models.all_models:
    model._meta.database = db.database


class BaseTestCase(TestCase):
    def setUp(self):
        redash.models.create_db(True, True)

    def tearDown(self):
        db.close_db(None)
        redash.models.create_db(False, True)
58  tests/factories.py  Normal file
@@ -0,0 +1,58 @@
import datetime
import redash.models


class ModelFactory(object):
    def __init__(self, model, **kwargs):
        self.model = model
        self.kwargs = kwargs

    def _get_kwargs(self, override_kwargs):
        kwargs = self.kwargs.copy()
        kwargs.update(override_kwargs)

        for key, arg in kwargs.items():
            if callable(arg):
                kwargs[key] = arg()

        return kwargs

    def instance(self, **override_kwargs):
        kwargs = self._get_kwargs(override_kwargs)

        return self.model(**kwargs)

    def create(self, **override_kwargs):
        kwargs = self._get_kwargs(override_kwargs)
        return self.model.create(**kwargs)

dashboard_factory = ModelFactory(redash.models.Dashboard,
                                 name='test', user='test@everything.me', layout='[]')

query_factory = ModelFactory(redash.models.Query,
                             name='New Query',
                             description='',
                             query='SELECT 1',
                             ttl=-1,
                             user='test@everything.me')

query_result_factory = ModelFactory(redash.models.QueryResult,
                                    data='{"columns":{}, "rows":[]}',
                                    runtime=1,
                                    retrieved_at=datetime.datetime.now(),
                                    query=query_factory.create,
                                    query_hash='')

visualization_factory = ModelFactory(redash.models.Visualization,
                                     type='CHART',
                                     query=query_factory.create,
                                     name='Chart',
                                     description='',
                                     options='{}')

widget_factory = ModelFactory(redash.models.Widget,
                              type='chart',
                              width=1,
                              options='{}',
                              dashboard=dashboard_factory.create,
                              visualization=visualization_factory.create)
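ModelFactory resolves any callable default at creation time, which is how widget_factory chains into dashboard_factory, query_factory, and visualization_factory. A short usage sketch; the overridden name is arbitrary:

# Short sketch of using the factories (the overridden name is arbitrary).
from tests.factories import query_factory, widget_factory

q = query_factory.create(name='My test query')   # overrides the 'New Query' default
w = widget_factory.create()                      # callable defaults create a fresh
                                                 # dashboard, query and visualization
# w.dashboard, w.visualization and w.visualization.query are all newly created rows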
323  tests/test_controllers.py  Normal file
@@ -0,0 +1,323 @@
from contextlib import contextmanager
import json
import time
from tests import BaseTestCase
from tests.factories import dashboard_factory, widget_factory, visualization_factory, query_factory, \
    query_result_factory
from redash import app, models
from redash.utils import json_dumps
from redash.authentication import sign


@contextmanager
def authenticated_user(c, user='test@example.com', name='John Test'):
    with c.session_transaction() as sess:
        sess['openid'] = {'email': user, 'name': name}

    yield


def json_request(method, path, data=None):
    if data:
        response = method(path, data=json_dumps(data))
    else:
        response = method(path)

    if response.data:
        response.json = json.loads(response.data)
    else:
        response.json = None

    return response


class AuthenticationTestMixin():
    def test_redirects_when_not_authenticated(self):
        with app.test_client() as c:
            for path in self.paths:
                rv = c.get(path)
                self.assertEquals(302, rv.status_code)

    def test_returns_content_when_authenticated(self):
        with app.test_client() as c, authenticated_user(c):
            for path in self.paths:
                rv = c.get(path)
                self.assertEquals(200, rv.status_code)


class PingTest(BaseTestCase):
    def test_ping(self):
        with app.test_client() as c:
            rv = c.get('/ping')
            self.assertEquals(200, rv.status_code)
            self.assertEquals('PONG.', rv.data)


class IndexTest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/', '/dashboard/example', '/queries/1', '/admin/status']
        super(IndexTest, self).setUp()


class StatusTest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/status.json']
        super(StatusTest, self).setUp()


class DashboardAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/api/dashboards']
        super(DashboardAPITest, self).setUp()

    def test_get_dashboard(self):
        d1 = dashboard_factory.create()
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/api/dashboards/{0}'.format(d1.slug))
            self.assertEquals(rv.status_code, 200)
            self.assertDictEqual(json.loads(rv.data), d1.to_dict(with_widgets=True))

    def test_get_non_existint_dashbaord(self):
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/api/dashboards/not_existing')
            self.assertEquals(rv.status_code, 404)

    def test_create_new_dashboard(self):
        user_email = 'test@everything.me'
        with app.test_client() as c, authenticated_user(c, user=user_email):
            dashboard_name = 'Test Dashboard'
            rv = json_request(c.post, '/api/dashboards', data={'name': dashboard_name})
            self.assertEquals(rv.status_code, 200)
            self.assertEquals(rv.json['name'], 'Test Dashboard')
            self.assertEquals(rv.json['user'], user_email)
            self.assertEquals(rv.json['layout'], [])

    def test_update_dashboard(self):
        d = dashboard_factory.create()
        new_name = 'New Name'
        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/dashboards/{0}'.format(d.id),
                              data={'name': new_name, 'layout': '[]'})
            self.assertEquals(rv.status_code, 200)
            self.assertEquals(rv.json['name'], new_name)

    def test_delete_dashboard(self):
        d = dashboard_factory.create()
        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.delete, '/api/dashboards/{0}'.format(d.slug))
            self.assertEquals(rv.status_code, 200)

            d = models.Dashboard.get_by_slug(d.slug)
            self.assertTrue(d.is_archived)


class WidgetAPITest(BaseTestCase):
    def create_widget(self, dashboard, visualization, width=1):
        data = {
            'visualization_id': visualization.id,
            'dashboard_id': dashboard.id,
            'options': {},
            'width': width
        }

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/widgets', data=data)

        return rv

    def test_create_widget(self):
        dashboard = dashboard_factory.create()
        vis = visualization_factory.create()

        rv = self.create_widget(dashboard, vis)
        self.assertEquals(rv.status_code, 200)

        dashboard = models.Dashboard.get(models.Dashboard.id == dashboard.id)
        self.assertEquals(unicode(rv.json['layout']), dashboard.layout)

        self.assertEquals(dashboard.widgets, 1)
        self.assertEquals(rv.json['layout'], [[rv.json['widget']['id']]])
        self.assertEquals(rv.json['new_row'], True)

        rv2 = self.create_widget(dashboard, vis)
        self.assertEquals(dashboard.widgets, 2)
        self.assertEquals(rv2.json['layout'],
                          [[rv.json['widget']['id'], rv2.json['widget']['id']]])
        self.assertEquals(rv2.json['new_row'], False)

        rv3 = self.create_widget(dashboard, vis)
        self.assertEquals(rv3.json['new_row'], True)
        rv4 = self.create_widget(dashboard, vis, width=2)
        self.assertEquals(rv4.json['layout'],
                          [[rv.json['widget']['id'], rv2.json['widget']['id']],
                           [rv3.json['widget']['id']],
                           [rv4.json['widget']['id']]])
        self.assertEquals(rv4.json['new_row'], True)

    def test_delete_widget(self):
        widget = widget_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.delete, '/api/widgets/{0}'.format(widget.id))

            self.assertEquals(rv.status_code, 200)
            dashboard = models.Dashboard.get_by_slug(widget.dashboard.slug)
            self.assertEquals(dashboard.widgets.count(), 0)
            self.assertEquals(dashboard.layout, '[]')

        # TODO: test how it updates the layout


class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = ['/api/queries']
        super(QueryAPITest, self).setUp()

    def test_update_query(self):
        query = query_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/queries/{0}'.format(query.id), data={'name': 'Testing'})
            self.assertEqual(rv.status_code, 200)
            self.assertEquals(rv.json['name'], 'Testing')

    def test_create_query(self):
        user = 'test@everything.me'
        query_data = {
            'name': 'Testing',
            'description': 'Description',
            'query': 'SELECT 1',
            'ttl': 3600
        }

        with app.test_client() as c, authenticated_user(c, user=user):
            rv = json_request(c.post, '/api/queries', data=query_data)

            self.assertEquals(rv.status_code, 200)
            self.assertDictContainsSubset(query_data, rv.json)
            self.assertEquals(rv.json['user'], user)
            self.assertIsNotNone(rv.json['api_key'])
            self.assertIsNotNone(rv.json['query_hash'])

            query = models.Query.get_by_id(rv.json['id'])
            self.assertEquals(len(list(query.visualizations)), 1)

    def test_get_query(self):
        query = query_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.get, '/api/queries/{0}'.format(query.id))

            self.assertEquals(rv.status_code, 200)
            d = query.to_dict(with_visualizations=True)
            d.pop('created_at')
            self.assertDictContainsSubset(d, rv.json)

    def test_get_all_queries(self):
        queries = [query_factory.create() for _ in range(10)]

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.get, '/api/queries')

            self.assertEquals(rv.status_code, 200)
            self.assertEquals(len(rv.json), 10)


class VisualizationAPITest(BaseTestCase):
    def test_create_visualization(self):
        query = query_factory.create()
        data = {
            'query_id': query.id,
            'name': 'Chart',
            'description': '',
            'options': {},
            'type': 'CHART'
        }

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/visualizations', data=data)

            self.assertEquals(rv.status_code, 200)
            data.pop('query_id')
            self.assertDictContainsSubset(data, rv.json)

    def test_delete_visualization(self):
        visualization = visualization_factory.create()
        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.delete, '/api/visualizations/{0}'.format(visualization.id))

            self.assertEquals(rv.status_code, 200)
            self.assertEquals(models.Visualization.select().count(), 0)

    def test_update_visualization(self):
        visualization = visualization_factory.create()

        with app.test_client() as c, authenticated_user(c):
            rv = json_request(c.post, '/api/visualizations/{0}'.format(visualization.id),
                              data={'name': 'After Update'})

            self.assertEquals(rv.status_code, 200)
            self.assertEquals(rv.json['name'], 'After Update')


class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = []
        super(QueryResultAPITest, self).setUp()


class JobAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        self.paths = []
        super(JobAPITest, self).setUp()


class CsvQueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
    def setUp(self):
        super(CsvQueryResultAPITest, self).setUp()
        self.paths = []
        self.query_result = query_result_factory.create()
        self.path = '/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id)

    # TODO: factor out the HMAC authentication tests

    def signature(self, expires):
        return sign(self.query_result.query.api_key, self.path, expires)

    def test_redirect_when_unauthenticated(self):
        with app.test_client() as c:
            rv = c.get(self.path)
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_wrong_signature(self):
        with app.test_client() as c:
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': 'whatever', 'expires': 0})
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_correct_signature_and_wrong_expires(self):
        with app.test_client() as c:
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(0), 'expires': 0})
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_correct_signature_and_no_expires(self):
        with app.test_client() as c:
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(time.time()+3600)})
            self.assertEquals(rv.status_code, 302)

    def test_redirect_for_correct_signature_and_expires_too_long(self):
        with app.test_client() as c:
            expires = time.time()+(10*3600)
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
            self.assertEquals(rv.status_code, 302)

    def test_returns_200_for_correct_signature(self):
        with app.test_client() as c:
            expires = time.time()+3600
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
            self.assertEquals(rv.status_code, 200)

    def test_returns_200_for_authenticated_user(self):
        with app.test_client() as c, authenticated_user(c):
            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query_result.query.id, self.query_result.id))
            self.assertEquals(rv.status_code, 200)
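As the CsvQueryResultAPITest cases show, the CSV endpoint accepts either a logged-in session or a signature derived from the query's api_key plus an expiry timestamp within the allowed window. A sketch of how a client would build such a link, using the same sign() helper from redash.authentication; query and query_result here stand for existing model instances:

# Sketch: building a signed CSV link the way the tests above do.
# `query` and `query_result` are placeholders for existing model instances.
import time
from redash.authentication import sign

path = '/api/queries/{0}/results/{1}.csv'.format(query.id, query_result.id)
expires = time.time() + 3600          # within the window accepted by the server
url = '{0}?signature={1}&expires={2}'.format(path, sign(query.api_key, path, expires), expires)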
28  tests/test_models.py  Normal file
@@ -0,0 +1,28 @@
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory


class DashboardTest(BaseTestCase):
    def test_appends_suffix_to_slug_when_duplicate(self):
        d1 = dashboard_factory.create()
        self.assertEquals(d1.slug, 'test')

        d2 = dashboard_factory.create()
        self.assertNotEquals(d1.slug, d2.slug)

        d3 = dashboard_factory.create()
        self.assertNotEquals(d1.slug, d3.slug)
        self.assertNotEquals(d2.slug, d3.slug)


class QueryTest(BaseTestCase):
    def test_changing_query_text_changes_hash(self):
        q = query_factory.create()

        old_hash = q.query_hash
        models.Query.update_instance(q.id, query="SELECT 2;")

        q = models.Query.get_by_id(q.id)

        self.assertNotEquals(old_hash, q.query_hash)
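The hash test relies on Query.update_instance recomputing query_hash via utils.gen_query_hash whenever the query text changes. A minimal sketch of the behaviour being exercised; gen_query_hash itself lives in redash/utils.py and is not part of this diff (the 32-character query_hash column suggests an MD5-style hex digest, but that is an assumption):

# Minimal sketch of the behaviour the test checks. gen_query_hash is defined
# in redash.utils and not shown in this diff; the query id is a placeholder.
from redash import models
from redash.utils import gen_query_hash

q = models.Query.get_by_id(1)                    # assumes a query with id 1 exists
before = q.query_hash
models.Query.update_instance(q.id, query="SELECT 2;")
after = models.Query.get_by_id(q.id).query_hash
assert after == gen_query_hash("SELECT 2;") and after != before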
27  tests/test_settings.py  Normal file
@@ -0,0 +1,27 @@
from redash import settings as settings
from unittest import TestCase


class TestDatabaseUrlParser(TestCase):
    def test_only_database_name(self):
        config = settings.parse_db_url("postgresql://postgres")
        self.assertEquals(config['name'], 'postgres')

    def test_host_and_database_name(self):
        config = settings.parse_db_url("postgresql://localhost/postgres")
        self.assertEquals(config['name'], 'postgres')
        self.assertEquals(config['host'], 'localhost')

    def test_host_with_port_and_database_name(self):
        config = settings.parse_db_url("postgresql://localhost:5432/postgres")
        self.assertEquals(config['name'], 'postgres')
        self.assertEquals(config['host'], 'localhost')
        self.assertEquals(config['port'], 5432)

    def test_full_url(self):
        config = settings.parse_db_url("postgresql://user:pass@localhost:5432/postgres")
        self.assertEquals(config['name'], 'postgres')
        self.assertEquals(config['host'], 'localhost')
        self.assertEquals(config['port'], 5432)
        self.assertEquals(config['user'], 'user')
        self.assertEquals(config['password'], 'pass')